// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/hardware.h>
#include <asm/arch/rk_atags.h>
#include <asm/arch/cru_rv1126.h>
#include <asm/arch/grf_rv1126.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_rv1126.h>

/* define training flag */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* signal classes for de-skew programming */
#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

/* de-skew update modes: write absolute value vs. apply signed delta */
#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
/* NOTE(review): diagnostic grammar — should read "please define ..." */
#error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif

#ifdef CONFIG_TPL_BUILD

/*
 * Runtime state for the RV1126 DRAM init: MMIO bases for the controller
 * (pctl), PHY, CRU, scheduler (msch) and the GRF blocks, plus the
 * low-power idle thresholds used later.
 */
struct dram_info {
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;
	u32 pd_idle;
};

/* fixed physical base addresses of the blocks touched during DDR init */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

/*
 * Per-frequency detection parameter sets, selected at build time by
 * CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE (3 = DDR3, 0 = DDR4, 6 = LPDDR3,
 * 7 = LPDDR4).  NOTE(review): any other value leaves sdram_configs
 * undefined — presumably guarded by Kconfig; confirm.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-330.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-330.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-330.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-330.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

/* loader-parameter blob; indexed via struct sdram_head_info_index_v2 */
u32 common_info[] = {
#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

/* per-frequency-set-point parameters saved during init */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* LPDDR3 MR3 ODT value computed in set_ds_odt(), applied later */
static u8 lp3_odt_value;

/* write-leveling results per rank ([2]) and byte lane ([4]) */
static u8 wrlvl_result[2][4];

/*
 * DDR configuration 0-9.  Encoding matches the search key built in
 * calculate_ddrconfig(): bit8 = rank-1, bits7:5 = row-13,
 * bit3 = 8-bank flag, bits2:0 = bw+col-10 (bit4: purpose not evident
 * from this file — TODO confirm against the controller spec).
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2)  /* 9 */
};

/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = { 138 ((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */ 139 ((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */ 140 ((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */ 141 ((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */ 142 ((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */ 143 ((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */ 144 ((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */ 145 ((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */ 146 ((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */ 147 ((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */ 148 ((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */ 149 ((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */ 150 }; 151 152 /* DDR configuration 22-28 */ 153 u16 ddr_cfg_2_rbc_p2[] = { 154 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */ 155 ((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */ 156 ((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */ 157 ((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */ 158 ((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */ 159 ((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */ 160 ((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */ 161 }; 162 163 u8 d4_rbc_2_d3_rbc[][2] = { 164 {10, 0}, 165 {11, 2}, 166 {12, 23}, 167 {13, 1}, 168 {14, 28}, 169 {15, 24}, 170 {16, 27}, 171 {17, 7}, 172 {18, 6}, 173 {19, 25}, 174 {20, 26}, 175 {21, 3} 176 }; 177 178 u32 addrmap[23][9] = { 179 {24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808, 180 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */ 181 {23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707, 182 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */ 183 {23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808, 184 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */ 185 {22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606, 186 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */ 187 {24, 0x000a0a0a, 
0x00000000, 0x00000000, 0x00000000, 0x09090909, 188 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */ 189 {6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707, 190 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */ 191 {7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808, 192 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */ 193 {8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909, 194 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */ 195 {22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606, 196 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */ 197 {23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707, 198 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */ 199 200 {24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808, 201 0x08080808, 0x00000f0f, 0x0801}, /* 10 */ 202 {23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808, 203 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */ 204 {24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707, 205 0x07070707, 0x00000f07, 0x0700}, /* 12 */ 206 {23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707, 207 0x07070707, 0x00000f0f, 0x0700}, /* 13 */ 208 {24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707, 209 0x07070707, 0x00000f07, 0x3f01}, /* 14 */ 210 {23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707, 211 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */ 212 {23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606, 213 0x06060606, 0x00000f06, 0x3f00}, /* 16 */ 214 {8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909, 215 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */ 216 {7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808, 217 0x08080808, 0x00000f0f, 0x0700}, /* 18 */ 218 {7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808, 219 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */ 220 221 {6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707, 222 0x07070707, 0x00000f07, 0x3f00}, /* 20 */ 223 {23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606, 
224 0x06060606, 0x00000f06, 0x0600}, /* 21 */ 225 {21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505, 226 0x05050505, 0x00000f0f, 0x3f3f} /* 22 */ 227 }; 228 229 static u8 dq_sel[22][3] = { 230 {0x0, 0x17, 0x22}, 231 {0x1, 0x18, 0x23}, 232 {0x2, 0x19, 0x24}, 233 {0x3, 0x1a, 0x25}, 234 {0x4, 0x1b, 0x26}, 235 {0x5, 0x1c, 0x27}, 236 {0x6, 0x1d, 0x28}, 237 {0x7, 0x1e, 0x29}, 238 {0x8, 0x16, 0x21}, 239 {0x9, 0x1f, 0x2a}, 240 {0xa, 0x20, 0x2b}, 241 {0x10, 0x1, 0xc}, 242 {0x11, 0x2, 0xd}, 243 {0x12, 0x3, 0xe}, 244 {0x13, 0x4, 0xf}, 245 {0x14, 0x5, 0x10}, 246 {0x15, 0x6, 0x11}, 247 {0x16, 0x7, 0x12}, 248 {0x17, 0x8, 0x13}, 249 {0x18, 0x0, 0xb}, 250 {0x19, 0x9, 0x14}, 251 {0x1a, 0xa, 0x15} 252 }; 253 254 static u16 grp_addr[4] = { 255 ADD_GROUP_CS0_A, 256 ADD_GROUP_CS0_B, 257 ADD_GROUP_CS1_A, 258 ADD_GROUP_CS1_B 259 }; 260 261 static u8 wrlvl_result_offset[2][4] = { 262 {0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27}, 263 {0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29}, 264 }; 265 266 static u16 dqs_dq_skew_adr[16] = { 267 0x170 + 0, /* SKEW_UPDATE_RX_CS0_DQS0 */ 268 0x170 + 0xb, /* SKEW_UPDATE_RX_CS0_DQS1 */ 269 0x1d0 + 0, /* SKEW_UPDATE_RX_CS0_DQS2 */ 270 0x1d0 + 0xb, /* SKEW_UPDATE_RX_CS0_DQS3 */ 271 0x1a0 + 0, /* SKEW_UPDATE_RX_CS1_DQS0 */ 272 0x1a0 + 0xb, /* SKEW_UPDATE_RX_CS1_DQS1 */ 273 0x200 + 0, /* SKEW_UPDATE_RX_CS1_DQS2 */ 274 0x200 + 0xb, /* SKEW_UPDATE_RX_CS1_DQS3 */ 275 0x170 + 0x16, /* SKEW_UPDATE_TX_CS0_DQS0 */ 276 0x170 + 0x21, /* SKEW_UPDATE_TX_CS0_DQS1 */ 277 0x1d0 + 0x16, /* SKEW_UPDATE_TX_CS0_DQS2 */ 278 0x1d0 + 0x21, /* SKEW_UPDATE_TX_CS0_DQS3 */ 279 0x1a0 + 0x16, /* SKEW_UPDATE_TX_CS1_DQS0 */ 280 0x1a0 + 0x21, /* SKEW_UPDATE_TX_CS1_DQS1 */ 281 0x200 + 0x16, /* SKEW_UPDATE_TX_CS1_DQS2 */ 282 0x200 + 0x21, /* SKEW_UPDATE_TX_CS1_DQS3 */ 283 }; 284 285 static void rkclk_ddr_reset(struct dram_info *dram, 286 u32 ctl_srstn, u32 ctl_psrstn, 287 u32 phy_srstn, u32 phy_psrstn) 288 { 289 writel(UPCTL2_SRSTN_REQ(ctl_srstn) | 
UPCTL2_PSRSTN_REQ(ctl_psrstn) | 290 UPCTL2_ASRSTN_REQ(ctl_srstn), 291 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13); 292 293 writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn), 294 &dram->cru->softrst_con[12]); 295 } 296 297 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz) 298 { 299 unsigned int refdiv, postdiv1, postdiv2, fbdiv; 300 int delay = 1000; 301 u32 mhz = hz / MHz; 302 303 refdiv = 1; 304 if (mhz <= 100) { 305 postdiv1 = 6; 306 postdiv2 = 4; 307 } else if (mhz <= 150) { 308 postdiv1 = 4; 309 postdiv2 = 4; 310 } else if (mhz <= 200) { 311 postdiv1 = 6; 312 postdiv2 = 2; 313 } else if (mhz <= 300) { 314 postdiv1 = 4; 315 postdiv2 = 2; 316 } else if (mhz <= 400) { 317 postdiv1 = 6; 318 postdiv2 = 1; 319 } else { 320 postdiv1 = 4; 321 postdiv2 = 1; 322 } 323 fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24; 324 325 writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode); 326 327 writel(0x1f000000, &dram->cru->clksel_con[64]); 328 writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0); 329 writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv), 330 &dram->cru->pll[1].con1); 331 332 while (delay > 0) { 333 udelay(1); 334 if (LOCK(readl(&dram->cru->pll[1].con1))) 335 break; 336 delay--; 337 } 338 339 writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode); 340 } 341 342 static void rkclk_configure_ddr(struct dram_info *dram, 343 struct rv1126_sdram_params *sdram_params) 344 { 345 /* for inno ddr phy need freq / 2 */ 346 rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2); 347 } 348 349 static void phy_soft_reset(struct dram_info *dram) 350 { 351 void __iomem *phy_base = dram->phy; 352 353 clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2); 354 udelay(1); 355 setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); 356 udelay(1); 357 } 358 359 static unsigned int 360 calculate_ddrconfig(struct rv1126_sdram_params *sdram_params) 361 { 362 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 363 u32 cs, bw, 
die_bw, col, row, bank; 364 u32 cs1_row; 365 u32 i, tmp; 366 u32 ddrconf = -1; 367 u32 row_3_4; 368 369 cs = cap_info->rank; 370 bw = cap_info->bw; 371 die_bw = cap_info->dbw; 372 col = cap_info->col; 373 row = cap_info->cs0_row; 374 cs1_row = cap_info->cs1_row; 375 bank = cap_info->bk; 376 row_3_4 = cap_info->row_3_4; 377 378 if (sdram_params->base.dramtype == DDR4) { 379 if (cs == 2 && row == cs1_row && !row_3_4) { 380 tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) | 381 die_bw; 382 for (i = 17; i < 21; i++) { 383 if (((tmp & 0xf) == 384 (ddr4_cfg_2_rbc[i - 10] & 0xf)) && 385 ((tmp & 0x70) <= 386 (ddr4_cfg_2_rbc[i - 10] & 0x70))) { 387 ddrconf = i; 388 goto out; 389 } 390 } 391 } 392 393 tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw; 394 for (i = 10; i < 21; i++) { 395 if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) && 396 ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) && 397 ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) { 398 ddrconf = i; 399 goto out; 400 } 401 } 402 } else { 403 if (cs == 2 && row == cs1_row && bank == 3) { 404 for (i = 5; i < 8; i++) { 405 if (((bw + col - 10) == (ddr_cfg_2_rbc[i] & 406 0x7)) && 407 ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] & 408 (0x7 << 5))) { 409 ddrconf = i; 410 goto out; 411 } 412 } 413 } 414 415 tmp = ((cs - 1) << 8) | ((row - 13) << 5) | 416 ((bw + col - 10) << 0); 417 if (bank == 3) 418 tmp |= (1 << 3); 419 420 for (i = 0; i < 9; i++) 421 if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) && 422 ((tmp & (7 << 5)) <= 423 (ddr_cfg_2_rbc[i] & (7 << 5))) && 424 ((tmp & (1 << 8)) <= 425 (ddr_cfg_2_rbc[i] & (1 << 8)))) { 426 ddrconf = i; 427 goto out; 428 } 429 if (cs == 1 && bank == 3 && row <= 17 && 430 (col + bw) == 12) 431 ddrconf = 23; 432 } 433 434 out: 435 if (ddrconf > 28) 436 printascii("calculate ddrconfig error\n"); 437 438 if (sdram_params->base.dramtype == DDR4) { 439 for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) { 440 if (ddrconf == d4_rbc_2_d3_rbc[i][0]) { 441 if (ddrconf == 21 && 
row > 16) 442 printascii("warn:ddrconf21 row > 16\n"); 443 else 444 ddrconf = d4_rbc_2_d3_rbc[i][1]; 445 break; 446 } 447 } 448 } 449 450 return ddrconf; 451 } 452 453 static void sw_set_req(struct dram_info *dram) 454 { 455 void __iomem *pctl_base = dram->pctl; 456 457 /* clear sw_done=0 */ 458 writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL); 459 } 460 461 static void sw_set_ack(struct dram_info *dram) 462 { 463 void __iomem *pctl_base = dram->pctl; 464 465 /* set sw_done=1 */ 466 writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL); 467 while (1) { 468 /* wait programming done */ 469 if (readl(pctl_base + DDR_PCTL2_SWSTAT) & 470 PCTL2_SW_DONE_ACK) 471 break; 472 } 473 } 474 475 static void set_ctl_address_map(struct dram_info *dram, 476 struct rv1126_sdram_params *sdram_params) 477 { 478 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 479 void __iomem *pctl_base = dram->pctl; 480 u32 ddrconf = cap_info->ddrconfig; 481 u32 i, row; 482 483 row = cap_info->cs0_row; 484 if (sdram_params->base.dramtype == DDR4) { 485 for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) { 486 if (ddrconf == d4_rbc_2_d3_rbc[i][1]) { 487 ddrconf = d4_rbc_2_d3_rbc[i][0]; 488 break; 489 } 490 } 491 } 492 493 if (ddrconf > ARRAY_SIZE(addrmap)) { 494 printascii("set ctl address map fail\n"); 495 return; 496 } 497 498 sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0), 499 &addrmap[ddrconf][0], 9 * 4); 500 501 /* unused row set to 0xf */ 502 for (i = 17; i >= row; i--) 503 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 + 504 ((i - 12) * 8 / 32) * 4, 505 0xf << ((i - 12) * 8 % 32)); 506 507 if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4) 508 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31); 509 if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1) 510 setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8); 511 512 if (cap_info->rank == 1) 513 clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f); 514 } 515 516 static void 
phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	/*
	 * wait != 0: power the PLL up and spin until it reports lock;
	 * wait == 0: program the dividers for @freq (in Hz) without
	 * waiting.  Callers are expected to do the two phases separately.
	 */
	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		/* bit 9 of fbdiv lives in a separate register */
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

/*
 * Lookup tables mapping PHY drive-strength / ODT register codes to
 * their nominal impedance in ohms, sorted strongest (highest ohms)
 * first.  The selection loops below walk them from the weakest end.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_506ohm, 506},
	{PHY_DDR3_RON_253ohm, 253},
	{PHY_DDR3_RON_169hm, 169},
	{PHY_DDR3_RON_127ohm, 127},
	{PHY_DDR3_RON_101ohm, 101},
	{PHY_DDR3_RON_84ohm, 84},
	{PHY_DDR3_RON_72ohm, 72},
	{PHY_DDR3_RON_63ohm, 63},
	{PHY_DDR3_RON_56ohm, 56},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_42ohm, 42},
	{PHY_DDR3_RON_39ohm, 39},
	{PHY_DDR3_RON_36ohm, 36},
	{PHY_DDR3_RON_34ohm, 34},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_953ohm, 953},
	{PHY_DDR3_RTT_483ohm, 483},
	{PHY_DDR3_RTT_320ohm, 320},
	{PHY_DDR3_RTT_241ohm, 241},
	{PHY_DDR3_RTT_193ohm, 193},
	{PHY_DDR3_RTT_161ohm, 161},
	{PHY_DDR3_RTT_138ohm, 138},
	{PHY_DDR3_RTT_121ohm, 121},
	{PHY_DDR3_RTT_107ohm, 107},
	{PHY_DDR3_RTT_97ohm, 97},
	{PHY_DDR3_RTT_88ohm, 88},
	{PHY_DDR3_RTT_80ohm, 80},
	{PHY_DDR3_RTT_74ohm, 74},
	{PHY_DDR3_RTT_69ohm, 69},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_60ohm, 60},
	{PHY_DDR3_RTT_57ohm, 57},
	{PHY_DDR3_RTT_54ohm, 54},
	{PHY_DDR3_RTT_51ohm, 51},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_46ohm, 46},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_42ohm, 42}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_570ohm, 570},
	{PHY_DDR4_LPDDR3_RON_285ohm, 285},
	{PHY_DDR4_LPDDR3_RON_190ohm, 190},
	{PHY_DDR4_LPDDR3_RON_142ohm, 142},
	{PHY_DDR4_LPDDR3_RON_114ohm, 114},
	{PHY_DDR4_LPDDR3_RON_95ohm, 95},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_71ohm, 71},
	{PHY_DDR4_LPDDR3_RON_63ohm, 63},
	{PHY_DDR4_LPDDR3_RON_57ohm, 57},
	{PHY_DDR4_LPDDR3_RON_52ohm, 52},
	{PHY_DDR4_LPDDR3_RON_47ohm, 47},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_41ohm, 41},
	{PHY_DDR4_LPDDR3_RON_38ohm, 38},
	{PHY_DDR4_LPDDR3_RON_36ohm, 36},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_26ohm, 26},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_973ohm, 973},
	{PHY_DDR4_LPDDR3_RTT_493ohm, 493},
	{PHY_DDR4_LPDDR3_RTT_327ohm, 327},
	{PHY_DDR4_LPDDR3_RTT_247ohm, 247},
	{PHY_DDR4_LPDDR3_RTT_197ohm, 197},
	{PHY_DDR4_LPDDR3_RTT_164ohm, 164},
	{PHY_DDR4_LPDDR3_RTT_141ohm, 141},
	{PHY_DDR4_LPDDR3_RTT_123ohm, 123},
	{PHY_DDR4_LPDDR3_RTT_109ohm, 109},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_90ohm, 90},
	{PHY_DDR4_LPDDR3_RTT_82ohm, 82},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_70ohm, 70},
	{PHY_DDR4_LPDDR3_RTT_66ohm, 66},
	{PHY_DDR4_LPDDR3_RTT_62ohm, 62},
	{PHY_DDR4_LPDDR3_RTT_58ohm, 58},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_52ohm, 52},
	{PHY_DDR4_LPDDR3_RTT_49ohm, 49},
	{PHY_DDR4_LPDDR3_RTT_47ohm, 47},
	{PHY_DDR4_LPDDR3_RTT_45ohm, 45},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_606ohm, 606},
	{PHY_LPDDR4_RON_303ohm, 303},
	{PHY_LPDDR4_RON_202ohm, 202},
	/* NOTE(review): code says 152 ohm but table value is 153 — confirm */
	{PHY_LPDDR4_RON_152ohm, 153},
	{PHY_LPDDR4_RON_121ohm, 121},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_87ohm, 87},
	{PHY_LPDDR4_RON_76ohm, 76},
	{PHY_LPDDR4_RON_67ohm, 67},
	{PHY_LPDDR4_RON_61ohm, 61},
	{PHY_LPDDR4_RON_55ohm, 55},
	{PHY_LPDDR4_RON_51ohm, 51},
	{PHY_LPDDR4_RON_47ohm, 47},
	{PHY_LPDDR4_RON_43ohm, 43},
	{PHY_LPDDR4_RON_40ohm, 40},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_34ohm, 34},
	{PHY_LPDDR4_RON_32ohm, 32},
	{PHY_LPDDR4_RON_30ohm, 30},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_998ohm, 998},
	{PHY_LPDDR4_RTT_506ohm, 506},
	{PHY_LPDDR4_RTT_336ohm, 336},
	{PHY_LPDDR4_RTT_253ohm, 253},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_169ohm, 169},
	{PHY_LPDDR4_RTT_144ohm, 144},
	{PHY_LPDDR4_RTT_127ohm, 127},
	{PHY_LPDDR4_RTT_112ohm, 112},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_92ohm, 92},
	{PHY_LPDDR4_RTT_84ohm, 84},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_72ohm, 72},
	{PHY_LPDDR4_RTT_67ohm, 67},
	{PHY_LPDDR4_RTT_63ohm, 63},
	{PHY_LPDDR4_RTT_60ohm, 60},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_53ohm, 53},
	{PHY_LPDDR4_RTT_51ohm, 51},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_46ohm, 46},
	{PHY_LPDDR4_RTT_44ohm, 44}
};

/*
 * Quantize an ODT resistance (ohms) to the nearest-not-weaker LPDDR4
 * mode-register ODT code; 0 ohms means "disabled".
 */
static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

/*
 * Locate the drive/ODT parameter record for @dramtype inside the
 * common_info blob (offsets are in 32-bit words).  Returns NULL and
 * prints a message for unknown types.
 */
static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

/*
 * Program the LPDDR4 CA (MR12) and DQ (MR14) Vref values for the given
 * target frequency set-point, choosing the odt-on or odt-off Vref from
 * lp4_info depending on whether ODT is enabled at @freq_mhz.  The raw
 * millivolt-style value is clamped to [100, 420] and encoded into the
 * two MR Vref ranges (range bit 6; 4-unit steps from 100 or 220).
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (ca_vref < 100)
		ca_vref = 100;
	if (ca_vref > 420)
		ca_vref = 420;

	if (ca_vref <= 300)
		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
	else
		ca_vref = (1
<< 6) | (ca_vref - 220) / 4;

	if (dq_vref < 100)
		dq_vref = 100;
	if (dq_vref > 420)
		dq_vref = 420;

	if (dq_vref <= 300)
		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
	else
		dq_vref = (1 << 6) | (dq_vref - 220) / 4;

	/* MR12/MR14 live in INIT6/INIT7; update under the sw_done window */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

/*
 * Configure drive strength, ODT and slew rate on both the PHY side and
 * the DRAM side (via the mode registers staged in INIT3/INIT4/INIT6/
 * INIT7 for frequency set-point @dst_fsp).  The target impedances come
 * from the per-type record in common_info; odt-enable threshold
 * frequencies decide which of the odt-on/odt-off parameter sets apply.
 */
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	/* the lp4 record shares the same layout prefix */
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* dram odt en freq control phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* phy odt en freq control dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		/* LPDDR4 terminates to ground: force pull-down-only ODT */
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	/*
	 * Walk each ohms table from the weakest entry up and take the
	 * first register code whose impedance is >= the requested one.
	 * NOTE(review): the loop bounds always use the DDR3 table sizes;
	 * presumably all drv (and all odt) tables have equal length —
	 * confirm when adding entries.
	 */
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	/* derive RX/TX Vref midpoints from the termination network */
	if (dramtype != LPDDR4) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				     (phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	/* per-byte-lane blocks at 0x110/0x120/0x130/0x140 */
	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp);

	/* fetch the staged MR1 (DDR3/4) or MR3 (LPDDR2/3/4) value */
	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		/*
		 * Only 34 ohm is encoded explicitly; other values fall
		 * back to the cleared (default) DS field — TODO confirm.
		 */
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		/* LPDDR3 ODT (MR11) is applied later; just remember it */
		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else {/* for lpddr4 */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	/* write the updated MR1/MR3 value back into the INIT register */
	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

/*
 * Apply the per-dram-type byte-lane (DQ path) remap from the
 * common_info blob to PHY register 0x4f.  Always returns 0.
 */
static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

/*
 * Base PHY configuration: DQ remap, PLL divider setup (PHY clock is
 * half the DRAM data rate), then replay the pre-computed register
 * list from sdram_params (terminated by an offset of 0xFFFFFFFF).
 * (Function continues beyond this chunk.)
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/*
	 * Find which physical byte lanes carry logical bytes 0 and 1
	 * (2 bits per lane in the 0x4f remap register).
	 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* Enable only the byte lanes matching the detected bus width */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

/*
 * Toggle RFSHCTL3.refresh_update_level so the controller picks up new
 * refresh timing registers. Always returns 0.
 */
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * Read a DRAM mode register via the controller MRR path.
 * rank = 1: cs0
 * rank = 2: cs1
 * The raw value lands in ddr_grf_status[0] (status[1] for LPDDR4).
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);

	/* board dq swizzle: 4 bits per dq bit position */
	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		/*
		 * Undo the board dq swizzle on the returned MR byte.
		 * NOTE(review): the remapped value is accumulated in 'temp'
		 * but never copied back to 'ret', so the un-swizzled result
		 * appears to be dropped and the raw value is returned —
		 * confirm against vendor history whether 'ret = temp;' is
		 * missing here.
		 */
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return ret;
}

/* before call this function autorefresh should be disabled */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait until the debug command interface can accept a command */
	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	/* issue rank0+rank1 refresh via DBGCMD */
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

/*
 * Snapshot the per-group left/right loop inverse-delay-select values into
 * their holding registers so later de-skew updates start from the trained
 * prebit state. Uses the file-scope dq_sel and grp_addr tables.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

/* Pulse the RX prebit update strobe (PHY 0x70 bit4) to latch new rx skews */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

/* Pulse the TX prebit update strobe (PHY 0xc bit6) to latch new tx skews */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

/* Pulse the CA prebit update strobe (PHY 0x22 bit6) to latch new ca skews */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val - delta_*
 * delta_dir: value for differential signal: clk/
 * delta_sig: value for single signal: ca/cmd
 * cs: 0 -> cs0, 2 -> cs1, other -> both (encoded into cs_en bits)
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/* apply delta to all 0x20 CA/cmd single-ended skew registers */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/*
	 * Registers 0x167/0x168 are the differential clock pair; in the
	 * relative case remove the delta_sig just added and apply delta_dif
	 * instead.
	 */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		/* LP4: CS/CKE skews follow the clock value */
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
}

/*
 * Return the minimum programmed de-skew value for the given signal class
 * (rx/tx dq or ca), scanning only byte lanes enabled in PHY reg 0xf.
 */
static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	/* tx skew registers live in the second half of dqs_dq_skew_adr */
	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

/*
 * en != 0: enable the given low-power bits in PWRCTL.
 * en == 0: disable all low-power modes and return the previous bits so the
 *          caller can restore them later.
 */
static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val - delta_*
 * delta_dir: value for differential signal: dqs
 * delta_sig: value for single signal: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		/* skip byte lanes disabled by the detected bus width */
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		/* slots 0-8: dq0-7 + dm */
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
								dqs_dq_skew_adr[j] +
								i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		/* slots 9/0xa: dqs_p / dqs_n differential pair */
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

/*
 * Read-gate training for one chip-select.
 * Returns 0 on success, non-zero (per-lane error bits or -1 on overall
 * failure flag in PHY 0x91 bit5) on failure.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	/* temporarily force strong dn / disabled up ODT during gate training */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_247ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* bit5 = training error flag; low nibble = per-lane done bits */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	/* restore the original ODT values */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}

/*
 * Write leveling for one chip-select. Hangs forever on timeout (boot
 * cannot proceed without a trained bus). Returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* MR1 image for the current fsp drives the PHY's MR write during WL */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output (DDR3/4: set MR1 Qoff on the other cs) */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* poll until all enabled byte lanes report leveling done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			/* deliberate hang: DRAM is unusable at this point */
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output (clear the Qoff bit set above) */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

/* Fixed data pattern used by the read/write training engines */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};

/*
 * Read (eye) training for one chip-select using the PHY's auto engine.
 * Returns 0 on success, -1 on bad cs, timeout or training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/*
	 * DDR3 at the default mid-rail vref (0x80) trains better with the
	 * rx vref nudged down; restored at the end of the function.
	 */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing (PHY issues its own refreshes while training) */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the DDR3 rx vref tweak from the top of this function */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}

/*
 * Write (eye) training for one chip-select.
 * Hangs forever on timeout; returns -1 on a reported training error,
 * 0 on success. For LPDDR4 the trained write vref is saved into
 * fsp_param[dst_fsp] for later dynamic frequency scaling.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/*
	 * Low-speed LPDDR3 workaround: temporarily force CL=8/CWL=4 in the
	 * PHY fsp timing registers and MR2, restored after training.
	 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			/* deliberate hang: DRAM is unusable at this point */
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* undo the low-speed LPDDR3 CL/CWL workaround from above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}

/*
 * Dispatch the requested training steps (bitmask of *_TRAINING flags) for
 * one chip-select; stops at the first failing step and returns its result.
 * Note FULL_TRAINING expands without CA_TRAINING here.
 */
static int data_training(struct dram_info *dram, u32 cs,
			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
			 u32 training_flag)
{
	u32 ret = 0;

	if (training_flag == FULL_TRAINING)
		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
				WRITE_TRAINING | READ_TRAINING;

	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
		ret = data_training_wl(dram, cs,
				       sdram_params->base.dramtype,
				       sdram_params->ch.cap_info.rank);
		if (ret != 0)
			goto out;
	}

	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
		ret = data_training_rg(dram, cs,
				       sdram_params->base.dramtype);
		if (ret != 0)
			goto out;
	}

	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
		ret = data_training_rd(dram, cs,
				       sdram_params->base.dramtype,
				       sdram_params->base.ddr_freq);
		if (ret != 0)
			goto out;
	}

	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
		ret = data_training_wr(dram, cs,
				       sdram_params->base.dramtype,
				       sdram_params->base.ddr_freq, dst_fsp);
		if (ret != 0)
			goto out;
	}

out:
	return ret;
}

/*
 * Run write leveling on all ranks and record the per-rank/per-lane results
 * (relative to the current clock skew) into wrlvl_result[][].
 * Returns the OR of the training results (0 on success).
 */
static int get_wrlvl_val(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params)
{
	u32 i, j, clk_skew;
	void __iomem *phy_base = dram->phy;
	u32 lp_stat;
	int ret;

	lp_stat = low_power_update(dram, 0);

	clk_skew = readl(PHY_REG(phy_base, 0x150 + 0x17));

	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);

	for (j = 0; j < 2; j++)
		for (i = 0; i < 4; i++)
			wrlvl_result[j][i] =
				readl(PHY_REG(phy_base,
					      wrlvl_result_offset[j][i])) -
				clk_skew;

	low_power_update(dram, lp_stat);

	return ret;
}

/*
 * Full training sequence for a high target frequency: center the clock
 * skew on the average write-leveling result, run gate/read/write training
 * per rank, then normalize all rx/tx/ca de-skew values so the minimum
 * becomes zero, and finish with a final gate training pass.
 * Returns 0 on success.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	u32 dqs_skew, clk_skew, ca_skew;
	int ret;

	/* average write-leveling skew over all ranks and lanes */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
			dqs_skew += wrlvl_result[j][i];
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       ARRAY_SIZE(wrlvl_result[0]));

	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* rank-0 write-leveling results as write-training dqs defaults */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2) {
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift rx de-skews down so their minimum becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* same for tx/ca, using the common minimum of both signal classes */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* re-run gate training after the skew normalization */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}

/* Program the NoC address-mapping config and clear the grf routing bits */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->deviceconf);
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}

/* Copy the prepared NoC timing set from sdram_params into the msch regs */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}

/*
 * Publish the detected DRAM geometry: ddrconfig to the NoC, the encoded
 * os_reg2/3 words to pmugrf, and the per-cs sizes to the msch devicesize
 * register (in 64MB units).
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* cs1 start bit position from the controller address map */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			 6 + 2;
		/*
		 * NOTE(review): '1 << cs_pst' is an int shift; cs_pst can
		 * reach 39 here, which overflows before the u64 assignment —
		 * '(u64)1 << cs_pst' looks intended; confirm.
		 */
		if (cs_pst > 28)
			cs_cap[0] = 1 << cs_pst;
	}

	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
	       (((cs_cap[0] >> 20) / 64) & 0xff),
	       &dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}

/*
 * Configure the ddrgrf low-power controls and enable self-refresh /
 * power-down in the controller according to dram->sr_idle / pd_idle.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* per-dram-type low-power request routing (write-mask in [31:16]) */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}

/*
 * Build and publish the boot ATAGs consumed by the next stage: serial
 * console info, DDR bank layout, and SoC identification.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	atags_destroy();
	atags_set_tag(ATAG_SERIAL, &t_serial);

	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	/* 6th-row-removed parts only populate 3/4 of the address space */
	if (cap_info->row_3_4) {
		cs_cap[0] = cs_cap[0] * 3 / 4;
		cs_cap[1] = cs_cap[1] * 3 / 4;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* cs1 start bit position from the controller address map */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			 6 + 2;
	}

	if (cs_cap[1] && cs_pst > 27) {
		/*
		 * Two discontiguous banks.
		 * NOTE(review): '1 << cs_pst' is an int shift with cs_pst up
		 * to 39 — overflows before the u64 store; '(u64)1 << cs_pst'
		 * looks intended; confirm.
		 */
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1];
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
	}

	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is filled but never passed to
	 * atags_set_tag — the SOC tag appears to be left unpublished;
	 * confirm whether this is intentional.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}

/* Print the detected DRAM organization, including the grf split setting */
static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
{
	u32 split;

	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
		split = 0;
	else
		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
			SPLIT_SIZE_MASK;

	sdram_print_ddr_info(&sdram_params->ch.cap_info,
			     &sdram_params->base, split);
}

/*
 * Core init sequence: clock/reset bring-up, PHY and controller config,
 * mode-register programming, and initial gate training.
 * post_init selects the verbose/full path used after capacity detection.
 * Returns 0 on success, -1 on training failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* assert all resets, then release them stage by stage */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	/* release the controller reset and wait until it leaves init state */
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* re-issue the LPDDR4 ODT/vref mode registers from INIT6/7 */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	/* read-gate training on cs0 is mandatory */
	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/* on the final pass also train cs1 when a second rank was found */
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* scale PHY vref code to the DRAM VrefDQ unit (x39) */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}

/*
 * dram_detect_cap() - probe DRAM geometry (column/bank/row bits, bus
 * width, rank count) using pattern-based detection helpers; LPDDR4
 * geometry is read back from MR8 instead.
 *
 * Temporarily clears PWRCTL so the device stays out of low-power states
 * during probing, and restores it afterwards.
 *
 * Return: 0 on success, -1 (cap_err) on detection failure.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe col, bank, dbw */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe bank-group */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: density is encoded in MR8[5:2] */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* keep the device out of sr/pd while training probes run */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank 1 exists iff its gate training succeeds */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

phy_soft_reset(dram); 2274 2275 if (data_training(dram, 0, sdram_params, 0, 2276 READ_GATE_TRAINING) == 0) 2277 cap_info->bw = 2; 2278 else 2279 cap_info->bw = 1; 2280 } 2281 2282 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL); 2283 2284 cap_info->cs0_high16bit_row = cap_info->cs0_row; 2285 if (cs) { 2286 cap_info->cs1_row = cap_info->cs0_row; 2287 cap_info->cs1_high16bit_row = cap_info->cs0_row; 2288 } else { 2289 cap_info->cs1_row = 0; 2290 cap_info->cs1_high16bit_row = 0; 2291 } 2292 2293 return 0; 2294 cap_err: 2295 return -1; 2296 } 2297 2298 static int dram_detect_cs1_row(struct dram_info *dram, 2299 struct rv1126_sdram_params *sdram_params, 2300 unsigned char channel) 2301 { 2302 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2303 void __iomem *pctl_base = dram->pctl; 2304 u32 ret = 0; 2305 void __iomem *test_addr; 2306 u32 row, bktmp, coltmp, bw; 2307 u64 cs0_cap; 2308 u32 byte_mask; 2309 u32 cs_pst; 2310 u32 cs_add = 0; 2311 u32 max_row; 2312 2313 if (cap_info->rank == 2) { 2314 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2315 6 + 2; 2316 if (cs_pst < 28) 2317 cs_add = 1; 2318 2319 cs0_cap = 1 << cs_pst; 2320 2321 if (sdram_params->base.dramtype == DDR4) { 2322 if (cap_info->dbw == 0) 2323 bktmp = cap_info->bk + 2; 2324 else 2325 bktmp = cap_info->bk + 1; 2326 } else { 2327 bktmp = cap_info->bk; 2328 } 2329 bw = cap_info->bw; 2330 coltmp = cap_info->col; 2331 2332 if (bw == 2) 2333 byte_mask = 0xFFFF; 2334 else 2335 byte_mask = 0xFF; 2336 2337 max_row = (cs_pst == 31) ? 30 : 31; 2338 2339 max_row = max_row - bktmp - coltmp - bw - cs_add + 1; 2340 2341 row = (cap_info->cs0_row > max_row) ? 
max_row : 2342 cap_info->cs0_row; 2343 2344 for (; row > 12; row--) { 2345 test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE + 2346 (u32)cs0_cap + 2347 (1ul << (row + bktmp + coltmp + 2348 cs_add + bw - 1ul))); 2349 2350 writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap); 2351 writel(PATTERN, test_addr); 2352 2353 if (((readl(test_addr) & byte_mask) == 2354 (PATTERN & byte_mask)) && 2355 ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) & 2356 byte_mask) == 0)) { 2357 ret = row; 2358 break; 2359 } 2360 } 2361 } 2362 2363 return ret; 2364 } 2365 2366 /* return: 0 = success, other = fail */ 2367 static int sdram_init_detect(struct dram_info *dram, 2368 struct rv1126_sdram_params *sdram_params) 2369 { 2370 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2371 u32 ret; 2372 u32 sys_reg = 0; 2373 u32 sys_reg3 = 0; 2374 2375 if (sdram_init_(dram, sdram_params, 0) != 0) 2376 return -1; 2377 2378 if (sdram_params->base.dramtype == DDR3) { 2379 writel(PATTERN, CONFIG_SYS_SDRAM_BASE); 2380 if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN) 2381 return -1; 2382 } 2383 2384 if (dram_detect_cap(dram, sdram_params, 0) != 0) 2385 return -1; 2386 2387 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, 2388 sdram_params->base.dramtype); 2389 ret = sdram_init_(dram, sdram_params, 1); 2390 if (ret != 0) 2391 goto out; 2392 2393 cap_info->cs1_row = 2394 dram_detect_cs1_row(dram, sdram_params, 0); 2395 if (cap_info->cs1_row) { 2396 sys_reg = readl(&dram->pmugrf->os_reg[2]); 2397 sys_reg3 = readl(&dram->pmugrf->os_reg[3]); 2398 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row, 2399 sys_reg, sys_reg3, 0); 2400 writel(sys_reg, &dram->pmugrf->os_reg[2]); 2401 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 2402 } 2403 2404 sdram_detect_high_row(cap_info); 2405 2406 out: 2407 return ret; 2408 } 2409 2410 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz) 2411 { 2412 u32 i; 2413 u32 offset = 0; 2414 struct ddr2_3_4_lp2_3_info *ddr_info; 2415 2416 if (!freq_mhz) { 2417 ddr_info 
			= get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
		if (ddr_info)
			freq_mhz =
				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
				DDR_FREQ_MASK;
		else
			freq_mhz = 0;
	}

	/* table is sorted by ddr_freq and 0-terminated; pick the floor */
	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
		if (sdram_configs[i].base.ddr_freq == 0 ||
		    freq_mhz < sdram_configs[i].base.ddr_freq)
			break;
	}
	offset = i == 0 ? 0 : i - 1;

	return &sdram_configs[offset];
}

/* pctl registers that must be re-written for every frequency set-point */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};

/* PHY timing registers (cl/cwl/al) updated per frequency set-point */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};

/*
 * pre_set_rate() - stage controller/PHY timings, drive/ODT settings and
 * (for LPDDR4) mode-register values for the target frequency set-point
 * @dst_fsp before the actual clock switch happens in ddr_set_rate().
 *
 * Both register tables are scanned against the 0xFFFFFFFF-terminated
 * params arrays; "find" resumes where the previous match left off,
 * assuming both lists are in the same order.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}
	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/* mirror the staged MR values into the PHY shadow regs */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
		       ((0x2 << 6) >> dst_fsp_lp4),
		       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}

/*
 * save_fsp_param() - capture the ODT/drive-strength/vref settings and
 * NOC timings in effect for frequency set-point @dst_fsp into
 * fsp_param[dst_fsp] so later boot stages can restore them.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	if (sdram_params->base.dramtype == LPDDR4) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* read back the ODT value from whichever leg is enabled */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner =
		readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* decode drive strength / ODT from the staged mode registers */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = ((temp >> 1) & 0x1) |
				       (((temp >> 5) & 0x1) << 1);
		p_fsp_param->dq_odt = ((temp >> 2) & 0x1) |
				      (((temp >> 6) & 0x1) << 1) |
				      (((temp >> 9) & 0x1) << 2);
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 1) & 0x3;
		p_fsp_param->dq_odt = (temp >> 8) & 0x7;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 3) & 0x7;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & 0x7;
		p_fsp_param->ca_odt = (temp >> 4) & 0x7;

		/* vref_ca = midpoint of the trained per-byte windows */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark the slot valid for consumers of FSP_PARAM_STORE_ADDR */
	p_fsp_param->flag = FSP_FLAG;
}

#ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
/* Copy the collected set-point parameters to their fixed DRAM location. */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
#endif

/*
 * ddr_set_rate() - switch the DRAM to @freq using frequency set-point
 * @dst_fsp (@dst_fsp_lp4 is the LPDDR4 FSP-OP index).
 *
 * Sequence: stage new timings (pre_set_rate), force self-refresh, gate
 * clocks and re-lock DPLL/PHY PLL at the new rate, exit self-refresh,
 * rewrite mode registers, re-train, then save the set-point parameters.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;

	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
		PCTL2_OPERATING_MODE_MASK) ==
	       PCTL2_OPERATING_MODE_SR)
		continue;

	/* determine whether the target set-point runs with DLL off */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* DLL must be off before entering self-refresh for the switch */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* request software self-refresh and wait for confirmation */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
	while (1) {
		if (((readl(pctl_base + DDR_PCTL2_STAT) &
		      PCTL2_SELFREF_TYPE_MASK) ==
		     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
		    ((readl(pctl_base + DDR_PCTL2_STAT) &
		      PCTL2_OPERATING_MODE_MASK) ==
		     PCTL2_OPERATING_MODE_SR)) {
			break;
		}
	}

	/*
	 * NOTE(review): soc_con[0] is passed to writel() without '&' -
	 * confirm whether it holds an address or should be &...soc_con[0].
	 */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4)
	    && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/*
	 * Gate msch/upctl clocks while the PLLs re-lock.
	 * NOTE(review): clkgate_con[2] is passed to writel() without '&' -
	 * confirm the intended address expression.
	 */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       dram->cru->clkgate_con[2]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* hold the PHY in reset while switching the clocks */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
	       dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       dram->cru->clkgate_con[2]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
		PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* select the new frequency set-point in controller and PHY */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	/* leave self-refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
		PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
		continue;

	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* re-program the mode registers for the new set-point */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* pulse DLL reset when the target runs with DLL on */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				       PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				      PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: select the FSP-OP for the new set-point */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);

	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}

/*
 * ddr_set_rate_for_fsp() - cycle through the frequency set-points (f1,
 * f2, f3 unless thunder-boot) collecting per-FSP parameters, and finish
 * at the runtime frequency f0.  Frequencies come from the drive/ODT
 * info blob.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}

/*
 * get_uart_config() - return the uart_info word from the global-info
 * section of the loaded common_info blob (offsets are in 32-bit words).
 */
int get_uart_config(void)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	gbl_info = (struct global_info *)((void *)common_info +
					  index->global_index.offset * 4);

	return gbl_info->uart_info;
}

/* return: 0 = success, other = fail */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* fixed peripheral base addresses for the RV1126 */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

	/* validate the common_info blob layout before trusting it */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
					  index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];

	/* apply the 2T-timing option from the blob for DDR3/DDR4 */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);

	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
#endif /* CONFIG_TPL_BUILD */