1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * (C) Copyright 2020 Rockchip Electronics Co., Ltd. 4 */ 5 6 #include <common.h> 7 #include <debug_uart.h> 8 #include <dm.h> 9 #include <ram.h> 10 #include <syscon.h> 11 #include <asm/io.h> 12 #include <asm/arch/clock.h> 13 #include <asm/arch/hardware.h> 14 #include <asm/arch/rk_atags.h> 15 #include <asm/arch/cru_rv1126.h> 16 #include <asm/arch/grf_rv1126.h> 17 #include <asm/arch/sdram_common.h> 18 #include <asm/arch/sdram_rv1126.h> 19 20 /* define training flag */ 21 #define CA_TRAINING (0x1 << 0) 22 #define READ_GATE_TRAINING (0x1 << 1) 23 #define WRITE_LEVELING (0x1 << 2) 24 #define WRITE_TRAINING (0x1 << 3) 25 #define READ_TRAINING (0x1 << 4) 26 #define FULL_TRAINING (0xff) 27 28 #define SKEW_RX_SIGNAL (0) 29 #define SKEW_TX_SIGNAL (1) 30 #define SKEW_CA_SIGNAL (2) 31 32 #define DESKEW_MDF_ABS_VAL (0) 33 #define DESKEW_MDF_DIFF_VAL (1) 34 35 #ifdef CONFIG_TPL_BUILD 36 #ifndef CONFIG_TPL_TINY_FRAMEWORK 37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!! 38 #endif 39 #endif 40 41 #ifdef CONFIG_TPL_BUILD 42 43 struct dram_info { 44 void __iomem *pctl; 45 void __iomem *phy; 46 struct rv1126_cru *cru; 47 struct msch_regs *msch; 48 struct rv1126_ddrgrf *ddrgrf; 49 struct rv1126_grf *grf; 50 struct ram_info info; 51 struct rv1126_pmugrf *pmugrf; 52 u32 sr_idle; 53 u32 pd_idle; 54 }; 55 56 #define GRF_BASE_ADDR 0xfe000000 57 #define PMU_GRF_BASE_ADDR 0xfe020000 58 #define DDR_GRF_BASE_ADDR 0xfe030000 59 #define BUS_SGRF_BASE_ADDR 0xfe0a0000 60 #define SERVER_MSCH_BASE_ADDR 0xfe800000 61 #define CRU_BASE_ADDR 0xff490000 62 #define DDR_PHY_BASE_ADDR 0xff4a0000 63 #define UPCTL2_BASE_ADDR 0xffa50000 64 65 #define SGRF_SOC_CON12 0x30 66 #define SGRF_SOC_CON13 0x34 67 68 struct dram_info dram_info; 69 70 #define TPL_INIT_DDR_TYPE_DDR3 71 #ifdef TPL_INIT_DDR_TYPE_DDR3 72 struct rv1126_sdram_params sdram_configs[] = { 73 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-330.inc" 74 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc" 75 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc" 76 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc" 77 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc" 78 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc" 79 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc" 80 }; 81 #elif defined TPL_INIT_DDR_TYPE_DDR4 82 struct rv1126_sdram_params sdram_configs[] = { 83 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-330.inc" 84 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc" 85 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc" 86 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc" 87 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc" 88 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc" 89 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc" 90 }; 91 #elif defined TPL_INIT_DDR_TYPE_LPDDR3 92 struct rv1126_sdram_params sdram_configs[] = { 93 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-330.inc" 94 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc" 95 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc" 96 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc" 97 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc" 98 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc" 99 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc" 100 }; 101 #elif defined TPL_INIT_DDR_TYPE_LPDDR4 102 struct rv1126_sdram_params sdram_configs[] = { 103 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-330.inc" 104 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc" 105 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc" 106 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc" 107 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc" 108 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc" 109 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc" 110 }; 111 #endif 112 113 u32 common_info[] = { 114 #include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc" 115 }; 116 117 static struct rv1126_fsp_param fsp_param[MAX_IDX]; 118 119 static u8 lp3_odt_value; 120 121 static u8 wrlvl_result[2][4]; 122 123 /* DDR configuration 0-9 */ 124 u16 ddr_cfg_2_rbc[] = { 125 ((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */ 126 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */ 127 ((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */ 128 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */ 129 ((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */ 130 ((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */ 131 ((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */ 132 ((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */ 133 ((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */ 134 ((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */ 135 }; 136 137 /* DDR configuration 10-21 */ 138 u8 ddr4_cfg_2_rbc[] = { 139 ((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */ 140 ((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */ 141 ((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */ 142 ((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */ 143 ((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */ 144 ((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */ 145 ((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */ 146 ((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */ 147 ((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */ 148 ((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */ 149 ((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */ 150 ((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */ 151 }; 152 153 /* DDR configuration 22-28 */ 154 u16 ddr_cfg_2_rbc_p2[] = { 155 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */ 156 ((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */ 157 ((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */ 158 ((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */ 159 ((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */ 160 ((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */ 161 ((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */ 162 }; 163 164 u8 d4_rbc_2_d3_rbc[][2] = { 165 {10, 0}, 166 {11, 2}, 167 {12, 23}, 168 {13, 1}, 169 {14, 28}, 170 {15, 24}, 171 {16, 27}, 172 {17, 7}, 173 {18, 6}, 174 {19, 25}, 175 {20, 26}, 176 {21, 3} 177 }; 178 179 u32 addrmap[23][9] = { 180 {24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808, 181 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */ 182 {23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707, 183 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */ 184 {23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808, 185 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */ 186 {22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606, 187 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */ 188 {24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909, 189 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */ 190 {6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707, 191 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */ 192 {7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808, 193 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */ 194 {8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909, 195 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */ 196 {22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606, 197 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */ 198 {23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707, 199 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */ 200 201 {24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808, 202 0x08080808, 0x00000f0f, 0x0801}, /* 10 */ 203 {23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808, 204 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */ 205 {24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707, 206 0x07070707, 0x00000f07, 0x0700}, /* 12 */ 207 {23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707, 208 0x07070707, 0x00000f0f, 0x0700}, /* 13 */ 209 {24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707, 210 0x07070707, 0x00000f07, 0x3f01}, /* 14 */ 211 {23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707, 212 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */ 213 {23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606, 214 0x06060606, 0x00000f06, 0x3f00}, /* 16 */ 215 {8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909, 216 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */ 217 {7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808, 218 0x08080808, 0x00000f0f, 0x0700}, /* 18 */ 219 {7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808, 220 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */ 221 222 {6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707, 223 0x07070707, 0x00000f07, 0x3f00}, /* 20 */ 224 {23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606, 225 0x06060606, 0x00000f06, 0x0600}, /* 21 */ 226 {21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505, 227 0x05050505, 0x00000f0f, 0x3f3f} /* 22 */ 228 }; 229 230 static u8 dq_sel[22][3] = { 231 {0x0, 0x17, 0x22}, 232 {0x1, 0x18, 0x23}, 233 {0x2, 0x19, 0x24}, 234 {0x3, 0x1a, 0x25}, 235 {0x4, 0x1b, 0x26}, 236 {0x5, 0x1c, 0x27}, 237 {0x6, 0x1d, 0x28}, 238 {0x7, 0x1e, 0x29}, 239 {0x8, 0x16, 0x21}, 240 {0x9, 0x1f, 0x2a}, 241 {0xa, 0x20, 0x2b}, 242 {0x10, 0x1, 0xc}, 243 {0x11, 0x2, 0xd}, 244 {0x12, 0x3, 0xe}, 245 {0x13, 0x4, 0xf}, 246 {0x14, 0x5, 0x10}, 247 {0x15, 0x6, 0x11}, 248 {0x16, 0x7, 0x12}, 249 {0x17, 0x8, 0x13}, 250 {0x18, 0x0, 0xb}, 251 {0x19, 0x9, 0x14}, 252 {0x1a, 0xa, 0x15} 253 }; 254 255 static u16 grp_addr[4] = { 256 ADD_GROUP_CS0_A, 257 ADD_GROUP_CS0_B, 258 ADD_GROUP_CS1_A, 259 ADD_GROUP_CS1_B 260 }; 261 262 static u8 wrlvl_result_offset[2][4] = { 263 {0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27}, 264 {0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29}, 265 }; 266 267 static u16 dqs_dq_skew_adr[16] = { 268 0x170 + 0, /* SKEW_UPDATE_RX_CS0_DQS0 */ 269 0x170 + 0xb, /* SKEW_UPDATE_RX_CS0_DQS1 */ 270 0x1d0 + 0, /* SKEW_UPDATE_RX_CS0_DQS2 */ 271 0x1d0 + 0xb, /* SKEW_UPDATE_RX_CS0_DQS3 */ 272 0x1a0 + 0, /* SKEW_UPDATE_RX_CS1_DQS0 */ 273 0x1a0 + 0xb, /* SKEW_UPDATE_RX_CS1_DQS1 */ 274 0x200 + 0, /* SKEW_UPDATE_RX_CS1_DQS2 */ 275 0x200 + 0xb, /* SKEW_UPDATE_RX_CS1_DQS3 */ 276 0x170 + 0x16, /* SKEW_UPDATE_TX_CS0_DQS0 */ 277 0x170 + 0x21, /* SKEW_UPDATE_TX_CS0_DQS1 */ 278 0x1d0 + 0x16, /* SKEW_UPDATE_TX_CS0_DQS2 */ 279 0x1d0 + 0x21, /* SKEW_UPDATE_TX_CS0_DQS3 */ 280 0x1a0 + 0x16, /* SKEW_UPDATE_TX_CS1_DQS0 */ 281 0x1a0 + 0x21, /* SKEW_UPDATE_TX_CS1_DQS1 */ 282 0x200 + 0x16, /* SKEW_UPDATE_TX_CS1_DQS2 */ 283 0x200 + 0x21, /* SKEW_UPDATE_TX_CS1_DQS3 */ 284 }; 285 286 static void rkclk_ddr_reset(struct dram_info *dram, 287 u32 ctl_srstn, u32 ctl_psrstn, 288 u32 phy_srstn, u32 phy_psrstn) 289 { 290 writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) | 291 UPCTL2_ASRSTN_REQ(ctl_srstn), 292 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13); 293 294 writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn), 295 &dram->cru->softrst_con[12]); 296 } 297 298 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz) 299 { 300 unsigned int refdiv, postdiv1, postdiv2, fbdiv; 301 int delay = 1000; 302 u32 mhz = hz / MHz; 303 304 refdiv = 1; 305 if (mhz <= 100) { 306 postdiv1 = 6; 307 postdiv2 = 4; 308 } else if (mhz <= 150) { 309 postdiv1 = 4; 310 postdiv2 = 4; 311 } else if (mhz <= 200) { 312 postdiv1 = 6; 313 postdiv2 = 2; 314 } else if (mhz <= 300) { 315 postdiv1 = 4; 316 postdiv2 = 2; 317 } else if (mhz <= 400) { 318 postdiv1 = 6; 319 postdiv2 = 1; 320 } else { 321 postdiv1 = 4; 322 postdiv2 = 1; 323 } 324 fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24; 325 326 writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode); 327 328 writel(0x1f000000, &dram->cru->clksel_con[64]); 329 writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0); 330 writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv), 331 &dram->cru->pll[1].con1); 332 333 while (delay > 0) { 334 udelay(1); 335 if (LOCK(readl(&dram->cru->pll[1].con1))) 336 break; 337 delay--; 338 } 339 340 writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode); 341 } 342 343 static void rkclk_configure_ddr(struct dram_info *dram, 344 struct rv1126_sdram_params *sdram_params) 345 { 346 /* for inno ddr phy need freq / 2 */ 347 rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2); 348 } 349 350 static void phy_soft_reset(struct dram_info *dram) 351 { 352 void __iomem *phy_base = dram->phy; 353 354 clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2); 355 udelay(1); 356 setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); 357 udelay(1); 358 } 359 360 static unsigned int 361 calculate_ddrconfig(struct rv1126_sdram_params *sdram_params) 362 { 363 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 364 u32 cs, bw, die_bw, col, row, bank; 365 u32 cs1_row; 366 u32 i, tmp; 367 u32 ddrconf = -1; 368 u32 row_3_4; 369 370 cs = cap_info->rank; 371 bw = cap_info->bw; 372 die_bw = cap_info->dbw; 373 col = cap_info->col; 374 row = cap_info->cs0_row; 375 cs1_row = cap_info->cs1_row; 376 bank = cap_info->bk; 377 row_3_4 = cap_info->row_3_4; 378 379 if (sdram_params->base.dramtype == DDR4) { 380 if (cs == 2 && row == cs1_row && !row_3_4) { 381 tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) | 382 die_bw; 383 for (i = 17; i < 21; i++) { 384 if (((tmp & 0xf) == 385 (ddr4_cfg_2_rbc[i - 10] & 0xf)) && 386 ((tmp & 0x70) <= 387 (ddr4_cfg_2_rbc[i - 10] & 0x70))) { 388 ddrconf = i; 389 goto out; 390 } 391 } 392 } 393 394 tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw; 395 for (i = 10; i < 21; i++) { 396 if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) && 397 ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) && 398 ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) { 399 ddrconf = i; 400 goto out; 401 } 402 } 403 } else { 404 if (cs == 2 && row == cs1_row && bank == 3) { 405 for (i = 5; i < 8; i++) { 406 if (((bw + col - 10) == (ddr_cfg_2_rbc[i] & 407 0x7)) && 408 ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] & 409 (0x7 << 5))) { 410 ddrconf = i; 411 goto out; 412 } 413 } 414 } 415 416 tmp = ((cs - 1) << 8) | ((row - 13) << 5) | 417 ((bw + col - 10) << 0); 418 if (bank == 3) 419 tmp |= (1 << 3); 420 421 for (i = 0; i < 9; i++) 422 if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) && 423 ((tmp & (7 << 5)) <= 424 (ddr_cfg_2_rbc[i] & (7 << 5))) && 425 ((tmp & (1 << 8)) <= 426 (ddr_cfg_2_rbc[i] & (1 << 8)))) { 427 ddrconf = i; 428 goto out; 429 } 430 if (cs == 1 && bank == 3 && row <= 17 && 431 (col + bw) == 12) 432 ddrconf = 23; 433 } 434 435 out: 436 if (ddrconf > 28) 437 printascii("calculate ddrconfig error\n"); 438 439 if (sdram_params->base.dramtype == DDR4) { 440 for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) { 441 if (ddrconf == d4_rbc_2_d3_rbc[i][0]) { 442 if (ddrconf == 21 && row > 16) 443 printascii("warn:ddrconf21 row > 16\n"); 444 else 445 ddrconf = d4_rbc_2_d3_rbc[i][1]; 446 break; 447 } 448 } 449 } 450 451 return ddrconf; 452 } 453 454 static void sw_set_req(struct dram_info *dram) 455 { 456 void __iomem *pctl_base = dram->pctl; 457 458 /* clear sw_done=0 */ 459 writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL); 460 } 461 462 static void sw_set_ack(struct dram_info *dram) 463 { 464 void __iomem *pctl_base = dram->pctl; 465 466 /* set sw_done=1 */ 467 writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL); 468 while (1) { 469 /* wait programming done */ 470 if (readl(pctl_base + DDR_PCTL2_SWSTAT) & 471 PCTL2_SW_DONE_ACK) 472 break; 473 } 474 } 475 476 static void set_ctl_address_map(struct dram_info *dram, 477 struct rv1126_sdram_params *sdram_params) 478 { 479 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 480 void __iomem *pctl_base = dram->pctl; 481 u32 ddrconf = cap_info->ddrconfig; 482 u32 i, row; 483 484 row = cap_info->cs0_row; 485 if (sdram_params->base.dramtype == DDR4) { 486 for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) { 487 if (ddrconf == d4_rbc_2_d3_rbc[i][1]) { 488 ddrconf = d4_rbc_2_d3_rbc[i][0]; 489 break; 490 } 491 } 492 } 493 494 if (ddrconf > ARRAY_SIZE(addrmap)) { 495 printascii("set ctl address map fail\n"); 496 return; 497 } 498 499 sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0), 500 &addrmap[ddrconf][0], 9 * 4); 501 502 /* unused row set to 0xf */ 503 for (i = 17; i >= row; i--) 504 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 + 505 ((i - 12) * 8 / 32) * 4, 506 0xf << ((i - 12) * 8 % 32)); 507 508 if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4) 509 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31); 510 if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1) 511 setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8); 512 513 if (cap_info->rank == 1) 514 clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f); 515 } 516 517 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait) 518 { 519 void __iomem *phy_base = dram->phy; 520 u32 fbdiv, prediv, postdiv, postdiv_en; 521 522 if (wait) { 523 clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB); 524 while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK)) 525 continue; 526 } else { 527 freq /= MHz; 528 prediv = 1; 529 if (freq <= 200) { 530 fbdiv = 16; 531 postdiv = 2; 532 postdiv_en = 1; 533 } else if (freq <= 456) { 534 fbdiv = 8; 535 postdiv = 1; 536 postdiv_en = 1; 537 } else { 538 fbdiv = 4; 539 postdiv = 0; 540 postdiv_en = 0; 541 } 542 writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50)); 543 clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK, 544 (fbdiv >> 8) & 1); 545 clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK, 546 postdiv_en << PHY_POSTDIV_EN_SHIFT); 547 548 clrsetbits_le32(PHY_REG(phy_base, 0x52), 549 PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv); 550 clrsetbits_le32(PHY_REG(phy_base, 0x53), 551 PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT, 552 postdiv << PHY_POSTDIV_SHIFT); 553 } 554 } 555 556 static const u16 d3_phy_drv_2_ohm[][2] = { 557 {PHY_DDR3_RON_506ohm, 506}, 558 {PHY_DDR3_RON_253ohm, 253}, 559 {PHY_DDR3_RON_169hm, 169}, 560 {PHY_DDR3_RON_127ohm, 127}, 561 {PHY_DDR3_RON_101ohm, 101}, 562 {PHY_DDR3_RON_84ohm, 84}, 563 {PHY_DDR3_RON_72ohm, 72}, 564 {PHY_DDR3_RON_63ohm, 63}, 565 {PHY_DDR3_RON_56ohm, 56}, 566 {PHY_DDR3_RON_51ohm, 51}, 567 {PHY_DDR3_RON_46ohm, 46}, 568 {PHY_DDR3_RON_42ohm, 42}, 569 {PHY_DDR3_RON_39ohm, 39}, 570 {PHY_DDR3_RON_36ohm, 36}, 571 {PHY_DDR3_RON_34ohm, 34}, 572 {PHY_DDR3_RON_32ohm, 32}, 573 {PHY_DDR3_RON_30ohm, 30}, 574 {PHY_DDR3_RON_28ohm, 28}, 575 {PHY_DDR3_RON_27ohm, 27}, 576 {PHY_DDR3_RON_25ohm, 25}, 577 {PHY_DDR3_RON_24ohm, 24}, 578 {PHY_DDR3_RON_23ohm, 23}, 579 {PHY_DDR3_RON_22ohm, 22} 580 }; 581 582 static u16 d3_phy_odt_2_ohm[][2] = { 583 {PHY_DDR3_RTT_DISABLE, 0}, 584 {PHY_DDR3_RTT_953ohm, 953}, 585 {PHY_DDR3_RTT_483ohm, 483}, 586 {PHY_DDR3_RTT_320ohm, 320}, 587 {PHY_DDR3_RTT_241ohm, 241}, 588 {PHY_DDR3_RTT_193ohm, 193}, 589 {PHY_DDR3_RTT_161ohm, 161}, 590 {PHY_DDR3_RTT_138ohm, 138}, 591 {PHY_DDR3_RTT_121ohm, 121}, 592 {PHY_DDR3_RTT_107ohm, 107}, 593 {PHY_DDR3_RTT_97ohm, 97}, 594 {PHY_DDR3_RTT_88ohm, 88}, 595 {PHY_DDR3_RTT_80ohm, 80}, 596 {PHY_DDR3_RTT_74ohm, 74}, 597 {PHY_DDR3_RTT_69ohm, 69}, 598 {PHY_DDR3_RTT_64ohm, 64}, 599 {PHY_DDR3_RTT_60ohm, 60}, 600 {PHY_DDR3_RTT_57ohm, 57}, 601 {PHY_DDR3_RTT_54ohm, 54}, 602 {PHY_DDR3_RTT_51ohm, 51}, 603 {PHY_DDR3_RTT_48ohm, 48}, 604 {PHY_DDR3_RTT_46ohm, 46}, 605 {PHY_DDR3_RTT_44ohm, 44}, 606 {PHY_DDR3_RTT_42ohm, 42} 607 }; 608 609 static u16 d4lp3_phy_drv_2_ohm[][2] = { 610 {PHY_DDR4_LPDDR3_RON_570ohm, 570}, 611 {PHY_DDR4_LPDDR3_RON_285ohm, 285}, 612 {PHY_DDR4_LPDDR3_RON_190ohm, 190}, 613 {PHY_DDR4_LPDDR3_RON_142ohm, 142}, 614 {PHY_DDR4_LPDDR3_RON_114ohm, 114}, 615 {PHY_DDR4_LPDDR3_RON_95ohm, 95}, 616 {PHY_DDR4_LPDDR3_RON_81ohm, 81}, 617 {PHY_DDR4_LPDDR3_RON_71ohm, 71}, 618 {PHY_DDR4_LPDDR3_RON_63ohm, 63}, 619 {PHY_DDR4_LPDDR3_RON_57ohm, 57}, 620 {PHY_DDR4_LPDDR3_RON_52ohm, 52}, 621 {PHY_DDR4_LPDDR3_RON_47ohm, 47}, 622 {PHY_DDR4_LPDDR3_RON_44ohm, 44}, 623 {PHY_DDR4_LPDDR3_RON_41ohm, 41}, 624 {PHY_DDR4_LPDDR3_RON_38ohm, 38}, 625 {PHY_DDR4_LPDDR3_RON_36ohm, 36}, 626 {PHY_DDR4_LPDDR3_RON_34ohm, 34}, 627 {PHY_DDR4_LPDDR3_RON_32ohm, 32}, 628 {PHY_DDR4_LPDDR3_RON_30ohm, 30}, 629 {PHY_DDR4_LPDDR3_RON_28ohm, 28}, 630 {PHY_DDR4_LPDDR3_RON_27ohm, 27}, 631 {PHY_DDR4_LPDDR3_RON_26ohm, 26}, 632 {PHY_DDR4_LPDDR3_RON_25ohm, 25} 633 }; 634 635 static u16 d4lp3_phy_odt_2_ohm[][2] = { 636 {PHY_DDR4_LPDDR3_RTT_DISABLE, 0}, 637 {PHY_DDR4_LPDDR3_RTT_973ohm, 973}, 638 {PHY_DDR4_LPDDR3_RTT_493ohm, 493}, 639 {PHY_DDR4_LPDDR3_RTT_327ohm, 327}, 640 {PHY_DDR4_LPDDR3_RTT_247ohm, 247}, 641 {PHY_DDR4_LPDDR3_RTT_197ohm, 197}, 642 {PHY_DDR4_LPDDR3_RTT_164ohm, 164}, 643 {PHY_DDR4_LPDDR3_RTT_141ohm, 141}, 644 {PHY_DDR4_LPDDR3_RTT_123ohm, 123}, 645 {PHY_DDR4_LPDDR3_RTT_109ohm, 109}, 646 {PHY_DDR4_LPDDR3_RTT_99ohm, 99}, 647 {PHY_DDR4_LPDDR3_RTT_90ohm, 90}, 648 {PHY_DDR4_LPDDR3_RTT_82ohm, 82}, 649 {PHY_DDR4_LPDDR3_RTT_76ohm, 76}, 650 {PHY_DDR4_LPDDR3_RTT_70ohm, 70}, 651 {PHY_DDR4_LPDDR3_RTT_66ohm, 66}, 652 {PHY_DDR4_LPDDR3_RTT_62ohm, 62}, 653 {PHY_DDR4_LPDDR3_RTT_58ohm, 58}, 654 {PHY_DDR4_LPDDR3_RTT_55ohm, 55}, 655 {PHY_DDR4_LPDDR3_RTT_52ohm, 52}, 656 {PHY_DDR4_LPDDR3_RTT_49ohm, 49}, 657 {PHY_DDR4_LPDDR3_RTT_47ohm, 47}, 658 {PHY_DDR4_LPDDR3_RTT_45ohm, 45}, 659 {PHY_DDR4_LPDDR3_RTT_43ohm, 43} 660 }; 661 662 static u16 lp4_phy_drv_2_ohm[][2] = { 663 {PHY_LPDDR4_RON_606ohm, 606}, 664 {PHY_LPDDR4_RON_303ohm, 303}, 665 {PHY_LPDDR4_RON_202ohm, 202}, 666 {PHY_LPDDR4_RON_152ohm, 153}, 667 {PHY_LPDDR4_RON_121ohm, 121}, 668 {PHY_LPDDR4_RON_101ohm, 101}, 669 {PHY_LPDDR4_RON_87ohm, 87}, 670 {PHY_LPDDR4_RON_76ohm, 76}, 671 {PHY_LPDDR4_RON_67ohm, 67}, 672 {PHY_LPDDR4_RON_61ohm, 61}, 673 {PHY_LPDDR4_RON_55ohm, 55}, 674 {PHY_LPDDR4_RON_51ohm, 51}, 675 {PHY_LPDDR4_RON_47ohm, 47}, 676 {PHY_LPDDR4_RON_43ohm, 43}, 677 {PHY_LPDDR4_RON_40ohm, 40}, 678 {PHY_LPDDR4_RON_38ohm, 38}, 679 {PHY_LPDDR4_RON_36ohm, 36}, 680 {PHY_LPDDR4_RON_34ohm, 34}, 681 {PHY_LPDDR4_RON_32ohm, 32}, 682 {PHY_LPDDR4_RON_30ohm, 30}, 683 {PHY_LPDDR4_RON_29ohm, 29}, 684 {PHY_LPDDR4_RON_28ohm, 28}, 685 {PHY_LPDDR4_RON_26ohm, 26} 686 }; 687 688 static u16 lp4_phy_odt_2_ohm[][2] = { 689 {PHY_LPDDR4_RTT_DISABLE, 0}, 690 {PHY_LPDDR4_RTT_998ohm, 998}, 691 {PHY_LPDDR4_RTT_506ohm, 506}, 692 {PHY_LPDDR4_RTT_336ohm, 336}, 693 {PHY_LPDDR4_RTT_253ohm, 253}, 694 {PHY_LPDDR4_RTT_202ohm, 202}, 695 {PHY_LPDDR4_RTT_169ohm, 169}, 696 {PHY_LPDDR4_RTT_144ohm, 144}, 697 {PHY_LPDDR4_RTT_127ohm, 127}, 698 {PHY_LPDDR4_RTT_112ohm, 112}, 699 {PHY_LPDDR4_RTT_101ohm, 101}, 700 {PHY_LPDDR4_RTT_92ohm, 92}, 701 {PHY_LPDDR4_RTT_84ohm, 84}, 702 {PHY_LPDDR4_RTT_78ohm, 78}, 703 {PHY_LPDDR4_RTT_72ohm, 72}, 704 {PHY_LPDDR4_RTT_67ohm, 67}, 705 {PHY_LPDDR4_RTT_63ohm, 63}, 706 {PHY_LPDDR4_RTT_60ohm, 60}, 707 {PHY_LPDDR4_RTT_56ohm, 56}, 708 {PHY_LPDDR4_RTT_53ohm, 53}, 709 {PHY_LPDDR4_RTT_51ohm, 51}, 710 {PHY_LPDDR4_RTT_48ohm, 48}, 711 {PHY_LPDDR4_RTT_46ohm, 46}, 712 {PHY_LPDDR4_RTT_44ohm, 44} 713 }; 714 715 static u32 lp4_odt_calc(u32 odt_ohm) 716 { 717 u32 odt; 718 719 if (odt_ohm == 0) 720 odt = LPDDR4_DQODT_DIS; 721 else if (odt_ohm <= 40) 722 odt = LPDDR4_DQODT_40; 723 else if (odt_ohm <= 48) 724 odt = LPDDR4_DQODT_48; 725 else if (odt_ohm <= 60) 726 odt = LPDDR4_DQODT_60; 727 else if (odt_ohm <= 80) 728 odt = LPDDR4_DQODT_80; 729 else if (odt_ohm <= 120) 730 odt = LPDDR4_DQODT_120; 731 else 732 odt = LPDDR4_DQODT_240; 733 734 return odt; 735 } 736 737 static void *get_ddr_drv_odt_info(u32 dramtype) 738 { 739 struct sdram_head_info_index_v2 *index = 740 (struct sdram_head_info_index_v2 *)common_info; 741 void *ddr_info = 0; 742 743 if (dramtype == DDR4) 744 ddr_info = (void *)common_info + index->ddr4_index.offset * 4; 745 else if (dramtype == DDR3) 746 ddr_info = (void *)common_info + index->ddr3_index.offset * 4; 747 else if (dramtype == LPDDR3) 748 ddr_info = (void *)common_info + index->lp3_index.offset * 4; 749 else if (dramtype == LPDDR4) 750 ddr_info = (void *)common_info + index->lp4_index.offset * 4; 751 else 752 printascii("unsupported dram type\n"); 753 return ddr_info; 754 } 755 756 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info, 757 u32 freq_mhz, u32 dst_fsp) 758 { 759 void __iomem *pctl_base = dram->pctl; 760 u32 ca_vref, dq_vref; 761 762 if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq)) 763 ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff); 764 else 765 ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten); 766 767 if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq)) 768 dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff); 769 else 770 dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten); 771 772 if (ca_vref < 100) 773 ca_vref = 100; 774 if (ca_vref > 420) 775 ca_vref = 420; 776 777 if (ca_vref <= 300) 778 ca_vref = (0 << 6) | (ca_vref - 100) / 4; 779 else 780 ca_vref = (1 << 6) | (ca_vref - 220) / 4; 781 782 if (dq_vref < 100) 783 dq_vref = 100; 784 if (dq_vref > 420) 785 dq_vref = 420; 786 787 if (dq_vref <= 300) 788 dq_vref = (0 << 6) | (dq_vref - 100) / 4; 789 else 790 dq_vref = (1 << 6) | (dq_vref - 220) / 4; 791 792 sw_set_req(dram); 793 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 794 DDR_PCTL2_INIT6, 795 PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT, 796 ca_vref << PCTL2_LPDDR4_MR12_SHIFT); 797 798 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 799 DDR_PCTL2_INIT7, 800 PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT, 801 dq_vref << PCTL2_LPDDR4_MR14_SHIFT); 802 sw_set_ack(dram); 803 } 804 805 static void set_ds_odt(struct dram_info *dram, 806 struct rv1126_sdram_params *sdram_params, u32 dst_fsp) 807 { 808 void __iomem *phy_base = dram->phy; 809 void __iomem *pctl_base = dram->pctl; 810 u32 dramtype = sdram_params->base.dramtype; 811 struct ddr2_3_4_lp2_3_info *ddr_info; 812 struct lp4_info *lp4_info; 813 u32 i, j, tmp; 814 const u16 (*p_drv)[2]; 815 const u16 (*p_odt)[2]; 816 u32 drv_info, sr_info; 817 u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm; 818 u32 phy_odt_ohm, dram_odt_ohm; 819 u32 lp4_pu_cal, phy_lp4_drv_pd_en; 820 u32 phy_odt_up_en, phy_odt_dn_en; 821 u32 sr_dq, sr_clk; 822 u32 freq = sdram_params->base.ddr_freq; 823 u32 mr1_mr3, mr11, mr22, vref_out, vref_inner; 824 u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0; 825 u32 phy_dq_drv = 0; 826 u32 phy_odt_up = 0, phy_odt_dn = 0; 827 828 ddr_info = get_ddr_drv_odt_info(dramtype); 829 lp4_info = (void *)ddr_info; 830 831 if (!ddr_info) 832 return; 833 834 /* dram odt en freq control phy drv, dram odt and phy sr */ 835 if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) { 836 drv_info = ddr_info->drv_when_odtoff; 837 dram_odt_ohm = 0; 838 sr_info = ddr_info->sr_when_odtoff; 839 phy_lp4_drv_pd_en = 840 PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info); 841 } else { 842 drv_info = ddr_info->drv_when_odten; 843 dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info); 844 sr_info = ddr_info->sr_when_odten; 845 phy_lp4_drv_pd_en = 846 PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info); 847 } 848 phy_dq_drv_ohm = 849 DRV_INFO_PHY_DQ_DRV(drv_info); 850 phy_clk_drv_ohm = 851 DRV_INFO_PHY_CLK_DRV(drv_info); 852 phy_ca_drv_ohm = 853 DRV_INFO_PHY_CA_DRV(drv_info); 854 855 sr_dq = DQ_SR_INFO(sr_info); 856 sr_clk = CLK_SR_INFO(sr_info); 857 858 /* phy odt en freq control dram drv and phy odt */ 859 if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) { 860 dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff); 861 lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info); 862 phy_odt_ohm = 0; 863 phy_odt_up_en = 0; 864 phy_odt_dn_en = 0; 865 } else { 866 dram_drv_ohm = 867 DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten); 868 phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info); 869 phy_odt_up_en = 870 ODT_INFO_PULLUP_EN(ddr_info->odt_info); 871 phy_odt_dn_en = 872 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info); 873 lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info); 874 } 875 876 if (dramtype == LPDDR4) { 877 if (phy_odt_ohm) { 878 phy_odt_up_en = 0; 879 phy_odt_dn_en = 1; 880 } 881 if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq)) 882 dram_caodt_ohm = 0; 883 else 884 dram_caodt_ohm = 885 ODT_INFO_LP4_CA_ODT(lp4_info->odt_info); 886 } 887 888 if (dramtype == DDR3) { 889 p_drv = d3_phy_drv_2_ohm; 890 p_odt = d3_phy_odt_2_ohm; 891 } else if (dramtype == LPDDR4) { 892 p_drv = lp4_phy_drv_2_ohm; 893 p_odt = lp4_phy_odt_2_ohm; 894 } else { 895 p_drv = d4lp3_phy_drv_2_ohm; 896 p_odt = d4lp3_phy_odt_2_ohm; 897 } 898 899 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) { 900 if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) { 901 phy_dq_drv = **(p_drv + i); 902 break; 903 } 904 if (i == 0) 905 break; 906 } 907 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) { 908 if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) { 909 phy_clk_drv = **(p_drv + i); 910 break; 911 } 912 if (i == 0) 913 break; 914 } 915 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) { 916 if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) { 917 phy_ca_drv = **(p_drv + i); 918 break; 919 } 920 if (i == 0) 921 break; 922 } 923 if (!phy_odt_ohm) 924 phy_odt = 0; 925 else 926 for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) { 927 if (phy_odt_ohm <= *(*(p_odt + i) + 1)) { 928 phy_odt = **(p_odt + i); 929 break; 930 } 931 if (i == 0) 932 break; 933 } 934 935 if (dramtype != LPDDR4) { 936 if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en)) 937 vref_inner = 0x80; 938 else if (phy_odt_up_en) 939 vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 / 940 (dram_drv_ohm + phy_odt_ohm); 941 else 942 vref_inner = phy_odt_ohm * 128 / 943 (phy_odt_ohm + dram_drv_ohm); 944 945 if (dramtype != DDR3 && dram_odt_ohm) 946 vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 / 947 (phy_dq_drv_ohm + dram_odt_ohm); 948 else 949 vref_out = 0x80; 950 } else { 951 /* for lp4 */ 952 if (phy_odt_ohm) 953 vref_inner = 954 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) * 955 256) / 1000; 956 else 957 vref_inner = 958 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) * 959 256) / 1000; 960 961 vref_out = 0x80; 962 } 963 964 /* default ZQCALIB bypass mode */ 965 clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv); 966 clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv); 967 clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv); 968 clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv); 969 /* clk / cmd slew rate */ 970 clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk); 971 972 phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1; 973 if (phy_odt_up_en) 974 phy_odt_up = phy_odt; 975 if (phy_odt_dn_en) 976 phy_odt_dn = phy_odt; 977 978 for (i = 0; i < 4; i++) { 979 j = 0x110 + i * 0x10; 980 clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up); 981 clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn); 982 clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv); 983 clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv); 984 writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10)); 985 986 clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), 987 1 << 3, phy_lp4_drv_pd_en << 3); 988 /* dq slew rate */ 989 clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10), 990 0x1f, sr_dq); 991 } 992 993 /* reg_rx_vref_value_update */ 994 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 995 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 996 997 /* RAM VREF */ 998 writel(vref_out, PHY_REG(phy_base, 0x105)); 999 udelay(8000); 1000 1001 if (dramtype == LPDDR4) 1002 set_lp4_vref(dram, lp4_info, freq, dst_fsp); 1003 1004 if (dramtype == DDR3 || dramtype == DDR4) { 1005 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1006 DDR_PCTL2_INIT3); 1007 mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK; 1008 } else { 1009 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1010 DDR_PCTL2_INIT4); 1011 mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK; 1012 } 1013 1014 if (dramtype == DDR3) { 1015 mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK); 1016 if (dram_drv_ohm == 34) 1017 mr1_mr3 |= DDR3_DS_34; 1018 1019 if (dram_odt_ohm == 0) 1020 mr1_mr3 |= DDR3_RTT_NOM_DIS; 1021 else if (dram_odt_ohm <= 40) 1022 mr1_mr3 |= DDR3_RTT_NOM_40; 1023 else if (dram_odt_ohm <= 60) 1024 mr1_mr3 |= DDR3_RTT_NOM_60; 1025 else 1026 mr1_mr3 |= DDR3_RTT_NOM_120; 1027 1028 } else if (dramtype == DDR4) { 1029 mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK); 1030 if (dram_drv_ohm == 48) 1031 mr1_mr3 |= DDR4_DS_48; 1032 1033 if (dram_odt_ohm == 0) 1034 mr1_mr3 |= DDR4_RTT_NOM_DIS; 1035 else if (dram_odt_ohm <= 34) 1036 mr1_mr3 |= DDR4_RTT_NOM_34; 1037 else if (dram_odt_ohm <= 40) 1038 mr1_mr3 |= DDR4_RTT_NOM_40; 1039 else if (dram_odt_ohm <= 48) 1040 mr1_mr3 |= DDR4_RTT_NOM_48; 1041 else if (dram_odt_ohm <= 60) 1042 mr1_mr3 |= DDR4_RTT_NOM_60; 1043 else 1044 mr1_mr3 |= DDR4_RTT_NOM_120; 1045 1046 } else if (dramtype == LPDDR3) { 1047 if (dram_drv_ohm <= 34) 1048 mr1_mr3 |= LPDDR3_DS_34; 1049 else if (dram_drv_ohm <= 40) 1050 mr1_mr3 |= LPDDR3_DS_40; 1051 else if (dram_drv_ohm <= 48) 1052 mr1_mr3 |= LPDDR3_DS_48; 1053 else if (dram_drv_ohm <= 60) 1054 mr1_mr3 |= LPDDR3_DS_60; 1055 else if (dram_drv_ohm <= 80) 1056 mr1_mr3 |= LPDDR3_DS_80; 1057 1058 if (dram_odt_ohm == 0) 1059 lp3_odt_value = LPDDR3_ODT_DIS; 1060 else if (dram_odt_ohm <= 60) 1061 lp3_odt_value = LPDDR3_ODT_60; 1062 else if (dram_odt_ohm <= 120) 1063 lp3_odt_value = LPDDR3_ODT_120; 1064 else 1065 lp3_odt_value = LPDDR3_ODT_240; 1066 } else {/* for lpddr4 */ 1067 /* MR3 for lp4 PU-CAL and PDDS */ 1068 mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK); 1069 mr1_mr3 |= lp4_pu_cal; 1070 1071 tmp = lp4_odt_calc(dram_drv_ohm); 1072 if (!tmp) 1073 tmp = LPDDR4_PDDS_240; 1074 mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT); 1075 1076 /* MR11 for lp4 ca odt, dq odt set */ 1077 mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1078 DDR_PCTL2_INIT6); 1079 mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK; 1080 1081 mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK); 1082 1083 tmp = lp4_odt_calc(dram_odt_ohm); 1084 mr11 |= (tmp << LPDDR4_DQODT_SHIFT); 1085 1086 tmp = lp4_odt_calc(dram_caodt_ohm); 1087 mr11 |= (tmp << LPDDR4_CAODT_SHIFT); 1088 sw_set_req(dram); 1089 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1090 DDR_PCTL2_INIT6, 1091 PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT, 1092 mr11 << PCTL2_LPDDR4_MR11_SHIFT); 1093 sw_set_ack(dram); 1094 1095 /* MR22 for soc odt/odt-ck/odt-cs/odt-ca */ 1096 mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1097 DDR_PCTL2_INIT7); 1098 mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK; 1099 mr22 &= ~LPDDR4_SOC_ODT_MASK; 1100 1101 tmp = lp4_odt_calc(phy_odt_ohm); 1102 mr22 |= tmp; 1103 mr22 = mr22 | 1104 (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) << 1105 LPDDR4_ODTE_CK_SHIFT) | 1106 (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) << 1107 LPDDR4_ODTE_CS_SHIFT) | 1108 (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) << 1109 LPDDR4_ODTD_CA_SHIFT); 1110 1111 sw_set_req(dram); 1112 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1113 DDR_PCTL2_INIT7, 1114 PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT, 1115 mr22 << PCTL2_LPDDR4_MR22_SHIFT); 1116 sw_set_ack(dram); 1117 } 1118 1119 if (dramtype == DDR4 || dramtype == DDR3) { 1120 sw_set_req(dram); 1121 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1122 DDR_PCTL2_INIT3, 1123 PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT, 1124 mr1_mr3 << PCTL2_DDR34_MR1_SHIFT); 1125 sw_set_ack(dram); 1126 } else { 1127 sw_set_req(dram); 1128 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1129 DDR_PCTL2_INIT4, 1130 PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT, 1131 mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT); 1132 sw_set_ack(dram); 1133 } 1134 } 1135 1136 static int sdram_cmd_dq_path_remap(struct dram_info *dram, 1137 struct rv1126_sdram_params *sdram_params) 1138 { 1139 void __iomem *phy_base = dram->phy; 1140 u32 dramtype = sdram_params->base.dramtype; 1141 struct sdram_head_info_index_v2 *index = 1142 (struct sdram_head_info_index_v2 *)common_info; 1143 struct dq_map_info *map_info; 1144 1145 map_info = (struct dq_map_info *)((void *)common_info + 1146 index->dq_map_index.offset * 4); 1147 1148 if (dramtype <= LPDDR4) 1149 writel((map_info->byte_map[dramtype / 4] >> 1150 ((dramtype % 4) * 8)) & 0xff, 1151 PHY_REG(phy_base, 0x4f)); 1152 1153 return 0; 1154 } 1155 1156 static void phy_cfg(struct dram_info *dram, 1157 struct rv1126_sdram_params *sdram_params) 1158 { 1159 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 1160 void __iomem *phy_base = dram->phy; 1161 u32 i, dq_map, tmp; 1162 u32 byte1 = 0, byte0 = 0; 1163 1164 sdram_cmd_dq_path_remap(dram, sdram_params); 1165 1166 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0); 1167 for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) { 1168 writel(sdram_params->phy_regs.phy[i][1], 1169 phy_base + sdram_params->phy_regs.phy[i][0]); 1170 } 1171 1172 clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5)); 1173 dq_map = readl(PHY_REG(phy_base, 0x4f)); 1174 for (i = 0; i < 4; i++) { 1175 if (((dq_map >> (i * 2)) & 0x3) == 0) 1176 byte0 = i; 1177 if (((dq_map >> (i * 2)) & 0x3) == 1) 1178 byte1 = i; 1179 } 1180 1181 tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK); 1182 if (cap_info->bw == 2) 1183 tmp |= 0xf; 1184 else if (cap_info->bw == 1) 1185 tmp |= ((1 << byte0) | (1 << byte1)); 1186 else 1187 tmp |= (1 << byte0); 1188 1189 writel(tmp, PHY_REG(phy_base, 0xf)); 1190 1191 /* lpddr4 odt control by phy, enable cs0 odt */ 1192 if (sdram_params->base.dramtype == LPDDR4) 1193 clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4, 1194 (1 << 6) | (1 << 4)); 1195 /* for ca training ca vref choose range1 */ 1196 setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6)); 1197 setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6)); 1198 /* for wr training PHY_0x7c[5], choose range0 */ 1199 clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5)); 1200 } 1201 1202 static int update_refresh_reg(struct dram_info *dram) 1203 { 1204 void __iomem *pctl_base = dram->pctl; 1205 u32 ret; 1206 1207 ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1); 1208 writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3); 1209 1210 return 0; 1211 } 1212 1213 /* 1214 * rank = 1: cs0 1215 * rank = 2: cs1 1216 */ 1217 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype) 1218 { 1219 u32 ret; 1220 u32 i, temp; 1221 u32 dqmap; 1222 1223 void __iomem *pctl_base = dram->pctl; 1224 struct sdram_head_info_index_v2 *index = 1225 (struct sdram_head_info_index_v2 *)common_info; 1226 struct dq_map_info *map_info; 1227 1228 map_info = (struct dq_map_info *)((void *)common_info + 1229 index->dq_map_index.offset * 4); 1230 1231 if (dramtype == LPDDR2) 1232 dqmap = map_info->lp2_dq0_7_map; 1233 else 1234 dqmap = map_info->lp3_dq0_7_map; 1235 1236 pctl_read_mr(pctl_base, rank, mr_num); 1237 1238 ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff); 1239 1240 if (dramtype != LPDDR4) { 1241 temp = 0; 1242 for (i = 0; i < 8; i++) { 1243 temp = temp | (((ret >> i) & 0x1) << 1244 ((dqmap >> (i * 4)) & 0xf)); 1245 } 1246 } else { 1247 ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff); 1248 } 1249 1250 return ret; 1251 } 1252 1253 /* before call this function autorefresh should be disabled */ 1254 void send_a_refresh(struct dram_info *dram) 1255 { 1256 void __iomem *pctl_base = dram->pctl; 1257 1258 while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3) 1259 continue; 1260 writel(0x3, pctl_base + DDR_PCTL2_DBGCMD); 1261 } 1262 1263 void record_dq_prebit(struct dram_info *dram) 1264 { 1265 u32 group, i, tmp; 1266 void __iomem *phy_base = dram->phy; 1267 1268 for (group = 0; group < 4; group++) { 1269 for (i = 0; i < ARRAY_SIZE(dq_sel); i++) { 1270 /* l_loop_invdelaysel */ 1271 writel(dq_sel[i][0], PHY_REG(phy_base, 1272 grp_addr[group] + 0x2c)); 1273 tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e)); 1274 writel(tmp, PHY_REG(phy_base, 1275 grp_addr[group] + dq_sel[i][1])); 1276 1277 /* r_loop_invdelaysel */ 1278 writel(dq_sel[i][0], PHY_REG(phy_base, 1279 grp_addr[group] + 0x2d)); 1280 tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f)); 1281 writel(tmp, PHY_REG(phy_base, 1282 grp_addr[group] + dq_sel[i][2])); 1283 } 1284 } 1285 } 1286 1287 static void update_dq_rx_prebit(struct dram_info *dram) 1288 { 1289 void __iomem *phy_base = dram->phy; 1290 1291 clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4), 1292 BIT(4)); 1293 udelay(1); 1294 clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4)); 1295 } 1296 1297 static void update_dq_tx_prebit(struct dram_info *dram) 1298 { 1299 void __iomem *phy_base = dram->phy; 1300 1301 clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1)); 1302 setbits_le32(PHY_REG(phy_base, 0x2), BIT(3)); 1303 setbits_le32(PHY_REG(phy_base, 0xc), BIT(6)); 1304 udelay(1); 1305 clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6)); 1306 } 1307 1308 static void update_ca_prebit(struct dram_info *dram) 1309 { 1310 void __iomem *phy_base = dram->phy; 1311 1312 clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2)); 1313 setbits_le32(PHY_REG(phy_base, 0x22), BIT(6)); 1314 udelay(1); 1315 clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6)); 1316 } 1317 1318 /* 1319 * dir: 0: de-skew = delta_* 1320 * 1: de-skew = reg val - delta_* 1321 * delta_dir: value for differential signal: clk/ 1322 * delta_sig: value for single signal: ca/cmd 1323 */ 1324 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif, 1325 int delta_sig, u32 cs, u32 dramtype) 1326 { 1327 void __iomem *phy_base = dram->phy; 1328 u32 i, cs_en, tmp; 1329 1330 if (cs == 0) 1331 cs_en = 1; 1332 else if (cs == 2) 1333 cs_en = 2; 1334 else 1335 cs_en = 3; 1336 1337 for (i = 0; i < 0x20; i++) { 1338 if (dir == DESKEW_MDF_ABS_VAL) 1339 tmp = delta_sig; 1340 else 1341 tmp = readl(PHY_REG(phy_base, 0x150 + i)) + 1342 delta_sig; 1343 writel(tmp, PHY_REG(phy_base, 0x150 + i)); 1344 } 1345 1346 if (dir == DESKEW_MDF_ABS_VAL) 1347 tmp = delta_dif; 1348 else 1349 tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) - 1350 delta_sig + delta_dif; 1351 writel(tmp, PHY_REG(phy_base, 0x150 + 0x17)); 1352 writel(tmp, PHY_REG(phy_base, 0x150 + 0x18)); 1353 if (dramtype == LPDDR4) { 1354 writel(tmp, PHY_REG(phy_base, 0x150 + 0x4)); 1355 writel(tmp, PHY_REG(phy_base, 0x150 + 0xa)); 1356 1357 clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6); 1358 update_ca_prebit(dram); 1359 } 1360 } 1361 1362 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank) 1363 { 1364 u32 i, j, offset = 0; 1365 u32 min = 0x3f; 1366 void __iomem *phy_base = dram->phy; 1367 u32 byte_en; 1368 1369 if (signal == SKEW_TX_SIGNAL) 1370 offset = 8; 1371 1372 if (signal == SKEW_CA_SIGNAL) { 1373 for (i = 0; i < 0x20; i++) 1374 min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i))); 1375 } else { 1376 byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf; 1377 for (j = offset; j < offset + rank * 4; j++) { 1378 if (!((byte_en >> (j % 4)) & 1)) 1379 continue; 1380 for (i = 0; i < 11; i++) 1381 min = MIN(min, 1382 readl(PHY_REG(phy_base, 1383 dqs_dq_skew_adr[j] + 1384 i))); 1385 } 1386 } 1387 1388 return min; 1389 } 1390 1391 static u32 low_power_update(struct dram_info *dram, u32 en) 1392 { 1393 void __iomem *pctl_base = dram->pctl; 1394 u32 lp_stat = 0; 1395 1396 if (en) { 1397 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf); 1398 } else { 1399 lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf; 1400 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf); 1401 } 1402 1403 return lp_stat; 1404 } 1405 1406 /* 1407 * signal: 1408 * dir: 0: de-skew = delta_* 1409 * 1: de-skew = reg val - delta_* 1410 * delta_dir: value for differential signal: dqs 1411 * delta_sig: value for single signal: dq/dm 1412 */ 1413 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir, 1414 int delta_dif, int delta_sig, u32 rank) 1415 { 1416 void __iomem *phy_base = dram->phy; 1417 u32 i, j, tmp, offset; 1418 u32 byte_en; 1419 1420 byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf; 1421 1422 if (signal == SKEW_RX_SIGNAL) 1423 offset = 0; 1424 else 1425 offset = 8; 1426 1427 for (j = offset; j < (offset + rank * 4); j++) { 1428 if (!((byte_en >> (j % 4)) & 1)) 1429 continue; 1430 for (i = 0; i < 0x9; i++) { 1431 if (dir == DESKEW_MDF_ABS_VAL) 1432 tmp = delta_sig; 1433 else 1434 tmp = delta_sig + readl(PHY_REG(phy_base, 1435 dqs_dq_skew_adr[j] + 1436 i)); 1437 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i)); 1438 } 1439 if (dir == DESKEW_MDF_ABS_VAL) 1440 tmp = delta_dif; 1441 else 1442 tmp = delta_dif + readl(PHY_REG(phy_base, 1443 dqs_dq_skew_adr[j] + 9)); 1444 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9)); 1445 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa)); 1446 } 1447 if (signal == SKEW_RX_SIGNAL) 1448 update_dq_rx_prebit(dram); 1449 else 1450 update_dq_tx_prebit(dram); 1451 } 1452 1453 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype) 1454 { 1455 void __iomem *phy_base = dram->phy; 1456 u32 ret; 1457 u32 dis_auto_zq = 0; 1458 u32 odt_val_up, odt_val_dn; 1459 u32 i, j; 1460 1461 odt_val_dn = readl(PHY_REG(phy_base, 0x110)); 1462 odt_val_up = readl(PHY_REG(phy_base, 0x111)); 1463 1464 if (dramtype != LPDDR4) { 1465 for (i = 0; i < 4; i++) { 1466 j = 0x110 + i * 0x10; 1467 writel(PHY_DDR4_LPDDR3_RTT_247ohm, 1468 PHY_REG(phy_base, j)); 1469 writel(PHY_DDR4_LPDDR3_RTT_DISABLE, 1470 PHY_REG(phy_base, j + 0x1)); 1471 } 1472 } 1473 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1474 /* use normal read mode for data training */ 1475 clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); 1476 1477 if (dramtype == DDR4) 1478 setbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); 1479 1480 /* choose training cs */ 1481 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs)); 1482 /* enable gate training */ 1483 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1); 1484 udelay(50); 1485 ret = readl(PHY_REG(phy_base, 0x91)); 1486 /* disable gate training */ 1487 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0); 1488 clrbits_le32(PHY_REG(phy_base, 2), 0x30); 1489 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1490 1491 if (ret & 0x20) 1492 ret = -1; 1493 else 1494 ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf); 1495 1496 if (dramtype != LPDDR4) { 1497 for (i = 0; i < 4; i++) { 1498 j = 0x110 + i * 0x10; 1499 writel(odt_val_dn, PHY_REG(phy_base, j)); 1500 writel(odt_val_up, PHY_REG(phy_base, j + 0x1)); 1501 } 1502 } 1503 return ret; 1504 } 1505 1506 static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype, 1507 u32 rank) 1508 { 1509 void __iomem *pctl_base = dram->pctl; 1510 void __iomem *phy_base = dram->phy; 1511 u32 dis_auto_zq = 0; 1512 u32 tmp; 1513 u32 cur_fsp; 1514 u32 timeout_us = 1000; 1515 1516 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1517 1518 clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1); 1519 1520 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1521 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) & 1522 0xffff; 1523 writel(tmp & 0xff, PHY_REG(phy_base, 0x3)); 1524 1525 /* disable another cs's output */ 1526 if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2) 1527 pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12), 1528 dramtype); 1529 if (dramtype == DDR3 || dramtype == DDR4) 1530 writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4)); 1531 else 1532 writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4)); 1533 1534 /* choose cs */ 1535 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), 1536 ((0x2 >> cs) << 6) | (0 << 2)); 1537 /* enable write leveling */ 1538 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), 1539 ((0x2 >> cs) << 6) | (1 << 2)); 1540 1541 while (1) { 1542 if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) == 1543 (readl(PHY_REG(phy_base, 0xf)) & 0xf)) 1544 break; 1545 1546 udelay(1); 1547 if (timeout_us-- == 0) { 1548 printascii("error: write leveling timeout\n"); 1549 while (1) 1550 ; 1551 } 1552 } 1553 1554 /* disable write leveling */ 1555 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), 1556 ((0x2 >> cs) << 6) | (0 << 2)); 1557 clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6); 1558 1559 /* enable another cs's output */ 1560 if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2) 1561 pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12), 1562 dramtype); 1563 1564 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1565 1566 return 0; 1567 } 1568 1569 char pattern[32] = { 1570 0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa, 1571 0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55, 1572 0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55, 1573 0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa 1574 }; 1575 1576 static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype, 1577 u32 mhz) 1578 { 1579 void __iomem *pctl_base = dram->pctl; 1580 void __iomem *phy_base = dram->phy; 1581 u32 trefi_1x, trfc_1x; 1582 u32 dis_auto_zq = 0; 1583 u32 timeout_us = 1000; 1584 u32 dqs_default; 1585 u32 cur_fsp; 1586 u32 vref_inner; 1587 u32 i; 1588 struct sdram_head_info_index_v2 *index = 1589 (struct sdram_head_info_index_v2 *)common_info; 1590 struct dq_map_info *map_info; 1591 1592 vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff; 1593 if (dramtype == DDR3 && vref_inner == 0x80) { 1594 for (i = 0; i < 4; i++) 1595 writel(vref_inner - 0xa, 1596 PHY_REG(phy_base, 0x118 + i * 0x10)); 1597 1598 /* reg_rx_vref_value_update */ 1599 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1600 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1601 } 1602 1603 map_info = (struct dq_map_info *)((void *)common_info + 1604 index->dq_map_index.offset * 4); 1605 /* only 1cs a time, 0:cs0 1 cs1 */ 1606 if (cs > 1) 1607 return -1; 1608 1609 dqs_default = 0xf; 1610 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1611 1612 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1613 /* config refresh timing */ 1614 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1615 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32; 1616 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1617 DDR_PCTL2_RFSHTMG) & 0x3ff; 1618 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */ 1619 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff); 1620 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f); 1621 /* reg_phy_trfc */ 1622 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x); 1623 /* reg_max_refi_cnt */ 1624 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4); 1625 1626 /* choose training cs */ 1627 clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6); 1628 1629 /* set dq map for ddr4 */ 1630 if (dramtype == DDR4) { 1631 setbits_le32(PHY_REG(phy_base, 0x70), BIT(7)); 1632 for (i = 0; i < 4; i++) { 1633 writel((map_info->ddr4_dq_map[cs * 2] >> 1634 ((i % 4) * 8)) & 0xff, 1635 PHY_REG(phy_base, 0x238 + i)); 1636 writel((map_info->ddr4_dq_map[cs * 2 + 1] >> 1637 ((i % 4) * 8)) & 0xff, 1638 PHY_REG(phy_base, 0x2b8 + i)); 1639 } 1640 } 1641 1642 /* cha_l reg_l_rd_train_dqs_default[5:0] */ 1643 clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default); 1644 /* cha_h reg_h_rd_train_dqs_default[5:0] */ 1645 clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default); 1646 /* chb_l reg_l_rd_train_dqs_default[5:0] */ 1647 clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default); 1648 /* chb_h reg_h_rd_train_dqs_default[5:0] */ 1649 clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default); 1650 1651 /* Choose the read train auto mode */ 1652 clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1); 1653 /* Enable the auto train of the read train */ 1654 clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3); 1655 1656 /* Wait the train done. */ 1657 while (1) { 1658 if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1) 1659 break; 1660 1661 udelay(1); 1662 if (timeout_us-- == 0) { 1663 printascii("error: read training timeout\n"); 1664 return -1; 1665 } 1666 } 1667 1668 /* Check the read train state */ 1669 if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) || 1670 (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) { 1671 printascii("error: read training error\n"); 1672 return -1; 1673 } 1674 1675 /* Exit the Read Training by setting */ 1676 clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1)); 1677 1678 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1679 1680 if (dramtype == DDR3 && vref_inner == 0x80) { 1681 for (i = 0; i < 4; i++) 1682 writel(vref_inner, 1683 PHY_REG(phy_base, 0x118 + i * 0x10)); 1684 1685 /* reg_rx_vref_value_update */ 1686 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1687 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1688 } 1689 1690 return 0; 1691 } 1692 1693 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype, 1694 u32 mhz, u32 dst_fsp) 1695 { 1696 void __iomem *pctl_base = dram->pctl; 1697 void __iomem *phy_base = dram->phy; 1698 u32 trefi_1x, trfc_1x; 1699 u32 dis_auto_zq = 0; 1700 u32 timeout_us = 1000; 1701 u32 cur_fsp; 1702 u32 mr_tmp, cl, cwl, phy_fsp, offset = 0; 1703 1704 if (dramtype == LPDDR3 && mhz <= 400) { 1705 phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3; 1706 offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3; 1707 cl = readl(PHY_REG(phy_base, offset)); 1708 cwl = readl(PHY_REG(phy_base, offset + 2)); 1709 1710 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8); 1711 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4); 1712 pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype); 1713 } 1714 1715 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1716 1717 /* PHY_0x7b[7:0] reg_train_col_addr[7:0] */ 1718 clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0); 1719 /* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */ 1720 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2); 1721 /* PHY_0x7c[1:0] reg_train_col_addr[9:8] */ 1722 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0); 1723 /* PHY_0x7d[7:0] reg_train_row_addr[7:0] */ 1724 clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0); 1725 /* PHY_0x7e[7:0] reg_train_row_addr[15:8] */ 1726 clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0); 1727 1728 /* PHY_0x71[3] wrtrain_check_data_value_random_gen */ 1729 clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3)); 1730 1731 /* config refresh timing */ 1732 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1733 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1734 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32; 1735 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1736 DDR_PCTL2_RFSHTMG) & 0x3ff; 1737 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */ 1738 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff); 1739 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f); 1740 /* reg_phy_trfc */ 1741 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x); 1742 /* reg_max_refi_cnt */ 1743 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4); 1744 1745 /* choose training cs */ 1746 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6); 1747 1748 /* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */ 1749 /* 0: Use the write-leveling value. */ 1750 /* 1: use reg0x233 0x237 0x2b3 0x2b7 */ 1751 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4)); 1752 1753 /* PHY_0x7a [0] reg_dq_wr_train_auto */ 1754 setbits_le32(PHY_REG(phy_base, 0x7a), 0x1); 1755 1756 /* PHY_0x7a [1] reg_dq_wr_train_en */ 1757 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1)); 1758 1759 send_a_refresh(dram); 1760 1761 while (1) { 1762 if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1) 1763 break; 1764 1765 udelay(1); 1766 if (timeout_us-- == 0) { 1767 printascii("error: write training timeout\n"); 1768 while (1) 1769 ; 1770 } 1771 } 1772 1773 /* Check the write train state */ 1774 if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) { 1775 printascii("error: write training error\n"); 1776 return -1; 1777 } 1778 1779 /* PHY_0x7a [1] reg_dq_wr_train_en */ 1780 clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1)); 1781 1782 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1783 1784 /* save LPDDR4 write vref to fsp_param for dfs */ 1785 if (dramtype == LPDDR4) { 1786 fsp_param[dst_fsp].vref_dq[cs] = 1787 ((readl(PHY_REG(phy_base, 0x384)) & 0x3f) + 1788 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2; 1789 /* add range info */ 1790 fsp_param[dst_fsp].vref_dq[cs] |= 1791 ((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1); 1792 } 1793 1794 if (dramtype == LPDDR3 && mhz <= 400) { 1795 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl); 1796 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl); 1797 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1798 DDR_PCTL2_INIT3); 1799 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK, 1800 dramtype); 1801 } 1802 1803 return 0; 1804 } 1805 1806 static int data_training(struct dram_info *dram, u32 cs, 1807 struct rv1126_sdram_params *sdram_params, u32 dst_fsp, 1808 u32 training_flag) 1809 { 1810 u32 ret = 0; 1811 1812 if (training_flag == FULL_TRAINING) 1813 training_flag = READ_GATE_TRAINING | WRITE_LEVELING | 1814 WRITE_TRAINING | READ_TRAINING; 1815 1816 if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) { 1817 ret = data_training_wl(dram, cs, 1818 sdram_params->base.dramtype, 1819 sdram_params->ch.cap_info.rank); 1820 if (ret != 0) 1821 goto out; 1822 } 1823 1824 if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) { 1825 ret = data_training_rg(dram, cs, 1826 sdram_params->base.dramtype); 1827 if (ret != 0) 1828 goto out; 1829 } 1830 1831 if ((training_flag & READ_TRAINING) == READ_TRAINING) { 1832 ret = data_training_rd(dram, cs, 1833 sdram_params->base.dramtype, 1834 sdram_params->base.ddr_freq); 1835 if (ret != 0) 1836 goto out; 1837 } 1838 1839 if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) { 1840 ret = data_training_wr(dram, cs, 1841 sdram_params->base.dramtype, 1842 sdram_params->base.ddr_freq, dst_fsp); 1843 if (ret != 0) 1844 goto out; 1845 } 1846 1847 out: 1848 return ret; 1849 } 1850 1851 static int get_wrlvl_val(struct dram_info *dram, 1852 struct rv1126_sdram_params *sdram_params) 1853 { 1854 u32 i, j, clk_skew; 1855 void __iomem *phy_base = dram->phy; 1856 u32 lp_stat; 1857 int ret; 1858 1859 lp_stat = low_power_update(dram, 0); 1860 1861 clk_skew = readl(PHY_REG(phy_base, 0x150 + 0x17)); 1862 1863 ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING); 1864 if (sdram_params->ch.cap_info.rank == 2) 1865 ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING); 1866 1867 for (j = 0; j < 2; j++) 1868 for (i = 0; i < 4; i++) 1869 wrlvl_result[j][i] = 1870 readl(PHY_REG(phy_base, 1871 wrlvl_result_offset[j][i])) - 1872 clk_skew; 1873 1874 low_power_update(dram, lp_stat); 1875 1876 return ret; 1877 } 1878 1879 static int high_freq_training(struct dram_info *dram, 1880 struct rv1126_sdram_params *sdram_params, 1881 u32 fsp) 1882 { 1883 u32 i, j; 1884 void __iomem *phy_base = dram->phy; 1885 u32 dramtype = sdram_params->base.dramtype; 1886 int min_val; 1887 u32 dqs_skew, clk_skew, ca_skew; 1888 int ret; 1889 1890 dqs_skew = 0; 1891 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) 1892 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) 1893 dqs_skew += wrlvl_result[j][i]; 1894 dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank * 1895 ARRAY_SIZE(wrlvl_result[0])); 1896 1897 clk_skew = 0x20 - dqs_skew; 1898 dqs_skew = 0x20; 1899 1900 if (dramtype == LPDDR4) { 1901 clk_skew = 0; 1902 ca_skew = 0; 1903 } else if (dramtype == LPDDR3) { 1904 ca_skew = clk_skew - 4; 1905 } else { 1906 ca_skew = clk_skew; 1907 } 1908 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3, 1909 dramtype); 1910 1911 writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233)); 1912 writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237)); 1913 writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 1914 writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 1915 ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING | 1916 READ_TRAINING | WRITE_TRAINING); 1917 if (sdram_params->ch.cap_info.rank == 2) { 1918 writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233)); 1919 writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237)); 1920 writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 1921 writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 1922 ret |= data_training(dram, 1, sdram_params, fsp, 1923 READ_GATE_TRAINING | READ_TRAINING | 1924 WRITE_TRAINING); 1925 } 1926 if (ret) 1927 goto out; 1928 1929 record_dq_prebit(dram); 1930 1931 min_val = get_min_value(dram, SKEW_RX_SIGNAL, 1932 sdram_params->ch.cap_info.rank) * -1; 1933 modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL, 1934 min_val, min_val, sdram_params->ch.cap_info.rank); 1935 1936 min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL, 1937 sdram_params->ch.cap_info.rank), 1938 get_min_value(dram, SKEW_CA_SIGNAL, 1939 sdram_params->ch.cap_info.rank)) * -1; 1940 1941 /* clk = 0, rx all skew -7, tx - min_value */ 1942 modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3, 1943 dramtype); 1944 1945 modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL, 1946 min_val, min_val, sdram_params->ch.cap_info.rank); 1947 1948 ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING); 1949 if (sdram_params->ch.cap_info.rank == 2) 1950 ret |= data_training(dram, 1, sdram_params, 0, 1951 READ_GATE_TRAINING); 1952 out: 1953 return ret; 1954 } 1955 1956 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig) 1957 { 1958 writel(ddrconfig, &dram->msch->deviceconf); 1959 clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0); 1960 } 1961 1962 static void update_noc_timing(struct dram_info *dram, 1963 struct rv1126_sdram_params *sdram_params) 1964 { 1965 writel(sdram_params->ch.noc_timings.ddrtiminga0.d32, 1966 &dram->msch->ddrtiminga0); 1967 writel(sdram_params->ch.noc_timings.ddrtimingb0.d32, 1968 &dram->msch->ddrtimingb0); 1969 writel(sdram_params->ch.noc_timings.ddrtimingc0.d32, 1970 &dram->msch->ddrtimingc0); 1971 writel(sdram_params->ch.noc_timings.devtodev0.d32, 1972 &dram->msch->devtodev0); 1973 writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode); 1974 writel(sdram_params->ch.noc_timings.ddr4timing.d32, 1975 &dram->msch->ddr4timing); 1976 } 1977 1978 static void dram_all_config(struct dram_info *dram, 1979 struct rv1126_sdram_params *sdram_params) 1980 { 1981 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 1982 u32 dram_type = sdram_params->base.dramtype; 1983 void __iomem *pctl_base = dram->pctl; 1984 u32 sys_reg2 = 0; 1985 u32 sys_reg3 = 0; 1986 u64 cs_cap[2]; 1987 u32 cs_pst; 1988 1989 set_ddrconfig(dram, cap_info->ddrconfig); 1990 sdram_org_config(cap_info, &sdram_params->base, &sys_reg2, 1991 &sys_reg3, 0); 1992 writel(sys_reg2, &dram->pmugrf->os_reg[2]); 1993 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 1994 1995 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type); 1996 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type); 1997 1998 if (cap_info->rank == 2) { 1999 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2000 6 + 2; 2001 if (cs_pst > 28) 2002 cs_cap[0] = 1 << cs_pst; 2003 } 2004 2005 writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) | 2006 (((cs_cap[0] >> 20) / 64) & 0xff), 2007 &dram->msch->devicesize); 2008 update_noc_timing(dram, sdram_params); 2009 } 2010 2011 static void enable_low_power(struct dram_info *dram, 2012 struct rv1126_sdram_params *sdram_params) 2013 { 2014 void __iomem *pctl_base = dram->pctl; 2015 u32 grf_lp_con; 2016 2017 writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]); 2018 2019 if (sdram_params->base.dramtype == DDR4) 2020 grf_lp_con = (0x7 << 16) | (1 << 1); 2021 else if (sdram_params->base.dramtype == DDR3) 2022 grf_lp_con = (0x7 << 16) | (1 << 0); 2023 else 2024 grf_lp_con = (0x7 << 16) | (1 << 2); 2025 2026 /* en lpckdis_en */ 2027 grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9); 2028 writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con); 2029 2030 /* enable sr, pd */ 2031 if (dram->pd_idle == 0) 2032 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); 2033 else 2034 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); 2035 if (dram->sr_idle == 0) 2036 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); 2037 else 2038 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); 2039 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3)); 2040 } 2041 2042 static void ddr_set_atags(struct dram_info *dram, 2043 struct rv1126_sdram_params *sdram_params) 2044 { 2045 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2046 u32 dram_type = sdram_params->base.dramtype; 2047 void __iomem *pctl_base = dram->pctl; 2048 struct tag_serial t_serial; 2049 struct tag_ddr_mem t_ddrmem; 2050 struct tag_soc_info t_socinfo; 2051 u64 cs_cap[2]; 2052 u32 cs_pst = 0; 2053 2054 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type); 2055 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type); 2056 2057 memset(&t_serial, 0, sizeof(struct tag_serial)); 2058 2059 t_serial.version = 0; 2060 t_serial.enable = 1; 2061 t_serial.addr = CONFIG_DEBUG_UART_BASE; 2062 t_serial.baudrate = CONFIG_BAUDRATE; 2063 t_serial.m_mode = SERIAL_M_MODE_M0; 2064 t_serial.id = 2; 2065 2066 atags_destroy(); 2067 atags_set_tag(ATAG_SERIAL, &t_serial); 2068 2069 memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem)); 2070 if (cap_info->row_3_4) { 2071 cs_cap[0] = cs_cap[0] * 3 / 4; 2072 cs_cap[1] = cs_cap[1] * 3 / 4; 2073 } 2074 t_ddrmem.version = 0; 2075 t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE; 2076 if (cs_cap[1]) { 2077 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2078 6 + 2; 2079 } 2080 2081 if (cs_cap[1] && cs_pst > 27) { 2082 t_ddrmem.count = 2; 2083 t_ddrmem.bank[1] = 1 << cs_pst; 2084 t_ddrmem.bank[2] = cs_cap[0]; 2085 t_ddrmem.bank[3] = cs_cap[1]; 2086 } else { 2087 t_ddrmem.count = 1; 2088 t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1]; 2089 } 2090 2091 atags_set_tag(ATAG_DDR_MEM, &t_ddrmem); 2092 2093 memset(&t_socinfo, 0, sizeof(struct tag_soc_info)); 2094 t_socinfo.version = 0; 2095 t_socinfo.name = 0x1126; 2096 } 2097 2098 static void print_ddr_info(struct rv1126_sdram_params *sdram_params) 2099 { 2100 u32 split; 2101 2102 if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & 2103 (1 << SPLIT_BYPASS_OFFSET)) != 0) 2104 split = 0; 2105 else 2106 split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & 2107 SPLIT_SIZE_MASK; 2108 2109 sdram_print_ddr_info(&sdram_params->ch.cap_info, 2110 &sdram_params->base, split); 2111 } 2112 2113 static int sdram_init_(struct dram_info *dram, 2114 struct rv1126_sdram_params *sdram_params, u32 post_init) 2115 { 2116 void __iomem *pctl_base = dram->pctl; 2117 void __iomem *phy_base = dram->phy; 2118 u32 ddr4_vref; 2119 u32 mr_tmp; 2120 2121 rkclk_configure_ddr(dram, sdram_params); 2122 2123 rkclk_ddr_reset(dram, 1, 1, 1, 1); 2124 udelay(10); 2125 2126 rkclk_ddr_reset(dram, 1, 1, 1, 0); 2127 phy_cfg(dram, sdram_params); 2128 2129 rkclk_ddr_reset(dram, 1, 1, 0, 0); 2130 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1); 2131 2132 rkclk_ddr_reset(dram, 1, 0, 0, 0); 2133 pctl_cfg(dram->pctl, &sdram_params->pctl_regs, 2134 dram->sr_idle, dram->pd_idle); 2135 2136 /* set frequency_mode */ 2137 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); 2138 /* set target_frequency to Frequency 0 */ 2139 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0); 2140 2141 set_ds_odt(dram, sdram_params, 0); 2142 sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params); 2143 set_ctl_address_map(dram, sdram_params); 2144 2145 setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4)); 2146 2147 rkclk_ddr_reset(dram, 0, 0, 0, 0); 2148 2149 while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) 2150 continue; 2151 2152 if (sdram_params->base.dramtype == LPDDR3) { 2153 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3); 2154 } else if (sdram_params->base.dramtype == LPDDR4) { 2155 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6); 2156 /* MR11 */ 2157 pctl_write_mr(dram->pctl, 3, 11, 2158 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 2159 LPDDR4); 2160 /* MR12 */ 2161 pctl_write_mr(dram->pctl, 3, 12, 2162 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, 2163 LPDDR4); 2164 2165 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7); 2166 /* MR22 */ 2167 pctl_write_mr(dram->pctl, 3, 22, 2168 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 2169 LPDDR4); 2170 /* MR14 */ 2171 pctl_write_mr(dram->pctl, 3, 14, 2172 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 2173 LPDDR4); 2174 } 2175 2176 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) { 2177 if (post_init != 0) 2178 printascii("DTT cs0 error\n"); 2179 return -1; 2180 } 2181 2182 if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) { 2183 if (data_training(dram, 1, sdram_params, 0, 2184 READ_GATE_TRAINING) != 0) { 2185 printascii("DTT cs1 error\n"); 2186 return -1; 2187 } 2188 } 2189 2190 if (sdram_params->base.dramtype == DDR4) { 2191 ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39; 2192 pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref, 2193 sdram_params->base.dramtype); 2194 } 2195 2196 dram_all_config(dram, sdram_params); 2197 enable_low_power(dram, sdram_params); 2198 2199 return 0; 2200 } 2201 2202 static u64 dram_detect_cap(struct dram_info *dram, 2203 struct rv1126_sdram_params *sdram_params, 2204 unsigned char channel) 2205 { 2206 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2207 void __iomem *pctl_base = dram->pctl; 2208 void __iomem *phy_base = dram->phy; 2209 u32 mr8; 2210 2211 u32 bktmp; 2212 u32 coltmp; 2213 u32 rowtmp; 2214 u32 cs; 2215 u32 bw = 1; 2216 u32 dram_type = sdram_params->base.dramtype; 2217 u32 pwrctl; 2218 2219 cap_info->bw = bw; 2220 if (dram_type != LPDDR4) { 2221 if (dram_type != DDR4) { 2222 coltmp = 12; 2223 bktmp = 3; 2224 if (dram_type == LPDDR2) 2225 rowtmp = 15; 2226 else 2227 rowtmp = 16; 2228 2229 if (sdram_detect_col(cap_info, coltmp) != 0) 2230 goto cap_err; 2231 2232 sdram_detect_bank(cap_info, coltmp, bktmp); 2233 sdram_detect_dbw(cap_info, dram_type); 2234 } else { 2235 coltmp = 10; 2236 bktmp = 4; 2237 rowtmp = 17; 2238 2239 cap_info->col = 10; 2240 cap_info->bk = 2; 2241 sdram_detect_bg(cap_info, coltmp); 2242 } 2243 2244 if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0) 2245 goto cap_err; 2246 2247 sdram_detect_row_3_4(cap_info, coltmp, bktmp); 2248 } else { 2249 mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf; 2250 cap_info->col = 10; 2251 cap_info->bk = 3; 2252 cap_info->cs0_row = 14 + (mr8 + 1) / 2; 2253 if (mr8 % 2) 2254 cap_info->row_3_4 = 1; 2255 else 2256 cap_info->row_3_4 = 0; 2257 cap_info->dbw = 1; 2258 cap_info->bw = 2; 2259 } 2260 2261 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL); 2262 writel(0, pctl_base + DDR_PCTL2_PWRCTL); 2263 2264 if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0) 2265 cs = 1; 2266 else 2267 cs = 0; 2268 cap_info->rank = cs + 1; 2269 2270 if (dram_type != LPDDR4) { 2271 setbits_le32(PHY_REG(phy_base, 0xf), 0xf); 2272 2273 phy_soft_reset(dram); 2274 2275 if (data_training(dram, 0, sdram_params, 0, 2276 READ_GATE_TRAINING) == 0) 2277 cap_info->bw = 2; 2278 else 2279 cap_info->bw = 1; 2280 } 2281 2282 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL); 2283 2284 cap_info->cs0_high16bit_row = cap_info->cs0_row; 2285 if (cs) { 2286 cap_info->cs1_row = cap_info->cs0_row; 2287 cap_info->cs1_high16bit_row = cap_info->cs0_row; 2288 } else { 2289 cap_info->cs1_row = 0; 2290 cap_info->cs1_high16bit_row = 0; 2291 } 2292 2293 return 0; 2294 cap_err: 2295 return -1; 2296 } 2297 2298 static int dram_detect_cs1_row(struct dram_info *dram, 2299 struct rv1126_sdram_params *sdram_params, 2300 unsigned char channel) 2301 { 2302 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2303 void __iomem *pctl_base = dram->pctl; 2304 u32 ret = 0; 2305 void __iomem *test_addr; 2306 u32 row, bktmp, coltmp, bw; 2307 u64 cs0_cap; 2308 u32 byte_mask; 2309 u32 cs_pst; 2310 u32 cs_add = 0; 2311 u32 max_row; 2312 2313 if (cap_info->rank == 2) { 2314 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2315 6 + 2; 2316 if (cs_pst < 28) 2317 cs_add = 1; 2318 2319 cs0_cap = 1 << cs_pst; 2320 2321 if (sdram_params->base.dramtype == DDR4) { 2322 if (cap_info->dbw == 0) 2323 bktmp = cap_info->bk + 2; 2324 else 2325 bktmp = cap_info->bk + 1; 2326 } else { 2327 bktmp = cap_info->bk; 2328 } 2329 bw = cap_info->bw; 2330 coltmp = cap_info->col; 2331 2332 if (bw == 2) 2333 byte_mask = 0xFFFF; 2334 else 2335 byte_mask = 0xFF; 2336 2337 max_row = (cs_pst == 31) ? 30 : 31; 2338 2339 max_row = max_row - bktmp - coltmp - bw - cs_add + 1; 2340 2341 row = (cap_info->cs0_row > max_row) ? max_row : 2342 cap_info->cs0_row; 2343 2344 for (; row > 12; row--) { 2345 test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE + 2346 (u32)cs0_cap + 2347 (1ul << (row + bktmp + coltmp + 2348 cs_add + bw - 1ul))); 2349 2350 writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap); 2351 writel(PATTERN, test_addr); 2352 2353 if (((readl(test_addr) & byte_mask) == 2354 (PATTERN & byte_mask)) && 2355 ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) & 2356 byte_mask) == 0)) { 2357 ret = row; 2358 break; 2359 } 2360 } 2361 } 2362 2363 return ret; 2364 } 2365 2366 /* return: 0 = success, other = fail */ 2367 static int sdram_init_detect(struct dram_info *dram, 2368 struct rv1126_sdram_params *sdram_params) 2369 { 2370 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2371 u32 ret; 2372 u32 sys_reg = 0; 2373 u32 sys_reg3 = 0; 2374 2375 if (sdram_init_(dram, sdram_params, 0) != 0) 2376 return -1; 2377 2378 if (sdram_params->base.dramtype == DDR3) { 2379 writel(PATTERN, CONFIG_SYS_SDRAM_BASE); 2380 if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN) 2381 return -1; 2382 } 2383 2384 if (dram_detect_cap(dram, sdram_params, 0) != 0) 2385 return -1; 2386 2387 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, 2388 sdram_params->base.dramtype); 2389 ret = sdram_init_(dram, sdram_params, 1); 2390 if (ret != 0) 2391 goto out; 2392 2393 cap_info->cs1_row = 2394 dram_detect_cs1_row(dram, sdram_params, 0); 2395 if (cap_info->cs1_row) { 2396 sys_reg = readl(&dram->pmugrf->os_reg[2]); 2397 sys_reg3 = readl(&dram->pmugrf->os_reg[3]); 2398 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row, 2399 sys_reg, sys_reg3, 0); 2400 writel(sys_reg, &dram->pmugrf->os_reg[2]); 2401 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 2402 } 2403 2404 sdram_detect_high_row(cap_info); 2405 2406 out: 2407 return ret; 2408 } 2409 2410 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz) 2411 { 2412 u32 i; 2413 u32 offset = 0; 2414 struct ddr2_3_4_lp2_3_info *ddr_info; 2415 2416 if (!freq_mhz) { 2417 ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype); 2418 if (ddr_info) 2419 freq_mhz = 2420 (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) & 2421 DDR_FREQ_MASK; 2422 else 2423 freq_mhz = 0; 2424 } 2425 2426 for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) { 2427 if (sdram_configs[i].base.ddr_freq == 0 || 2428 freq_mhz < sdram_configs[i].base.ddr_freq) 2429 break; 2430 } 2431 offset = i == 0 ? 0 : i - 1; 2432 2433 return &sdram_configs[offset]; 2434 } 2435 2436 static const u16 pctl_need_update_reg[] = { 2437 DDR_PCTL2_RFSHTMG, 2438 DDR_PCTL2_INIT3, 2439 DDR_PCTL2_INIT4, 2440 DDR_PCTL2_INIT6, 2441 DDR_PCTL2_INIT7, 2442 DDR_PCTL2_DRAMTMG0, 2443 DDR_PCTL2_DRAMTMG1, 2444 DDR_PCTL2_DRAMTMG2, 2445 DDR_PCTL2_DRAMTMG3, 2446 DDR_PCTL2_DRAMTMG4, 2447 DDR_PCTL2_DRAMTMG5, 2448 DDR_PCTL2_DRAMTMG6, 2449 DDR_PCTL2_DRAMTMG7, 2450 DDR_PCTL2_DRAMTMG8, 2451 DDR_PCTL2_DRAMTMG9, 2452 DDR_PCTL2_DRAMTMG12, 2453 DDR_PCTL2_DRAMTMG13, 2454 DDR_PCTL2_DRAMTMG14, 2455 DDR_PCTL2_ZQCTL0, 2456 DDR_PCTL2_DFITMG0, 2457 DDR_PCTL2_ODTCFG 2458 }; 2459 2460 static const u16 phy_need_update_reg[] = { 2461 0x14, 2462 0x18, 2463 0x1c 2464 }; 2465 2466 static void pre_set_rate(struct dram_info *dram, 2467 struct rv1126_sdram_params *sdram_params, 2468 u32 dst_fsp, u32 dst_fsp_lp4) 2469 { 2470 u32 i, j, find; 2471 void __iomem *pctl_base = dram->pctl; 2472 void __iomem *phy_base = dram->phy; 2473 u32 phy_offset; 2474 u32 mr_tmp; 2475 u32 dramtype = sdram_params->base.dramtype; 2476 2477 sw_set_req(dram); 2478 /* pctl timing update */ 2479 for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) { 2480 for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF; 2481 j++) { 2482 if (sdram_params->pctl_regs.pctl[j][0] == 2483 pctl_need_update_reg[i]) { 2484 writel(sdram_params->pctl_regs.pctl[j][1], 2485 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2486 pctl_need_update_reg[i]); 2487 find = j; 2488 break; 2489 } 2490 } 2491 } 2492 sw_set_ack(dram); 2493 2494 /* phy timing update */ 2495 if (dst_fsp == 0) 2496 phy_offset = 0; 2497 else 2498 phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3); 2499 /* cl cwl al update */ 2500 for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) { 2501 for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF; 2502 j++) { 2503 if (sdram_params->phy_regs.phy[j][0] == 2504 phy_need_update_reg[i]) { 2505 writel(sdram_params->phy_regs.phy[j][1], 2506 phy_base + phy_offset + 2507 phy_need_update_reg[i]); 2508 find = j; 2509 break; 2510 } 2511 } 2512 } 2513 2514 set_ds_odt(dram, sdram_params, dst_fsp); 2515 if (dramtype == LPDDR4) { 2516 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2517 DDR_PCTL2_INIT4); 2518 /* MR13 */ 2519 pctl_write_mr(dram->pctl, 3, 13, 2520 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 2521 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | 2522 ((0x2 << 6) >> dst_fsp_lp4), dramtype); 2523 writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 2524 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | 2525 ((0x2 << 6) >> dst_fsp_lp4), 2526 PHY_REG(phy_base, 0x1b)); 2527 /* MR3 */ 2528 pctl_write_mr(dram->pctl, 3, 3, 2529 mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & 2530 PCTL2_MR_MASK, 2531 dramtype); 2532 writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK, 2533 PHY_REG(phy_base, 0x19)); 2534 2535 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2536 DDR_PCTL2_INIT3); 2537 /* MR1 */ 2538 pctl_write_mr(dram->pctl, 3, 1, 2539 mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & 2540 PCTL2_MR_MASK, 2541 dramtype); 2542 writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK, 2543 PHY_REG(phy_base, 0x17)); 2544 /* MR2 */ 2545 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK, 2546 dramtype); 2547 writel(mr_tmp & PCTL2_MR_MASK, 2548 PHY_REG(phy_base, 0x18)); 2549 2550 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2551 DDR_PCTL2_INIT6); 2552 /* MR11 */ 2553 pctl_write_mr(dram->pctl, 3, 11, 2554 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 2555 dramtype); 2556 writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 2557 PHY_REG(phy_base, 0x1a)); 2558 /* MR12 */ 2559 pctl_write_mr(dram->pctl, 3, 12, 2560 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, 2561 dramtype); 2562 2563 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2564 DDR_PCTL2_INIT7); 2565 /* MR22 */ 2566 pctl_write_mr(dram->pctl, 3, 22, 2567 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 2568 dramtype); 2569 writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 2570 PHY_REG(phy_base, 0x1d)); 2571 /* MR14 */ 2572 pctl_write_mr(dram->pctl, 3, 14, 2573 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 2574 dramtype); 2575 writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 2576 PHY_REG(phy_base, 0x1c)); 2577 } 2578 2579 update_noc_timing(dram, sdram_params); 2580 } 2581 2582 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp, 2583 struct rv1126_sdram_params *sdram_params) 2584 { 2585 void __iomem *pctl_base = dram->pctl; 2586 void __iomem *phy_base = dram->phy; 2587 struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp]; 2588 u32 temp, temp1; 2589 struct ddr2_3_4_lp2_3_info *ddr_info; 2590 2591 ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype); 2592 2593 p_fsp_param->freq_mhz = sdram_params->base.ddr_freq; 2594 2595 if (sdram_params->base.dramtype == LPDDR4) { 2596 p_fsp_param->rd_odt_up_en = 0; 2597 p_fsp_param->rd_odt_down_en = 1; 2598 } else { 2599 p_fsp_param->rd_odt_up_en = 2600 ODT_INFO_PULLUP_EN(ddr_info->odt_info); 2601 p_fsp_param->rd_odt_down_en = 2602 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info); 2603 } 2604 2605 if (p_fsp_param->rd_odt_up_en) 2606 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111)); 2607 else if (p_fsp_param->rd_odt_down_en) 2608 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110)); 2609 else 2610 p_fsp_param->rd_odt = 0; 2611 p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112)); 2612 p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100)); 2613 p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102)); 2614 p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128)); 2615 p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105)); 2616 2617 if (sdram_params->base.dramtype == DDR3) { 2618 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2619 DDR_PCTL2_INIT3); 2620 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK; 2621 p_fsp_param->ds_pdds = ((temp >> 1) & 0x1) | 2622 (((temp >> 5) & 0x1) << 1); 2623 p_fsp_param->dq_odt = ((temp >> 2) & 0x1) | 2624 (((temp >> 6) & 0x1) << 1) | 2625 (((temp >> 9) & 0x1) << 2); 2626 p_fsp_param->ca_odt = p_fsp_param->dq_odt; 2627 } else if (sdram_params->base.dramtype == DDR4) { 2628 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2629 DDR_PCTL2_INIT3); 2630 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK; 2631 p_fsp_param->ds_pdds = (temp >> 1) & 0x3; 2632 p_fsp_param->dq_odt = (temp >> 8) & 0x7; 2633 p_fsp_param->ca_odt = p_fsp_param->dq_odt; 2634 } else if (sdram_params->base.dramtype == LPDDR3) { 2635 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2636 DDR_PCTL2_INIT4); 2637 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK; 2638 p_fsp_param->ds_pdds = temp & 0xf; 2639 2640 p_fsp_param->dq_odt = lp3_odt_value; 2641 p_fsp_param->ca_odt = p_fsp_param->dq_odt; 2642 } else if (sdram_params->base.dramtype == LPDDR4) { 2643 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2644 DDR_PCTL2_INIT4); 2645 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK; 2646 p_fsp_param->ds_pdds = (temp >> 3) & 0x7; 2647 2648 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2649 DDR_PCTL2_INIT6); 2650 temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK; 2651 p_fsp_param->dq_odt = temp & 0x7; 2652 p_fsp_param->ca_odt = (temp >> 4) & 0x7; 2653 2654 temp = MAX(readl(PHY_REG(phy_base, 0x3ae)), 2655 readl(PHY_REG(phy_base, 0x3ce))); 2656 temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)), 2657 readl(PHY_REG(phy_base, 0x3de))); 2658 p_fsp_param->vref_ca[0] = (temp + temp1) / 2; 2659 temp = MAX(readl(PHY_REG(phy_base, 0x3af)), 2660 readl(PHY_REG(phy_base, 0x3cf))); 2661 temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)), 2662 readl(PHY_REG(phy_base, 0x3df))); 2663 p_fsp_param->vref_ca[1] = (temp + temp1) / 2; 2664 p_fsp_param->vref_ca[0] |= 2665 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6)); 2666 p_fsp_param->vref_ca[1] |= 2667 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6)); 2668 2669 p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >> 2670 3) & 0x1; 2671 } 2672 2673 p_fsp_param->noc_timings.ddrtiminga0 = 2674 sdram_params->ch.noc_timings.ddrtiminga0; 2675 p_fsp_param->noc_timings.ddrtimingb0 = 2676 sdram_params->ch.noc_timings.ddrtimingb0; 2677 p_fsp_param->noc_timings.ddrtimingc0 = 2678 sdram_params->ch.noc_timings.ddrtimingc0; 2679 p_fsp_param->noc_timings.devtodev0 = 2680 sdram_params->ch.noc_timings.devtodev0; 2681 p_fsp_param->noc_timings.ddrmode = 2682 sdram_params->ch.noc_timings.ddrmode; 2683 p_fsp_param->noc_timings.ddr4timing = 2684 sdram_params->ch.noc_timings.ddr4timing; 2685 p_fsp_param->noc_timings.agingx0 = 2686 sdram_params->ch.noc_timings.agingx0; 2687 p_fsp_param->noc_timings.aging0 = 2688 sdram_params->ch.noc_timings.aging0; 2689 p_fsp_param->noc_timings.aging1 = 2690 sdram_params->ch.noc_timings.aging1; 2691 p_fsp_param->noc_timings.aging2 = 2692 sdram_params->ch.noc_timings.aging2; 2693 p_fsp_param->noc_timings.aging3 = 2694 sdram_params->ch.noc_timings.aging3; 2695 2696 p_fsp_param->flag = FSP_FLAG; 2697 } 2698 2699 static void copy_fsp_param_to_ddr(void) 2700 { 2701 memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param, 2702 sizeof(fsp_param)); 2703 } 2704 2705 void ddr_set_rate(struct dram_info *dram, 2706 struct rv1126_sdram_params *sdram_params, 2707 u32 freq, u32 cur_freq, u32 dst_fsp, 2708 u32 dst_fsp_lp4, u32 training_en) 2709 { 2710 u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off; 2711 u32 mr_tmp; 2712 u32 lp_stat; 2713 u32 dramtype = sdram_params->base.dramtype; 2714 struct rv1126_sdram_params *sdram_params_new; 2715 void __iomem *pctl_base = dram->pctl; 2716 void __iomem *phy_base = dram->phy; 2717 2718 lp_stat = low_power_update(dram, 0); 2719 sdram_params_new = get_default_sdram_config(freq); 2720 sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank; 2721 2722 pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4); 2723 2724 while ((readl(pctl_base + DDR_PCTL2_STAT) & 2725 PCTL2_OPERATING_MODE_MASK) == 2726 PCTL2_OPERATING_MODE_SR) 2727 continue; 2728 2729 dest_dll_off = 0; 2730 dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2731 DDR_PCTL2_INIT3); 2732 if ((dramtype == DDR3 && (dst_init3 & 1)) || 2733 (dramtype == DDR4 && !(dst_init3 & 1))) 2734 dest_dll_off = 1; 2735 2736 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 2737 cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 2738 DDR_PCTL2_INIT3); 2739 cur_init3 &= PCTL2_MR_MASK; 2740 cur_dll_off = 1; 2741 if ((dramtype == DDR3 && !(cur_init3 & 1)) || 2742 (dramtype == DDR4 && (cur_init3 & 1))) 2743 cur_dll_off = 0; 2744 2745 if (!cur_dll_off) { 2746 if (dramtype == DDR3) 2747 cur_init3 |= 1; 2748 else 2749 cur_init3 &= ~1; 2750 pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype); 2751 } 2752 2753 setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 2754 PCTL2_DIS_AUTO_REFRESH); 2755 update_refresh_reg(dram); 2756 2757 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW); 2758 while (1) { 2759 if (((readl(pctl_base + DDR_PCTL2_STAT) & 2760 PCTL2_SELFREF_TYPE_MASK) == 2761 PCTL2_SELFREF_TYPE_SR_NOT_AUTO) && 2762 ((readl(pctl_base + DDR_PCTL2_STAT) & 2763 PCTL2_OPERATING_MODE_MASK) == 2764 PCTL2_OPERATING_MODE_SR)) { 2765 break; 2766 } 2767 } 2768 2769 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK | 2770 PMUGRF_CON_DDRPHY_BUFFEREN_EN, 2771 dram->pmugrf->soc_con[0]); 2772 sw_set_req(dram); 2773 clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC, 2774 PCTL2_DFI_INIT_COMPLETE_EN); 2775 sw_set_ack(dram); 2776 2777 sw_set_req(dram); 2778 if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off) 2779 setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE); 2780 else 2781 clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE); 2782 2783 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0, 2784 PCTL2_DIS_SRX_ZQCL); 2785 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0, 2786 PCTL2_DIS_SRX_ZQCL); 2787 sw_set_ack(dram); 2788 2789 writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT), 2790 dram->cru->clkgate_con[2]); 2791 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK | 2792 (0x1 << CLK_DDR_UPCTL_EN_SHIFT) | 2793 (0x1 << ACLK_DDR_UPCTL_EN_SHIFT), 2794 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12); 2795 2796 clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); 2797 rkclk_set_dpll(dram, freq * MHz / 2); 2798 phy_pll_set(dram, freq * MHz, 0); 2799 phy_pll_set(dram, freq * MHz, 1); 2800 setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); 2801 2802 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK | 2803 PMUGRF_CON_DDRPHY_BUFFEREN_DIS, 2804 dram->pmugrf->soc_con[0]); 2805 writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT), 2806 dram->cru->clkgate_con[2]); 2807 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK | 2808 (0x0 << CLK_DDR_UPCTL_EN_SHIFT) | 2809 (0x0 << ACLK_DDR_UPCTL_EN_SHIFT), 2810 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12); 2811 while ((readl(pctl_base + DDR_PCTL2_DFISTAT) & 2812 PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) 2813 continue; 2814 2815 sw_set_req(dram); 2816 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); 2817 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp); 2818 sw_set_ack(dram); 2819 update_refresh_reg(dram); 2820 clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2); 2821 2822 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW); 2823 while ((readl(pctl_base + DDR_PCTL2_STAT) & 2824 PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR) 2825 continue; 2826 2827 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 2828 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 2829 2830 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4); 2831 if (dramtype == LPDDR3) { 2832 pctl_write_mr(dram->pctl, 3, 1, 2833 (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) & 2834 PCTL2_MR_MASK, 2835 dramtype); 2836 pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK, 2837 dramtype); 2838 pctl_write_mr(dram->pctl, 3, 3, 2839 (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) & 2840 PCTL2_MR_MASK, 2841 dramtype); 2842 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype); 2843 } else if ((dramtype == DDR3) || (dramtype == DDR4)) { 2844 pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK, 2845 dramtype); 2846 if (!dest_dll_off) { 2847 pctl_write_mr(dram->pctl, 3, 0, 2848 ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) & 2849 PCTL2_MR_MASK) | DDR3_DLL_RESET, 2850 dramtype); 2851 udelay(2); 2852 } 2853 pctl_write_mr(dram->pctl, 3, 0, 2854 (dst_init3 >> PCTL2_DDR34_MR0_SHIFT & 2855 PCTL2_MR_MASK) & (~DDR3_DLL_RESET), 2856 dramtype); 2857 pctl_write_mr(dram->pctl, 3, 2, 2858 ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) & 2859 PCTL2_MR_MASK), dramtype); 2860 if (dramtype == DDR4) { 2861 pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK, 2862 dramtype); 2863 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2864 DDR_PCTL2_INIT6); 2865 pctl_write_mr(dram->pctl, 3, 4, 2866 (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) & 2867 PCTL2_MR_MASK, 2868 dramtype); 2869 pctl_write_mr(dram->pctl, 3, 5, 2870 mr_tmp >> PCTL2_DDR4_MR5_SHIFT & 2871 PCTL2_MR_MASK, 2872 dramtype); 2873 2874 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2875 DDR_PCTL2_INIT7); 2876 pctl_write_mr(dram->pctl, 3, 6, 2877 mr_tmp >> PCTL2_DDR4_MR6_SHIFT & 2878 PCTL2_MR_MASK, 2879 dramtype); 2880 } 2881 } else if (dramtype == LPDDR4) { 2882 pctl_write_mr(dram->pctl, 3, 13, 2883 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 2884 PCTL2_MR_MASK) & (~(BIT(7)))) | 2885 dst_fsp_lp4 << 7, dramtype); 2886 } 2887 2888 /* training */ 2889 high_freq_training(dram, sdram_params_new, dst_fsp); 2890 2891 clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 2892 PCTL2_DIS_AUTO_REFRESH); 2893 low_power_update(dram, lp_stat); 2894 2895 save_fsp_param(dram, dst_fsp, sdram_params_new); 2896 } 2897 2898 static void ddr_set_rate_for_fsp(struct dram_info *dram, 2899 struct rv1126_sdram_params *sdram_params) 2900 { 2901 struct ddr2_3_4_lp2_3_info *ddr_info; 2902 u32 f0, f1, f2, f3; 2903 u32 dramtype = sdram_params->base.dramtype; 2904 2905 ddr_info = get_ddr_drv_odt_info(dramtype); 2906 if (!ddr_info) 2907 return; 2908 2909 memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param)); 2910 memset((void *)&fsp_param, 0, sizeof(fsp_param)); 2911 2912 f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) & 2913 DDR_FREQ_MASK; 2914 f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) & 2915 DDR_FREQ_MASK; 2916 f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) & 2917 DDR_FREQ_MASK; 2918 f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) & 2919 DDR_FREQ_MASK; 2920 2921 if (get_wrlvl_val(dram, sdram_params)) 2922 printascii("get wrlvl value fail\n"); 2923 printascii("change to: "); 2924 printdec(f1); 2925 printascii("MHz\n"); 2926 ddr_set_rate(&dram_info, sdram_params, f1, 2927 sdram_params->base.ddr_freq, 1, 1, 1); 2928 printascii("change to: "); 2929 printdec(f2); 2930 printascii("MHz\n"); 2931 ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1); 2932 printascii("change to: "); 2933 printdec(f3); 2934 printascii("MHz\n"); 2935 ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1); 2936 printascii("change to: "); 2937 printdec(f0); 2938 printascii("MHz(final freq)\n"); 2939 ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1); 2940 } 2941 2942 int get_uart_config(void) 2943 { 2944 struct sdram_head_info_index_v2 *index = 2945 (struct sdram_head_info_index_v2 *)common_info; 2946 struct global_info *gbl_info; 2947 2948 gbl_info = (struct global_info *)((void *)common_info + 2949 index->global_index.offset * 4); 2950 2951 return gbl_info->uart_info; 2952 } 2953 2954 /* return: 0 = success, other = fail */ 2955 int sdram_init(void) 2956 { 2957 struct rv1126_sdram_params *sdram_params; 2958 int ret = 0; 2959 struct sdram_head_info_index_v2 *index = 2960 (struct sdram_head_info_index_v2 *)common_info; 2961 struct global_info *gbl_info; 2962 2963 dram_info.phy = (void *)DDR_PHY_BASE_ADDR; 2964 dram_info.pctl = (void *)UPCTL2_BASE_ADDR; 2965 dram_info.grf = (void *)GRF_BASE_ADDR; 2966 dram_info.cru = (void *)CRU_BASE_ADDR; 2967 dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR; 2968 dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR; 2969 dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR; 2970 2971 if (index->version_info != 2 || 2972 (index->global_index.size != sizeof(struct global_info) / 4) || 2973 (index->ddr3_index.size != 2974 sizeof(struct ddr2_3_4_lp2_3_info) / 4) || 2975 (index->ddr4_index.size != 2976 sizeof(struct ddr2_3_4_lp2_3_info) / 4) || 2977 (index->lp3_index.size != 2978 sizeof(struct ddr2_3_4_lp2_3_info) / 4) || 2979 (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) || 2980 index->global_index.offset == 0 || 2981 index->ddr3_index.offset == 0 || 2982 index->ddr4_index.offset == 0 || 2983 index->lp3_index.offset == 0 || 2984 index->lp4_index.offset == 0) { 2985 printascii("common info error\n"); 2986 goto error; 2987 } 2988 2989 gbl_info = (struct global_info *)((void *)common_info + 2990 index->global_index.offset * 4); 2991 2992 dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info); 2993 dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info); 2994 2995 sdram_params = &sdram_configs[0]; 2996 2997 if (sdram_params->base.dramtype == DDR3 || 2998 sdram_params->base.dramtype == DDR4) { 2999 if (DDR_2T_INFO(gbl_info->info_2t)) 3000 sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10; 3001 else 3002 sdram_params->pctl_regs.pctl[0][1] &= 3003 ~(0x1 << 10); 3004 } 3005 ret = sdram_init_detect(&dram_info, sdram_params); 3006 if (ret) { 3007 sdram_print_dram_type(sdram_params->base.dramtype); 3008 printascii(", "); 3009 printdec(sdram_params->base.ddr_freq); 3010 printascii("MHz\n"); 3011 goto error; 3012 } 3013 print_ddr_info(sdram_params); 3014 3015 ddr_set_rate_for_fsp(&dram_info, sdram_params); 3016 copy_fsp_param_to_ddr(); 3017 3018 ddr_set_atags(&dram_info, sdram_params); 3019 3020 printascii("out\n"); 3021 3022 return ret; 3023 error: 3024 printascii("error\n"); 3025 return (-1); 3026 } 3027 #endif /* CONFIG_TPL_BUILD */ 3028