1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * (C) Copyright 2020 Rockchip Electronics Co., Ltd. 4 */ 5 6 #include <common.h> 7 #include <debug_uart.h> 8 #include <dm.h> 9 #include <ram.h> 10 #include <syscon.h> 11 #include <asm/io.h> 12 #include <asm/arch/clock.h> 13 #include <asm/arch/hardware.h> 14 #include <asm/arch/rk_atags.h> 15 #include <asm/arch/cru_rv1126.h> 16 #include <asm/arch/grf_rv1126.h> 17 #include <asm/arch/sdram_common.h> 18 #include <asm/arch/sdram_rv1126.h> 19 20 /* define training flag */ 21 #define CA_TRAINING (0x1 << 0) 22 #define READ_GATE_TRAINING (0x1 << 1) 23 #define WRITE_LEVELING (0x1 << 2) 24 #define WRITE_TRAINING (0x1 << 3) 25 #define READ_TRAINING (0x1 << 4) 26 #define FULL_TRAINING (0xff) 27 28 #define SKEW_RX_SIGNAL (0) 29 #define SKEW_TX_SIGNAL (1) 30 #define SKEW_CA_SIGNAL (2) 31 32 #define DESKEW_MDF_ABS_VAL (0) 33 #define DESKEW_MDF_DIFF_VAL (1) 34 35 #ifdef CONFIG_TPL_BUILD 36 #ifndef CONFIG_TPL_TINY_FRAMEWORK 37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!! 
38 #endif 39 #endif 40 41 #ifdef CONFIG_TPL_BUILD 42 43 struct dram_info { 44 void __iomem *pctl; 45 void __iomem *phy; 46 struct rv1126_cru *cru; 47 struct msch_regs *msch; 48 struct rv1126_ddrgrf *ddrgrf; 49 struct rv1126_grf *grf; 50 struct ram_info info; 51 struct rv1126_pmugrf *pmugrf; 52 u32 sr_idle; 53 u32 pd_idle; 54 }; 55 56 #define GRF_BASE_ADDR 0xfe000000 57 #define PMU_GRF_BASE_ADDR 0xfe020000 58 #define DDR_GRF_BASE_ADDR 0xfe030000 59 #define BUS_SGRF_BASE_ADDR 0xfe0a0000 60 #define SERVER_MSCH_BASE_ADDR 0xfe800000 61 #define CRU_BASE_ADDR 0xff490000 62 #define DDR_PHY_BASE_ADDR 0xff4a0000 63 #define UPCTL2_BASE_ADDR 0xffa50000 64 65 #define SGRF_SOC_CON12 0x30 66 #define SGRF_SOC_CON13 0x34 67 68 struct dram_info dram_info; 69 70 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3) 71 struct rv1126_sdram_params sdram_configs[] = { 72 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc" 73 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc" 74 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc" 75 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc" 76 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc" 77 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc" 78 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc" 79 }; 80 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0) 81 struct rv1126_sdram_params sdram_configs[] = { 82 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc" 83 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc" 84 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc" 85 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc" 86 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc" 87 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc" 88 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc" 89 }; 90 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6) 91 struct rv1126_sdram_params sdram_configs[] = { 92 #include 
"sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc" 93 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc" 94 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc" 95 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc" 96 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc" 97 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc" 98 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc" 99 }; 100 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) 101 struct rv1126_sdram_params sdram_configs[] = { 102 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc" 103 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc" 104 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc" 105 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc" 106 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc" 107 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc" 108 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc" 109 }; 110 #endif 111 112 u32 common_info[] = { 113 #include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc" 114 }; 115 116 static struct rv1126_fsp_param fsp_param[MAX_IDX]; 117 118 static u8 lp3_odt_value; 119 120 static s8 wrlvl_result[2][4]; 121 122 /* DDR configuration 0-9 */ 123 u16 ddr_cfg_2_rbc[] = { 124 ((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */ 125 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */ 126 ((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */ 127 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */ 128 ((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */ 129 ((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */ 130 ((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */ 131 ((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */ 132 ((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */ 133 ((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */ 134 }; 135 136 /* DDR configuration 10-21 */ 137 
/* DDR4 geometry encodings (matched against tmp in calculate_ddrconfig) */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0)  /* 21 */
};

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3)  /* 28 */
};

/* pairs of {ddr4 config id, equivalent ddr3-style config id} */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

/*
 * Per-ddrconfig values for the uMCTL2 ADDRMAP0..ADDRMAP8 registers
 * (column 0 goes to ADDRMAP0, the rest follow in register order).
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}  /* 22 */
};

/* PHY register indices used when selecting a DQ line for de-skew */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

/* skew group base addresses, per rank and A/B half */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/* PHY registers holding write-leveling results: [rank][byte lane] */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/* skew register bases for each DQS, RX first then TX */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

/*
 * Assert/deassert the controller and PHY soft resets via SGRF and CRU.
 * A '1' argument requests reset assert for that domain.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) |
	       UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

/*
 * Program the DPLL to the requested rate (from the 24 MHz XIN OSC).
 * Post-dividers are chosen so the VCO stays in range for low rates;
 * the PLL is parked on the osc while reprogramming, then switched back.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;

	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz reference: fout = 24 * fbdiv / (refdiv*postdiv1*postdiv2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait up to ~1 ms for lock; proceeds regardless after timeout */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

/*
 * Map the detected chip geometry (rank/bw/col/row/bank) onto one of the
 * predefined ddrconfig indices, then (for DDR4) translate to the
 * equivalent ddr3-style index via d4_rbc_2_d3_rbc.
 * Returns the ddrconfig index, or a large value (from the initial -1)
 * if no table entry matches.
 */
static unsigned int
calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank: try the rank-interleaved configs first */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank 8-bank parts prefer configs 5..7 */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* ddrconf stayed (u32)-1 on a miss, which also trips this check */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

/* Open a uMCTL2 quasi-dynamic programming window (clear sw_done) */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
447 /* clear sw_done=0 */ 448 writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL); 449 } 450 451 static void sw_set_ack(struct dram_info *dram) 452 { 453 void __iomem *pctl_base = dram->pctl; 454 455 /* set sw_done=1 */ 456 writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL); 457 while (1) { 458 /* wait programming done */ 459 if (readl(pctl_base + DDR_PCTL2_SWSTAT) & 460 PCTL2_SW_DONE_ACK) 461 break; 462 } 463 } 464 465 static void set_ctl_address_map(struct dram_info *dram, 466 struct rv1126_sdram_params *sdram_params) 467 { 468 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 469 void __iomem *pctl_base = dram->pctl; 470 u32 ddrconf = cap_info->ddrconfig; 471 u32 i, row; 472 473 row = cap_info->cs0_row; 474 if (sdram_params->base.dramtype == DDR4) { 475 for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) { 476 if (ddrconf == d4_rbc_2_d3_rbc[i][1]) { 477 ddrconf = d4_rbc_2_d3_rbc[i][0]; 478 break; 479 } 480 } 481 } 482 483 if (ddrconf > ARRAY_SIZE(addrmap)) { 484 printascii("set ctl address map fail\n"); 485 return; 486 } 487 488 sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0), 489 &addrmap[ddrconf][0], 9 * 4); 490 491 /* unused row set to 0xf */ 492 for (i = 17; i >= row; i--) 493 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 + 494 ((i - 12) * 8 / 32) * 4, 495 0xf << ((i - 12) * 8 % 32)); 496 497 if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4) 498 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31); 499 if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1) 500 setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8); 501 502 if (cap_info->rank == 1) 503 clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f); 504 } 505 506 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait) 507 { 508 void __iomem *phy_base = dram->phy; 509 u32 fbdiv, prediv, postdiv, postdiv_en; 510 511 if (wait) { 512 clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB); 513 while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK)) 
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* divider choice keeps the VCO in range per rate band */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

/*
 * {register encoding, ohm} pairs, sorted by descending resistance.
 * set_ds_odt() scans these from the tail to pick the first entry whose
 * ohm value is >= the requested one.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

/* DDR3 PHY ODT encodings, same layout and ordering as above */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm,
44}, 586 {PHY_DDR3_RTT_41ohm, 41}, 587 {PHY_DDR3_RTT_38ohm, 38}, 588 {PHY_DDR3_RTT_37ohm, 37}, 589 {PHY_DDR3_RTT_34ohm, 34}, 590 {PHY_DDR3_RTT_32ohm, 32}, 591 {PHY_DDR3_RTT_31ohm, 31}, 592 {PHY_DDR3_RTT_29ohm, 29}, 593 {PHY_DDR3_RTT_28ohm, 28}, 594 {PHY_DDR3_RTT_27ohm, 27}, 595 {PHY_DDR3_RTT_25ohm, 25} 596 }; 597 598 static u16 d4lp3_phy_drv_2_ohm[][2] = { 599 {PHY_DDR4_LPDDR3_RON_482ohm, 482}, 600 {PHY_DDR4_LPDDR3_RON_244ohm, 244}, 601 {PHY_DDR4_LPDDR3_RON_162ohm, 162}, 602 {PHY_DDR4_LPDDR3_RON_122ohm, 122}, 603 {PHY_DDR4_LPDDR3_RON_97ohm, 97}, 604 {PHY_DDR4_LPDDR3_RON_81ohm, 81}, 605 {PHY_DDR4_LPDDR3_RON_69ohm, 69}, 606 {PHY_DDR4_LPDDR3_RON_61ohm, 61}, 607 {PHY_DDR4_LPDDR3_RON_54ohm, 54}, 608 {PHY_DDR4_LPDDR3_RON_48ohm, 48}, 609 {PHY_DDR4_LPDDR3_RON_44ohm, 44}, 610 {PHY_DDR4_LPDDR3_RON_40ohm, 40}, 611 {PHY_DDR4_LPDDR3_RON_37ohm, 37}, 612 {PHY_DDR4_LPDDR3_RON_34ohm, 34}, 613 {PHY_DDR4_LPDDR3_RON_32ohm, 32}, 614 {PHY_DDR4_LPDDR3_RON_30ohm, 30}, 615 {PHY_DDR4_LPDDR3_RON_28ohm, 28}, 616 {PHY_DDR4_LPDDR3_RON_27ohm, 27}, 617 {PHY_DDR4_LPDDR3_RON_25ohm, 25}, 618 {PHY_DDR4_LPDDR3_RON_24ohm, 24}, 619 {PHY_DDR4_LPDDR3_RON_23ohm, 23}, 620 {PHY_DDR4_LPDDR3_RON_22ohm, 22}, 621 {PHY_DDR4_LPDDR3_RON_21ohm, 21} 622 }; 623 624 static u16 d4lp3_phy_odt_2_ohm[][2] = { 625 {PHY_DDR4_LPDDR3_RTT_DISABLE, 0}, 626 {PHY_DDR4_LPDDR3_RTT_586ohm, 586}, 627 {PHY_DDR4_LPDDR3_RTT_294ohm, 294}, 628 {PHY_DDR4_LPDDR3_RTT_196ohm, 196}, 629 {PHY_DDR4_LPDDR3_RTT_148ohm, 148}, 630 {PHY_DDR4_LPDDR3_RTT_118ohm, 118}, 631 {PHY_DDR4_LPDDR3_RTT_99ohm, 99}, 632 {PHY_DDR4_LPDDR3_RTT_85ohm, 58}, 633 {PHY_DDR4_LPDDR3_RTT_76ohm, 76}, 634 {PHY_DDR4_LPDDR3_RTT_67ohm, 67}, 635 {PHY_DDR4_LPDDR3_RTT_60ohm, 60}, 636 {PHY_DDR4_LPDDR3_RTT_55ohm, 55}, 637 {PHY_DDR4_LPDDR3_RTT_50ohm, 50}, 638 {PHY_DDR4_LPDDR3_RTT_46ohm, 46}, 639 {PHY_DDR4_LPDDR3_RTT_43ohm, 43}, 640 {PHY_DDR4_LPDDR3_RTT_40ohm, 40}, 641 {PHY_DDR4_LPDDR3_RTT_38ohm, 38}, 642 {PHY_DDR4_LPDDR3_RTT_36ohm, 36}, 643 {PHY_DDR4_LPDDR3_RTT_34ohm, 34}, 644 
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

/* LPDDR4 PHY drive-strength encodings, descending ohms */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

/* LPDDR4 PHY ODT encodings, descending ohms */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

/*
 * Convert an ohm value into the LPDDR4 mode-register DQ-ODT/PDDS code
 * (smallest standard step >= the request; 0 ohm means disabled).
 */
static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else
	if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

/*
 * Locate the drive/odt parameter sub-block for the given DRAM type
 * inside the common_info blob (offsets are in 32-bit words).
 * Returns NULL for an unknown type.
 */
static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

/*
 * Program LPDDR4 CA (MR12) and DQ (MR14) VREF for the target fsp.
 * The millivolt value is clamped to 100..420 then encoded into the
 * two MR VREF ranges (range bit 6 plus a 4 mV step index).
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (ca_vref < 100)
		ca_vref = 100;
	if (ca_vref > 420)
		ca_vref = 420;

	if (ca_vref <= 300)
		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
	else
		ca_vref = (1 << 6) | (ca_vref - 220) / 4;

	if (dq_vref < 100)
		dq_vref = 100;
	if (dq_vref > 420)
		dq_vref = 420;

	if (dq_vref <= 300)
		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
	else
		dq_vref = (1 << 6) | (dq_vref - 220) / 4;

	sw_set_req(dram);
	clrsetbits_le32(pctl_base +
			UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

/*
 * Configure drive strength, ODT and slew rate on both the PHY and the
 * DRAM side for the target fsp, based on the ODT-enable frequency
 * thresholds stored in the common_info blob.
 */
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	/* lp4_info aliases the same blob; only lp4 fields are read for lp4 */
	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* dram odt en freq control phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* phy odt en freq control dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		/* lp4 terminates to ground: force pull-down-only ODT */
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	/*
	 * Reverse scans over the descending {code, ohm} tables: pick the
	 * first (i.e. smallest) entry whose ohms >= the requested value.
	 * All drv tables share the size of d3_phy_drv_2_ohm.
	 */
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <=
			    *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	if (dramtype != LPDDR4) {
		/* rx vref from the drv/odt voltage divider (128 = mid rail) */
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				     (phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	/* register bit is active-low relative to the blob flag */
	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	/* per-byte-lane blocks at 0x110/0x120/0x130/0x140 */
	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j
					+ 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp);

	/* fetch the MR1 (ddr3/4) or MR3 (lpddr) value staged in INIT3/INIT4 */
	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
mr1_mr3 |= LPDDR3_DS_34; 1046 else if (dram_drv_ohm <= 40) 1047 mr1_mr3 |= LPDDR3_DS_40; 1048 else if (dram_drv_ohm <= 48) 1049 mr1_mr3 |= LPDDR3_DS_48; 1050 else if (dram_drv_ohm <= 60) 1051 mr1_mr3 |= LPDDR3_DS_60; 1052 else if (dram_drv_ohm <= 80) 1053 mr1_mr3 |= LPDDR3_DS_80; 1054 1055 if (dram_odt_ohm == 0) 1056 lp3_odt_value = LPDDR3_ODT_DIS; 1057 else if (dram_odt_ohm <= 60) 1058 lp3_odt_value = LPDDR3_ODT_60; 1059 else if (dram_odt_ohm <= 120) 1060 lp3_odt_value = LPDDR3_ODT_120; 1061 else 1062 lp3_odt_value = LPDDR3_ODT_240; 1063 } else {/* for lpddr4 */ 1064 /* MR3 for lp4 PU-CAL and PDDS */ 1065 mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK); 1066 mr1_mr3 |= lp4_pu_cal; 1067 1068 tmp = lp4_odt_calc(dram_drv_ohm); 1069 if (!tmp) 1070 tmp = LPDDR4_PDDS_240; 1071 mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT); 1072 1073 /* MR11 for lp4 ca odt, dq odt set */ 1074 mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1075 DDR_PCTL2_INIT6); 1076 mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK; 1077 1078 mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK); 1079 1080 tmp = lp4_odt_calc(dram_odt_ohm); 1081 mr11 |= (tmp << LPDDR4_DQODT_SHIFT); 1082 1083 tmp = lp4_odt_calc(dram_caodt_ohm); 1084 mr11 |= (tmp << LPDDR4_CAODT_SHIFT); 1085 sw_set_req(dram); 1086 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1087 DDR_PCTL2_INIT6, 1088 PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT, 1089 mr11 << PCTL2_LPDDR4_MR11_SHIFT); 1090 sw_set_ack(dram); 1091 1092 /* MR22 for soc odt/odt-ck/odt-cs/odt-ca */ 1093 mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1094 DDR_PCTL2_INIT7); 1095 mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK; 1096 mr22 &= ~LPDDR4_SOC_ODT_MASK; 1097 1098 tmp = lp4_odt_calc(phy_odt_ohm); 1099 mr22 |= tmp; 1100 mr22 = mr22 | 1101 (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) << 1102 LPDDR4_ODTE_CK_SHIFT) | 1103 (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) << 1104 LPDDR4_ODTE_CS_SHIFT) | 1105 (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) << 
1106 LPDDR4_ODTD_CA_SHIFT); 1107 1108 sw_set_req(dram); 1109 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1110 DDR_PCTL2_INIT7, 1111 PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT, 1112 mr22 << PCTL2_LPDDR4_MR22_SHIFT); 1113 sw_set_ack(dram); 1114 } 1115 1116 if (dramtype == DDR4 || dramtype == DDR3) { 1117 sw_set_req(dram); 1118 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1119 DDR_PCTL2_INIT3, 1120 PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT, 1121 mr1_mr3 << PCTL2_DDR34_MR1_SHIFT); 1122 sw_set_ack(dram); 1123 } else { 1124 sw_set_req(dram); 1125 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1126 DDR_PCTL2_INIT4, 1127 PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT, 1128 mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT); 1129 sw_set_ack(dram); 1130 } 1131 } 1132 1133 static int sdram_cmd_dq_path_remap(struct dram_info *dram, 1134 struct rv1126_sdram_params *sdram_params) 1135 { 1136 void __iomem *phy_base = dram->phy; 1137 u32 dramtype = sdram_params->base.dramtype; 1138 struct sdram_head_info_index_v2 *index = 1139 (struct sdram_head_info_index_v2 *)common_info; 1140 struct dq_map_info *map_info; 1141 1142 map_info = (struct dq_map_info *)((void *)common_info + 1143 index->dq_map_index.offset * 4); 1144 1145 if (dramtype <= LPDDR4) 1146 writel((map_info->byte_map[dramtype / 4] >> 1147 ((dramtype % 4) * 8)) & 0xff, 1148 PHY_REG(phy_base, 0x4f)); 1149 1150 return 0; 1151 } 1152 1153 static void phy_cfg(struct dram_info *dram, 1154 struct rv1126_sdram_params *sdram_params) 1155 { 1156 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 1157 void __iomem *phy_base = dram->phy; 1158 u32 i, dq_map, tmp; 1159 u32 byte1 = 0, byte0 = 0; 1160 1161 sdram_cmd_dq_path_remap(dram, sdram_params); 1162 1163 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0); 1164 for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) { 1165 writel(sdram_params->phy_regs.phy[i][1], 1166 phy_base + sdram_params->phy_regs.phy[i][0]); 1167 } 1168 1169 
	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));

	/*
	 * PHY reg 0x4f holds the byte map written by
	 * sdram_cmd_dq_path_remap(); work out which physical byte lanes
	 * carry logical byte 0 and byte 1 (2 bits per lane).
	 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/*
	 * Enable only the byte lanes needed for the detected bus width:
	 * bw == 2: 32bit, all four lanes; bw == 1: 16bit, the two mapped
	 * lanes; otherwise 8bit, the byte0 lane only.
	 */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

/*
 * Toggle bit 1 of RFSHCTL3 (presumably refresh_update_level on the
 * uMCTL2 -- confirm against the controller databook) so the controller
 * picks up newly written refresh timing registers.  Always returns 0.
 */
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * Read a DRAM mode register through the controller MRR interface.
 * rank = 1: cs0
 * rank = 2: cs1
 * The raw 8-bit value is latched in ddr_grf_status[0] (status[1] for
 * LPDDR4); for LPDDR2/LPDDR3 the per-board DQ swizzle table (dqmap,
 * 4 bits per DQ) describes how the read-back bits are scrambled on the
 * bus.
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);

	/* LPDDR2 and LPDDR3 have separate board swizzle tables */
	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4)
{ 1238 temp = 0; 1239 for (i = 0; i < 8; i++) { 1240 temp = temp | (((ret >> i) & 0x1) << 1241 ((dqmap >> (i * 4)) & 0xf)); 1242 } 1243 } else { 1244 ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff); 1245 } 1246 1247 return ret; 1248 } 1249 1250 /* before call this function autorefresh should be disabled */ 1251 void send_a_refresh(struct dram_info *dram) 1252 { 1253 void __iomem *pctl_base = dram->pctl; 1254 1255 while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3) 1256 continue; 1257 writel(0x3, pctl_base + DDR_PCTL2_DBGCMD); 1258 } 1259 1260 static void enter_sr(struct dram_info *dram, u32 en) 1261 { 1262 void __iomem *pctl_base = dram->pctl; 1263 1264 if (en) { 1265 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW); 1266 while (1) { 1267 if (((readl(pctl_base + DDR_PCTL2_STAT) & 1268 PCTL2_SELFREF_TYPE_MASK) == 1269 PCTL2_SELFREF_TYPE_SR_NOT_AUTO) && 1270 ((readl(pctl_base + DDR_PCTL2_STAT) & 1271 PCTL2_OPERATING_MODE_MASK) == 1272 PCTL2_OPERATING_MODE_SR)) 1273 break; 1274 } 1275 } else { 1276 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW); 1277 while ((readl(pctl_base + DDR_PCTL2_STAT) & 1278 PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR) 1279 continue; 1280 } 1281 } 1282 1283 void record_dq_prebit(struct dram_info *dram) 1284 { 1285 u32 group, i, tmp; 1286 void __iomem *phy_base = dram->phy; 1287 1288 for (group = 0; group < 4; group++) { 1289 for (i = 0; i < ARRAY_SIZE(dq_sel); i++) { 1290 /* l_loop_invdelaysel */ 1291 writel(dq_sel[i][0], PHY_REG(phy_base, 1292 grp_addr[group] + 0x2c)); 1293 tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e)); 1294 writel(tmp, PHY_REG(phy_base, 1295 grp_addr[group] + dq_sel[i][1])); 1296 1297 /* r_loop_invdelaysel */ 1298 writel(dq_sel[i][0], PHY_REG(phy_base, 1299 grp_addr[group] + 0x2d)); 1300 tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f)); 1301 writel(tmp, PHY_REG(phy_base, 1302 grp_addr[group] + dq_sel[i][2])); 1303 } 1304 } 1305 } 1306 1307 static void 
update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* pulse bit4 of reg 0x70 to latch the recorded RX de-skew values */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	/* pulse bit6 of reg 0xc to latch the TX de-skew values */
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	/* pulse bit6 of reg 0x22 to latch the CA de-skew values */
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * Program the CA/CMD/CLK de-skew registers (0x150 + 0..0x1f), done with
 * the DRAM held in self-refresh.
 *
 * dir: DESKEW_MDF_ABS_VAL (0):  de-skew = delta_*
 *      DESKEW_MDF_DIFF_VAL (1): de-skew = current reg val + delta_*
 * delta_dif: value for the differential signal: clk
 * delta_sig: value for the single-ended signals: ca/cmd
 * cs: 0: cs0, 2: cs1, other: both (see cs_en below; only used for LPDDR4)
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/* temporarily disable DFI low-power handshake for LPDDR4 */
	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/*
	 * Reg 0x150 + 0x17 was already advanced by delta_sig in the loop
	 * above, so subtract it again before applying the clk delta.
	 */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	/* clk de-skew lives at offsets 0x17/0x18 (plus 0x4/0xa for LPDDR4) */
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	/* restore DFI low-power handshake if it was enabled on entry */
	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));

}

/*
 * Return the minimum de-skew value across all delay registers of the
 * given signal class (SKEW_RX_SIGNAL / SKEW_TX_SIGNAL / SKEW_CA_SIGNAL),
 * restricted to enabled byte lanes for rx/tx.  Starts from 0x3f
 * (presumably the largest 6-bit delay setting).
 */
static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	/* TX skew registers follow the RX set in dqs_dq_skew_adr[] */
	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

/*
 * en != 0: set the requested low-power enable bits (en & 0xf) in PWRCTL,
 *          return 0.
 * en == 0: clear all four low-power bits and return the previous setting
 *          so the caller can restore it later.
 */
static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * Program the DQ/DQS de-skew registers for one signal direction.
 *
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: DESKEW_MDF_ABS_VAL (0):  de-skew = delta_*
 *      DESKEW_MDF_DIFF_VAL (1): de-skew = current reg val + delta_*
 * delta_dif: value for the differential signal: dqs
 * delta_sig: value for the single-ended signals: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en =
readl(PHY_REG(phy_base, 0xf)) & 0xf; 1454 1455 if (signal == SKEW_RX_SIGNAL) 1456 offset = 0; 1457 else 1458 offset = 8; 1459 1460 for (j = offset; j < (offset + rank * 4); j++) { 1461 if (!((byte_en >> (j % 4)) & 1)) 1462 continue; 1463 for (i = 0; i < 0x9; i++) { 1464 if (dir == DESKEW_MDF_ABS_VAL) 1465 tmp = delta_sig; 1466 else 1467 tmp = delta_sig + readl(PHY_REG(phy_base, 1468 dqs_dq_skew_adr[j] + 1469 i)); 1470 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i)); 1471 } 1472 if (dir == DESKEW_MDF_ABS_VAL) 1473 tmp = delta_dif; 1474 else 1475 tmp = delta_dif + readl(PHY_REG(phy_base, 1476 dqs_dq_skew_adr[j] + 9)); 1477 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9)); 1478 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa)); 1479 } 1480 if (signal == SKEW_RX_SIGNAL) 1481 update_dq_rx_prebit(dram); 1482 else 1483 update_dq_tx_prebit(dram); 1484 } 1485 1486 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype) 1487 { 1488 void __iomem *phy_base = dram->phy; 1489 u32 ret; 1490 u32 dis_auto_zq = 0; 1491 u32 odt_val_up, odt_val_dn; 1492 u32 i, j; 1493 1494 odt_val_dn = readl(PHY_REG(phy_base, 0x110)); 1495 odt_val_up = readl(PHY_REG(phy_base, 0x111)); 1496 1497 if (dramtype != LPDDR4) { 1498 for (i = 0; i < 4; i++) { 1499 j = 0x110 + i * 0x10; 1500 writel(PHY_DDR4_LPDDR3_RTT_294ohm, 1501 PHY_REG(phy_base, j)); 1502 writel(PHY_DDR4_LPDDR3_RTT_DISABLE, 1503 PHY_REG(phy_base, j + 0x1)); 1504 } 1505 } 1506 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1507 /* use normal read mode for data training */ 1508 clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); 1509 1510 if (dramtype == DDR4) 1511 setbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); 1512 1513 /* choose training cs */ 1514 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs)); 1515 /* enable gate training */ 1516 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1); 1517 udelay(50); 1518 ret = readl(PHY_REG(phy_base, 0x91)); 1519 /* disable gate training */ 1520 
clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0); 1521 clrbits_le32(PHY_REG(phy_base, 2), 0x30); 1522 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1523 1524 if (ret & 0x20) 1525 ret = -1; 1526 else 1527 ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf); 1528 1529 if (dramtype != LPDDR4) { 1530 for (i = 0; i < 4; i++) { 1531 j = 0x110 + i * 0x10; 1532 writel(odt_val_dn, PHY_REG(phy_base, j)); 1533 writel(odt_val_up, PHY_REG(phy_base, j + 0x1)); 1534 } 1535 } 1536 return ret; 1537 } 1538 1539 static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype, 1540 u32 rank) 1541 { 1542 void __iomem *pctl_base = dram->pctl; 1543 void __iomem *phy_base = dram->phy; 1544 u32 dis_auto_zq = 0; 1545 u32 tmp; 1546 u32 cur_fsp; 1547 u32 timeout_us = 1000; 1548 1549 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1550 1551 clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1); 1552 1553 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1554 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) & 1555 0xffff; 1556 writel(tmp & 0xff, PHY_REG(phy_base, 0x3)); 1557 1558 /* disable another cs's output */ 1559 if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2) 1560 pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12), 1561 dramtype); 1562 if (dramtype == DDR3 || dramtype == DDR4) 1563 writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4)); 1564 else 1565 writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4)); 1566 1567 /* choose cs */ 1568 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), 1569 ((0x2 >> cs) << 6) | (0 << 2)); 1570 /* enable write leveling */ 1571 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), 1572 ((0x2 >> cs) << 6) | (1 << 2)); 1573 1574 while (1) { 1575 if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) == 1576 (readl(PHY_REG(phy_base, 0xf)) & 0xf)) 1577 break; 1578 1579 udelay(1); 1580 if (timeout_us-- == 0) { 1581 printascii("error: write leveling timeout\n"); 1582 while (1) 1583 ; 1584 } 
1585 } 1586 1587 /* disable write leveling */ 1588 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2), 1589 ((0x2 >> cs) << 6) | (0 << 2)); 1590 clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6); 1591 1592 /* enable another cs's output */ 1593 if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2) 1594 pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12), 1595 dramtype); 1596 1597 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1598 1599 return 0; 1600 } 1601 1602 char pattern[32] = { 1603 0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa, 1604 0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55, 1605 0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55, 1606 0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa 1607 }; 1608 1609 static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype, 1610 u32 mhz) 1611 { 1612 void __iomem *pctl_base = dram->pctl; 1613 void __iomem *phy_base = dram->phy; 1614 u32 trefi_1x, trfc_1x; 1615 u32 dis_auto_zq = 0; 1616 u32 timeout_us = 1000; 1617 u32 dqs_default; 1618 u32 cur_fsp; 1619 u32 vref_inner; 1620 u32 i; 1621 struct sdram_head_info_index_v2 *index = 1622 (struct sdram_head_info_index_v2 *)common_info; 1623 struct dq_map_info *map_info; 1624 1625 vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff; 1626 if (dramtype == DDR3 && vref_inner == 0x80) { 1627 for (i = 0; i < 4; i++) 1628 writel(vref_inner - 0xa, 1629 PHY_REG(phy_base, 0x118 + i * 0x10)); 1630 1631 /* reg_rx_vref_value_update */ 1632 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1633 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1634 } 1635 1636 map_info = (struct dq_map_info *)((void *)common_info + 1637 index->dq_map_index.offset * 4); 1638 /* only 1cs a time, 0:cs0 1 cs1 */ 1639 if (cs > 1) 1640 return -1; 1641 1642 dqs_default = 0xf; 1643 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1644 1645 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1646 /* config refresh timing */ 1647 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1648 
DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32; 1649 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1650 DDR_PCTL2_RFSHTMG) & 0x3ff; 1651 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */ 1652 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff); 1653 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f); 1654 /* reg_phy_trfc */ 1655 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x); 1656 /* reg_max_refi_cnt */ 1657 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4); 1658 1659 /* choose training cs */ 1660 clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6); 1661 1662 /* set dq map for ddr4 */ 1663 if (dramtype == DDR4) { 1664 setbits_le32(PHY_REG(phy_base, 0x70), BIT(7)); 1665 for (i = 0; i < 4; i++) { 1666 writel((map_info->ddr4_dq_map[cs * 2] >> 1667 ((i % 4) * 8)) & 0xff, 1668 PHY_REG(phy_base, 0x238 + i)); 1669 writel((map_info->ddr4_dq_map[cs * 2 + 1] >> 1670 ((i % 4) * 8)) & 0xff, 1671 PHY_REG(phy_base, 0x2b8 + i)); 1672 } 1673 } 1674 1675 /* cha_l reg_l_rd_train_dqs_default[5:0] */ 1676 clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default); 1677 /* cha_h reg_h_rd_train_dqs_default[5:0] */ 1678 clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default); 1679 /* chb_l reg_l_rd_train_dqs_default[5:0] */ 1680 clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default); 1681 /* chb_h reg_h_rd_train_dqs_default[5:0] */ 1682 clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default); 1683 1684 /* Choose the read train auto mode */ 1685 clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1); 1686 /* Enable the auto train of the read train */ 1687 clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3); 1688 1689 /* Wait the train done. 
*/ 1690 while (1) { 1691 if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1) 1692 break; 1693 1694 udelay(1); 1695 if (timeout_us-- == 0) { 1696 printascii("error: read training timeout\n"); 1697 return -1; 1698 } 1699 } 1700 1701 /* Check the read train state */ 1702 if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) || 1703 (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) { 1704 printascii("error: read training error\n"); 1705 return -1; 1706 } 1707 1708 /* Exit the Read Training by setting */ 1709 clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1)); 1710 1711 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1712 1713 if (dramtype == DDR3 && vref_inner == 0x80) { 1714 for (i = 0; i < 4; i++) 1715 writel(vref_inner, 1716 PHY_REG(phy_base, 0x118 + i * 0x10)); 1717 1718 /* reg_rx_vref_value_update */ 1719 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1720 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1721 } 1722 1723 return 0; 1724 } 1725 1726 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype, 1727 u32 mhz, u32 dst_fsp) 1728 { 1729 void __iomem *pctl_base = dram->pctl; 1730 void __iomem *phy_base = dram->phy; 1731 u32 trefi_1x, trfc_1x; 1732 u32 dis_auto_zq = 0; 1733 u32 timeout_us = 1000; 1734 u32 cur_fsp; 1735 u32 mr_tmp, cl, cwl, phy_fsp, offset = 0; 1736 1737 if (dramtype == LPDDR3 && mhz <= 400) { 1738 phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3; 1739 offset = (phy_fsp == 0) ? 
0x5 : 0x387 + (phy_fsp - 1) * 3; 1740 cl = readl(PHY_REG(phy_base, offset)); 1741 cwl = readl(PHY_REG(phy_base, offset + 2)); 1742 1743 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8); 1744 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4); 1745 pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype); 1746 } 1747 1748 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1749 1750 /* PHY_0x7b[7:0] reg_train_col_addr[7:0] */ 1751 clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0); 1752 /* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */ 1753 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2); 1754 /* PHY_0x7c[1:0] reg_train_col_addr[9:8] */ 1755 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0); 1756 /* PHY_0x7d[7:0] reg_train_row_addr[7:0] */ 1757 clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0); 1758 /* PHY_0x7e[7:0] reg_train_row_addr[15:8] */ 1759 clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0); 1760 1761 /* PHY_0x71[3] wrtrain_check_data_value_random_gen */ 1762 clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3)); 1763 1764 /* config refresh timing */ 1765 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1766 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1767 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32; 1768 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1769 DDR_PCTL2_RFSHTMG) & 0x3ff; 1770 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */ 1771 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff); 1772 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f); 1773 /* reg_phy_trfc */ 1774 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x); 1775 /* reg_max_refi_cnt */ 1776 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4); 1777 1778 /* choose training cs */ 1779 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6); 1780 1781 /* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */ 1782 /* 0: Use the write-leveling value. 
 */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* poll reg 0x92 bit7 for completion; hang hard on timeout */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		/* average of the two byte-lane vref results */
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the CL/CWL values that were overridden for low freq */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}

/*
 * Run the requested training steps on one chip-select.
 *
 * training_flag is a bit-mask of WRITE_LEVELING / READ_GATE_TRAINING /
 * READ_TRAINING / WRITE_TRAINING; FULL_TRAINING expands to all four.
 * Returns 0 on success or the first failing step's error code.
 */
static int data_training(struct dram_info *dram, u32 cs,
			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
			 u32 training_flag)
{
	u32 ret = 0;

	if (training_flag == FULL_TRAINING)
		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
				WRITE_TRAINING | READ_TRAINING;

	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
		ret = data_training_wl(dram, cs,
				       sdram_params->base.dramtype,
				       sdram_params->ch.cap_info.rank);
		if (ret != 0)
			goto out;
	}

	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
		ret = data_training_rg(dram, cs,
				       sdram_params->base.dramtype);
		if (ret != 0)
			goto out;
	}

	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
		ret = data_training_rd(dram, cs,
				       sdram_params->base.dramtype,
				       sdram_params->base.ddr_freq);
		if (ret != 0)
			goto out;
	}

	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
		ret = data_training_wr(dram, cs,
				       sdram_params->base.dramtype,
				       sdram_params->base.ddr_freq, dst_fsp);
		if (ret != 0)
			goto out;
	}

out:
	return ret;
}

/*
 * Run write leveling with a fixed CA/CLK de-skew (0x1f) and record the
 * per-rank, per-byte results (minus the applied clk skew) into
 * wrlvl_result[][] for later use by high_freq_training().
 * Low-power mode is suspended for the duration and restored afterwards.
 */
static int get_wrlvl_val(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params)
{
	u32 i, j, clk_skew;
	void __iomem *phy_base = dram->phy;
	u32 lp_stat;
	int ret;

	lp_stat = low_power_update(dram, 0);

	clk_skew = 0x1f;
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
			 sdram_params->base.dramtype);

	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);

	for (j = 0; j < 2; j++)
		for (i = 0; i < 4; i++)
			wrlvl_result[j][i] =
				readl(PHY_REG(phy_base,
					      wrlvl_result_offset[j][i])) -
				clk_skew;

	low_power_update(dram, lp_stat);

	return ret;
}

/*
 * Training sequence for the target frequency: average the recorded
 * write-leveling results into a dqs/clk/ca skew base, then run the full
 * read/write training and normalize the resulting de-skew values.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	int ret;

	dqs_skew = 0;
	for (j = 0; j <
sdram_params->ch.cap_info.rank; j++) 1927 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) 1928 dqs_skew += wrlvl_result[j][i]; 1929 dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank * 1930 ARRAY_SIZE(wrlvl_result[0])); 1931 1932 clk_skew = 0x20 - dqs_skew; 1933 dqs_skew = 0x20; 1934 1935 if (dramtype == LPDDR4) { 1936 clk_skew = 0; 1937 ca_skew = 0; 1938 } else if (dramtype == LPDDR3) { 1939 ca_skew = clk_skew - 4; 1940 } else { 1941 ca_skew = clk_skew; 1942 } 1943 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3, 1944 dramtype); 1945 1946 writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233)); 1947 writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237)); 1948 writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 1949 writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 1950 ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING | 1951 READ_TRAINING | WRITE_TRAINING); 1952 if (sdram_params->ch.cap_info.rank == 2) { 1953 writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233)); 1954 writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237)); 1955 writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 1956 writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 1957 ret |= data_training(dram, 1, sdram_params, fsp, 1958 READ_GATE_TRAINING | READ_TRAINING | 1959 WRITE_TRAINING); 1960 } 1961 if (ret) 1962 goto out; 1963 1964 record_dq_prebit(dram); 1965 1966 min_val = get_min_value(dram, SKEW_RX_SIGNAL, 1967 sdram_params->ch.cap_info.rank) * -1; 1968 modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL, 1969 min_val, min_val, sdram_params->ch.cap_info.rank); 1970 1971 min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL, 1972 sdram_params->ch.cap_info.rank), 1973 get_min_value(dram, SKEW_CA_SIGNAL, 1974 sdram_params->ch.cap_info.rank)) * -1; 1975 1976 /* clk = 0, rx all skew -7, tx - min_value */ 1977 modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3, 
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* re-run gate training after the de-skew normalization above */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}

/* Program the msch deviceconf and clear grf noc_con0[1:0]. */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->deviceconf);
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}

/*
 * Derive the NoC (msch) timing registers from the controller setup.
 * bw is the data bus width in bits; bl is the burst length (MSTR[19:16]
 * holds BL/2).  burstsize encodes bytes-per-burst: 16 -> 0, 32 -> 1,
 * 64 -> 2, larger -> 3.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) == 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bw == 32) ? 2 : ((bw == 16) ? 4 : 8);

	if (sdram_params->base.dramtype == LPDDR4) {
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ?
0x1 : 0x2; 2022 sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr = 2023 3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty; 2024 } 2025 2026 writel(sdram_params->ch.noc_timings.ddrtiminga0.d32, 2027 &dram->msch->ddrtiminga0); 2028 writel(sdram_params->ch.noc_timings.ddrtimingb0.d32, 2029 &dram->msch->ddrtimingb0); 2030 writel(sdram_params->ch.noc_timings.ddrtimingc0.d32, 2031 &dram->msch->ddrtimingc0); 2032 writel(sdram_params->ch.noc_timings.devtodev0.d32, 2033 &dram->msch->devtodev0); 2034 writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode); 2035 writel(sdram_params->ch.noc_timings.ddr4timing.d32, 2036 &dram->msch->ddr4timing); 2037 } 2038 2039 static void dram_all_config(struct dram_info *dram, 2040 struct rv1126_sdram_params *sdram_params) 2041 { 2042 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2043 u32 dram_type = sdram_params->base.dramtype; 2044 void __iomem *pctl_base = dram->pctl; 2045 u32 sys_reg2 = 0; 2046 u32 sys_reg3 = 0; 2047 u64 cs_cap[2]; 2048 u32 cs_pst; 2049 2050 set_ddrconfig(dram, cap_info->ddrconfig); 2051 sdram_org_config(cap_info, &sdram_params->base, &sys_reg2, 2052 &sys_reg3, 0); 2053 writel(sys_reg2, &dram->pmugrf->os_reg[2]); 2054 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 2055 2056 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type); 2057 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type); 2058 2059 if (cap_info->rank == 2) { 2060 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2061 6 + 2; 2062 if (cs_pst > 28) 2063 cs_cap[0] = 1 << cs_pst; 2064 } 2065 2066 writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) | 2067 (((cs_cap[0] >> 20) / 64) & 0xff), 2068 &dram->msch->devicesize); 2069 update_noc_timing(dram, sdram_params); 2070 } 2071 2072 static void enable_low_power(struct dram_info *dram, 2073 struct rv1126_sdram_params *sdram_params) 2074 { 2075 void __iomem *pctl_base = dram->pctl; 2076 u32 grf_lp_con; 2077 2078 writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]); 2079 2080 if 
(sdram_params->base.dramtype == DDR4) 2081 grf_lp_con = (0x7 << 16) | (1 << 1); 2082 else if (sdram_params->base.dramtype == DDR3) 2083 grf_lp_con = (0x7 << 16) | (1 << 0); 2084 else 2085 grf_lp_con = (0x7 << 16) | (1 << 2); 2086 2087 /* en lpckdis_en */ 2088 grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9); 2089 writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con); 2090 2091 /* enable sr, pd */ 2092 if (dram->pd_idle == 0) 2093 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); 2094 else 2095 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); 2096 if (dram->sr_idle == 0) 2097 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); 2098 else 2099 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); 2100 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3)); 2101 } 2102 2103 static void ddr_set_atags(struct dram_info *dram, 2104 struct rv1126_sdram_params *sdram_params) 2105 { 2106 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2107 u32 dram_type = sdram_params->base.dramtype; 2108 void __iomem *pctl_base = dram->pctl; 2109 struct tag_serial t_serial; 2110 struct tag_ddr_mem t_ddrmem; 2111 struct tag_soc_info t_socinfo; 2112 u64 cs_cap[2]; 2113 u32 cs_pst = 0; 2114 2115 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type); 2116 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type); 2117 2118 memset(&t_serial, 0, sizeof(struct tag_serial)); 2119 2120 t_serial.version = 0; 2121 t_serial.enable = 1; 2122 t_serial.addr = CONFIG_DEBUG_UART_BASE; 2123 t_serial.baudrate = CONFIG_BAUDRATE; 2124 t_serial.m_mode = SERIAL_M_MODE_M0; 2125 t_serial.id = 2; 2126 2127 atags_destroy(); 2128 atags_set_tag(ATAG_SERIAL, &t_serial); 2129 2130 memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem)); 2131 if (cap_info->row_3_4) { 2132 cs_cap[0] = cs_cap[0] * 3 / 4; 2133 cs_cap[1] = cs_cap[1] * 3 / 4; 2134 } 2135 t_ddrmem.version = 0; 2136 t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE; 2137 if (cs_cap[1]) { 2138 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2139 6 + 2; 2140 } 
2141 2142 if (cs_cap[1] && cs_pst > 27) { 2143 t_ddrmem.count = 2; 2144 t_ddrmem.bank[1] = 1 << cs_pst; 2145 t_ddrmem.bank[2] = cs_cap[0]; 2146 t_ddrmem.bank[3] = cs_cap[1]; 2147 } else { 2148 t_ddrmem.count = 1; 2149 t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1]; 2150 } 2151 2152 atags_set_tag(ATAG_DDR_MEM, &t_ddrmem); 2153 2154 memset(&t_socinfo, 0, sizeof(struct tag_soc_info)); 2155 t_socinfo.version = 0; 2156 t_socinfo.name = 0x1126; 2157 } 2158 2159 static void print_ddr_info(struct rv1126_sdram_params *sdram_params) 2160 { 2161 u32 split; 2162 2163 if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & 2164 (1 << SPLIT_BYPASS_OFFSET)) != 0) 2165 split = 0; 2166 else 2167 split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & 2168 SPLIT_SIZE_MASK; 2169 2170 sdram_print_ddr_info(&sdram_params->ch.cap_info, 2171 &sdram_params->base, split); 2172 } 2173 2174 static int sdram_init_(struct dram_info *dram, 2175 struct rv1126_sdram_params *sdram_params, u32 post_init) 2176 { 2177 void __iomem *pctl_base = dram->pctl; 2178 void __iomem *phy_base = dram->phy; 2179 u32 ddr4_vref; 2180 u32 mr_tmp; 2181 2182 rkclk_configure_ddr(dram, sdram_params); 2183 2184 rkclk_ddr_reset(dram, 1, 1, 1, 1); 2185 udelay(10); 2186 2187 rkclk_ddr_reset(dram, 1, 1, 1, 0); 2188 phy_cfg(dram, sdram_params); 2189 2190 rkclk_ddr_reset(dram, 1, 1, 0, 0); 2191 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1); 2192 2193 rkclk_ddr_reset(dram, 1, 0, 0, 0); 2194 pctl_cfg(dram->pctl, &sdram_params->pctl_regs, 2195 dram->sr_idle, dram->pd_idle); 2196 2197 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT 2198 u32 tmp, trefi; 2199 2200 tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG); 2201 trefi = (tmp >> 16) & 0xfff; 2202 writel((tmp & 0xf000ffff) | (trefi / 2) << 16, 2203 pctl_base + DDR_PCTL2_RFSHTMG); 2204 #endif 2205 2206 /* set frequency_mode */ 2207 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); 2208 /* set target_frequency to Frequency 0 */ 2209 clrsetbits_le32(pctl_base + 
/*
 * Probe the attached DRAM's geometry (column/bank/row widths, rank
 * count, bus width) by trial accesses and read-gate training, and
 * record the result in sdram_params->ch.cap_info.
 *
 * For non-LPDDR4 types the col/bank/row sizes are detected by the
 * generic sdram_detect_* helpers starting from type-specific maxima;
 * for LPDDR4 the density is read directly from MR8.
 *
 * @channel is accepted but unused here.
 * Returns 0 on success, -1 if column/row detection fails.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	/* start the probe assuming the narrow (half) bus width */
	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe from maximal geometry */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: density is reported by the device in MR8[5:2] */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		/* odd MR8 encodings are the 6Gb/12Gb (3/4-row) parts */
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/*
	 * Disable low-power entry (SR/PD) around the training runs and
	 * restore the previous PWRCTL afterwards.
	 */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* a rank-1 read-gate training pass succeeding implies rank 2 exists */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/*
		 * Re-test rank 0 with all byte lanes enabled to decide
		 * between full and half bus width.
		 */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0)
			cap_info->bw = 2;
		else
			cap_info->bw = 1;
	}

	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* assume symmetric ranks for now; refined later by cs1-row detect */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
max_row : 2410 cap_info->cs0_row; 2411 2412 for (; row > 12; row--) { 2413 test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE + 2414 (u32)cs0_cap + 2415 (1ul << (row + bktmp + coltmp + 2416 cs_add + bw - 1ul))); 2417 2418 writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap); 2419 writel(PATTERN, test_addr); 2420 2421 if (((readl(test_addr) & byte_mask) == 2422 (PATTERN & byte_mask)) && 2423 ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) & 2424 byte_mask) == 0)) { 2425 ret = row; 2426 break; 2427 } 2428 } 2429 } 2430 2431 return ret; 2432 } 2433 2434 /* return: 0 = success, other = fail */ 2435 static int sdram_init_detect(struct dram_info *dram, 2436 struct rv1126_sdram_params *sdram_params) 2437 { 2438 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2439 u32 ret; 2440 u32 sys_reg = 0; 2441 u32 sys_reg3 = 0; 2442 2443 if (sdram_init_(dram, sdram_params, 0) != 0) 2444 return -1; 2445 2446 if (sdram_params->base.dramtype == DDR3) { 2447 writel(PATTERN, CONFIG_SYS_SDRAM_BASE); 2448 if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN) 2449 return -1; 2450 } 2451 2452 if (dram_detect_cap(dram, sdram_params, 0) != 0) 2453 return -1; 2454 2455 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, 2456 sdram_params->base.dramtype); 2457 ret = sdram_init_(dram, sdram_params, 1); 2458 if (ret != 0) 2459 goto out; 2460 2461 cap_info->cs1_row = 2462 dram_detect_cs1_row(dram, sdram_params, 0); 2463 if (cap_info->cs1_row) { 2464 sys_reg = readl(&dram->pmugrf->os_reg[2]); 2465 sys_reg3 = readl(&dram->pmugrf->os_reg[3]); 2466 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row, 2467 sys_reg, sys_reg3, 0); 2468 writel(sys_reg, &dram->pmugrf->os_reg[2]); 2469 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 2470 } 2471 2472 sdram_detect_high_row(cap_info); 2473 2474 out: 2475 return ret; 2476 } 2477 2478 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz) 2479 { 2480 u32 i; 2481 u32 offset = 0; 2482 struct ddr2_3_4_lp2_3_info *ddr_info; 2483 2484 if (!freq_mhz) { 2485 ddr_info 
/*
 * pctl registers that carry frequency-dependent timings and therefore
 * must be re-programmed into the target frequency set-point (FSP) copy
 * before a rate change.
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};

/* PHY registers (CL/CWL/AL group) updated per frequency set-point */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};

/*
 * Stage all frequency-dependent state for a rate switch into the
 * target FSP's register copies: pctl timings (under a software-change
 * request/ack pair), PHY CL/CWL/AL, drive/ODT settings, and — for
 * LPDDR4 — the mode-register values mirrored into the PHY so that
 * retraining can replay them. Finishes by refreshing NoC timings.
 *
 * @dst_fsp:     target controller frequency set-point (0..3)
 * @dst_fsp_lp4: LPDDR4 FSP select used in the MR13 encoding
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	/*
	 * The pctl_regs table is 0xFFFFFFFF-terminated and ordered the
	 * same way as pctl_need_update_reg, so the scan resumes from
	 * the last hit ("find") instead of restarting.
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* extended temperature range: halve tREFI in the target FSP */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	/* FSP 1..3 live in a per-FSP shadow region of the PHY map */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/*
		 * Write the LPDDR4 mode registers now and mirror each
		 * value into the PHY's MR shadow registers (0x17..0x1d)
		 * so training at the new rate uses matching settings.
		 */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
		       ((0x2 << 6) >> dst_fsp_lp4),
		       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
(p_fsp_param->rd_odt_down_en) 2686 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110)); 2687 else 2688 p_fsp_param->rd_odt = 0; 2689 p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112)); 2690 p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100)); 2691 p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102)); 2692 p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128)); 2693 p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105)); 2694 2695 if (sdram_params->base.dramtype == DDR3) { 2696 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2697 DDR_PCTL2_INIT3); 2698 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK; 2699 p_fsp_param->ds_pdds = temp & DDR3_DS_MASK; 2700 p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK; 2701 p_fsp_param->ca_odt = p_fsp_param->dq_odt; 2702 } else if (sdram_params->base.dramtype == DDR4) { 2703 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2704 DDR_PCTL2_INIT3); 2705 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK; 2706 p_fsp_param->ds_pdds = temp & DDR4_DS_MASK; 2707 p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK; 2708 p_fsp_param->ca_odt = p_fsp_param->dq_odt; 2709 } else if (sdram_params->base.dramtype == LPDDR3) { 2710 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2711 DDR_PCTL2_INIT4); 2712 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK; 2713 p_fsp_param->ds_pdds = temp & 0xf; 2714 2715 p_fsp_param->dq_odt = lp3_odt_value; 2716 p_fsp_param->ca_odt = p_fsp_param->dq_odt; 2717 } else if (sdram_params->base.dramtype == LPDDR4) { 2718 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2719 DDR_PCTL2_INIT4); 2720 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK; 2721 p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK; 2722 2723 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2724 DDR_PCTL2_INIT6); 2725 temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK; 2726 p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK; 2727 p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK; 2728 2729 
#ifndef CONFIG_SPL_KERNEL_BOOT
/*
 * Copy the collected per-FSP parameter table into DRAM at
 * FSP_PARAM_STORE_ADDR so later boot stages can pick it up.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
#endif
/*
 * Switch the DDR to a new frequency/set-point.
 *
 * Sequence (order is hardware-critical):
 *  1. stage timings/MRs for the target FSP (pre_set_rate)
 *  2. put current-speed DLL-off mode right if needed, disable
 *     auto-refresh, enter self-refresh
 *  3. gate msch/upctl clocks, reset the PHY, re-program DPLL and the
 *     PHY PLL to the new rate, de-reset, un-gate
 *  4. switch the controller to the target FSP, leave self-refresh
 *  5. replay the mode registers for the new speed (type-specific,
 *     including the DDR3/4 DLL-reset dance), re-enable refresh
 *  6. retrain at the new frequency and save the FSP parameters
 *
 * @freq:        target frequency in MHz
 * @cur_freq:    current frequency in MHz (unused here)
 * @dst_fsp:     target controller frequency set-point (0..3)
 * @dst_fsp_lp4: LPDDR4 FSP select for MR13
 * @training_en: accepted but unused here
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* hold off low-power entry for the duration of the switch */
	lp_stat = low_power_update(dram, 0);
	/* pick the pre-built parameter set for the target frequency */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
		PCTL2_OPERATING_MODE_MASK) ==
	       PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * MR1 bit0 encodes DLL state: DDR3 1 = DLL off, DDR4 the
	 * polarity is inverted.
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* turn the DLL off before entering SR if it is currently on */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* buffer DDRPHY signals while the PHY is reset and re-clocked */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both FSP copies */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate msch and upctl clocks while the PLLs are re-programmed */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* PHY into reset, change DPLL + PHY PLL, then release reset */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
	       &dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
		PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* select the target FSP in controller and PHY */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit5 (presumably a FIFO/pointer reset —
	 * TODO confirm against the PHY datasheet) */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* replay the mode registers for the new operating point */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* when the DLL stays on it must be reset once, then run */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				       PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				      PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* only the FSP-OP/WR bits of MR13 change here */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
/*
 * TPL entry point for DRAM bring-up.
 *
 * Fills in the static register-base table, validates the blob-provided
 * common_info header (version and per-type record sizes/offsets), runs
 * auto-detecting init with the first template config, steps through the
 * frequency set-points, stores the FSP parameters and publishes atags.
 *
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* fixed SoC register bases for this driver instance */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Sanity-check the common_info blob: index sizes are stored in
	 * 32-bit words, hence the sizeof(...) / 4 comparisons.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	/* offsets in the blob are in 32-bit words as well */
	gbl_info = (struct global_info *)((void *)common_info +
					  index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	/* detection always starts from the lowest-frequency template */
	sdram_params = &sdram_configs[0];

	/* MSTR bit10: 2T timing, taken from the blob for DDR3/DDR4 */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);

	/* walk the frequency set-points up to the final rate */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}