// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/hardware.h>
#include <asm/arch/rk_atags.h>
#include <asm/arch/cru_rv1126.h>
#include <asm/arch/grf_rv1126.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_rv1126.h>

/* define training flag */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif

#ifdef CONFIG_TPL_BUILD

/* Driver-private state: controller/PHY bases plus SoC register blocks. */
struct dram_info {
	void __iomem *pctl;	/* DDR controller (UPCTL2) base */
	void __iomem *phy;	/* DDR PHY base */
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;		/* self-refresh idle timeout */
	u32 pd_idle;		/* power-down idle timeout */
};

#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

/*
 * Per-frequency parameter sets, selected at build time by dram type
 * (3 = DDR3, 0 = DDR4, 6 = LPDDR3, 7 = LPDDR4).
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

/* Loader parameter blob; indexed via struct sdram_head_info_index_v2. */
u32 common_info[] = {
#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

/* write-leveling results, [rank][byte lane] */
static s8 wrlvl_result[2][4];

/*
 * DDR configuration 0-9
 * Encoding (see calculate_ddrconfig): bit8 = rank-1, bits7:5 = row-13,
 * bit4/bit3 = bank-geometry flags, bits2:0 = bw + col - 10.
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2)  /* 9 */
};

/*
 * DDR configuration 10-21 (DDR4 variants)
 * Encoding: bit7 = rank-1, bits6:4 = row-13, bit3 flag,
 * bits2:1 = bus width, bit0 = die width.
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0)  /* 21 */
};

/* DDR configuration 22-28 (second page, same encoding as ddr_cfg_2_rbc) */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3)  /* 28 */
};

/* Map a DDR4 ddrconfig (10-21) to its equivalent DDR3-style config. */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

/*
 * Per-ddrconfig controller address-map values, written to
 * ADDRMAP registers by set_ctl_address_map(); [config][9 words].
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}  /* 28 */
};

/* DQ line selection table; presumably {dq, rx-sel, tx-sel} — TODO confirm */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

/* address-group bases, per chip-select / half */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/* PHY register offsets of write-leveling results, [rank][byte lane] */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/* PHY register offsets for per-DQS skew updates (RX then TX, per CS) */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

/*
 * Drive the DDR controller/PHY reset request bits.
 * Controller resets go through the secure GRF (SOC_CON13), PHY resets
 * through CRU softrst_con[12]; each argument is 1 = assert, 0 = release.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

/*
 * Program the DPLL to @hz: park the DPLL on the crystal, write the
 * divider set (fbdiv computed for the 24 MHz reference, hence the /24),
 * optionally enable spread spectrum from the loader-params blob, then
 * poll up to ~1 ms for lock before switching back to the PLL output.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock-wait budget, 1 us per iteration */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* integer mode unless spread is enabled */

	gbl_info = (struct global_info *)((void *)common_info +
					  index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* choose post-dividers so the VCO stays in range for low rates */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while reprogramming the PLL */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

/*
 * Pick the ddrconfig index (0-28) matching the detected geometry
 * (rank/width/col/row/bank) by scanning the encoded tables above.
 * DDR4 results are afterwards remapped via d4_rbc_2_d3_rbc.
 */
static unsigned int
calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank: prefer configs 17-20 first */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1)
		      | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8 banks: prefer configs 5-7 */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/* first page: configs 0-8 */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* second page: configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort fallback for 1 rank / 8 bank parts */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

/* Open a quasi-dynamic register programming window (SWCTL.sw_done = 0). */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

/* Close the programming window and wait for SWSTAT to acknowledge. */
static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done=1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait programming done */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
		    PCTL2_SW_DONE_ACK)
			break;
	}
}

/*
 * Write the ADDRMAP registers for the chosen ddrconfig and mask out
 * the row bits the part does not implement.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	/* DDR4 stores the remapped config; translate back for addrmap[] */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			     ((i - 12) * 8 / 32) * 4,
			     0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: park the CS address bit */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

/*
 * Configure (or wait for) the PHY PLL. With @wait set, only power the
 * PLL up and poll for lock; otherwise program dividers for @freq (Hz).
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
postdiv = 1; 575 postdiv_en = 1; 576 } else { 577 fbdiv = 4; 578 postdiv = 0; 579 postdiv_en = 0; 580 } 581 writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50)); 582 clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK, 583 (fbdiv >> 8) & 1); 584 clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK, 585 postdiv_en << PHY_POSTDIV_EN_SHIFT); 586 587 clrsetbits_le32(PHY_REG(phy_base, 0x52), 588 PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv); 589 clrsetbits_le32(PHY_REG(phy_base, 0x53), 590 PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT, 591 postdiv << PHY_POSTDIV_SHIFT); 592 } 593 } 594 595 static const u16 d3_phy_drv_2_ohm[][2] = { 596 {PHY_DDR3_RON_455ohm, 455}, 597 {PHY_DDR3_RON_230ohm, 230}, 598 {PHY_DDR3_RON_153ohm, 153}, 599 {PHY_DDR3_RON_115ohm, 115}, 600 {PHY_DDR3_RON_91ohm, 91}, 601 {PHY_DDR3_RON_76ohm, 76}, 602 {PHY_DDR3_RON_65ohm, 65}, 603 {PHY_DDR3_RON_57ohm, 57}, 604 {PHY_DDR3_RON_51ohm, 51}, 605 {PHY_DDR3_RON_46ohm, 46}, 606 {PHY_DDR3_RON_41ohm, 41}, 607 {PHY_DDR3_RON_38ohm, 38}, 608 {PHY_DDR3_RON_35ohm, 35}, 609 {PHY_DDR3_RON_32ohm, 32}, 610 {PHY_DDR3_RON_30ohm, 30}, 611 {PHY_DDR3_RON_28ohm, 28}, 612 {PHY_DDR3_RON_27ohm, 27}, 613 {PHY_DDR3_RON_25ohm, 25}, 614 {PHY_DDR3_RON_24ohm, 24}, 615 {PHY_DDR3_RON_23ohm, 23}, 616 {PHY_DDR3_RON_22ohm, 22}, 617 {PHY_DDR3_RON_21ohm, 21}, 618 {PHY_DDR3_RON_20ohm, 20} 619 }; 620 621 static u16 d3_phy_odt_2_ohm[][2] = { 622 {PHY_DDR3_RTT_DISABLE, 0}, 623 {PHY_DDR3_RTT_561ohm, 561}, 624 {PHY_DDR3_RTT_282ohm, 282}, 625 {PHY_DDR3_RTT_188ohm, 188}, 626 {PHY_DDR3_RTT_141ohm, 141}, 627 {PHY_DDR3_RTT_113ohm, 113}, 628 {PHY_DDR3_RTT_94ohm, 94}, 629 {PHY_DDR3_RTT_81ohm, 81}, 630 {PHY_DDR3_RTT_72ohm, 72}, 631 {PHY_DDR3_RTT_64ohm, 64}, 632 {PHY_DDR3_RTT_58ohm, 58}, 633 {PHY_DDR3_RTT_52ohm, 52}, 634 {PHY_DDR3_RTT_48ohm, 48}, 635 {PHY_DDR3_RTT_44ohm, 44}, 636 {PHY_DDR3_RTT_41ohm, 41}, 637 {PHY_DDR3_RTT_38ohm, 38}, 638 {PHY_DDR3_RTT_37ohm, 37}, 639 {PHY_DDR3_RTT_34ohm, 34}, 640 {PHY_DDR3_RTT_32ohm, 32}, 641 {PHY_DDR3_RTT_31ohm, 31}, 642 
{PHY_DDR3_RTT_29ohm, 29}, 643 {PHY_DDR3_RTT_28ohm, 28}, 644 {PHY_DDR3_RTT_27ohm, 27}, 645 {PHY_DDR3_RTT_25ohm, 25} 646 }; 647 648 static u16 d4lp3_phy_drv_2_ohm[][2] = { 649 {PHY_DDR4_LPDDR3_RON_482ohm, 482}, 650 {PHY_DDR4_LPDDR3_RON_244ohm, 244}, 651 {PHY_DDR4_LPDDR3_RON_162ohm, 162}, 652 {PHY_DDR4_LPDDR3_RON_122ohm, 122}, 653 {PHY_DDR4_LPDDR3_RON_97ohm, 97}, 654 {PHY_DDR4_LPDDR3_RON_81ohm, 81}, 655 {PHY_DDR4_LPDDR3_RON_69ohm, 69}, 656 {PHY_DDR4_LPDDR3_RON_61ohm, 61}, 657 {PHY_DDR4_LPDDR3_RON_54ohm, 54}, 658 {PHY_DDR4_LPDDR3_RON_48ohm, 48}, 659 {PHY_DDR4_LPDDR3_RON_44ohm, 44}, 660 {PHY_DDR4_LPDDR3_RON_40ohm, 40}, 661 {PHY_DDR4_LPDDR3_RON_37ohm, 37}, 662 {PHY_DDR4_LPDDR3_RON_34ohm, 34}, 663 {PHY_DDR4_LPDDR3_RON_32ohm, 32}, 664 {PHY_DDR4_LPDDR3_RON_30ohm, 30}, 665 {PHY_DDR4_LPDDR3_RON_28ohm, 28}, 666 {PHY_DDR4_LPDDR3_RON_27ohm, 27}, 667 {PHY_DDR4_LPDDR3_RON_25ohm, 25}, 668 {PHY_DDR4_LPDDR3_RON_24ohm, 24}, 669 {PHY_DDR4_LPDDR3_RON_23ohm, 23}, 670 {PHY_DDR4_LPDDR3_RON_22ohm, 22}, 671 {PHY_DDR4_LPDDR3_RON_21ohm, 21} 672 }; 673 674 static u16 d4lp3_phy_odt_2_ohm[][2] = { 675 {PHY_DDR4_LPDDR3_RTT_DISABLE, 0}, 676 {PHY_DDR4_LPDDR3_RTT_586ohm, 586}, 677 {PHY_DDR4_LPDDR3_RTT_294ohm, 294}, 678 {PHY_DDR4_LPDDR3_RTT_196ohm, 196}, 679 {PHY_DDR4_LPDDR3_RTT_148ohm, 148}, 680 {PHY_DDR4_LPDDR3_RTT_118ohm, 118}, 681 {PHY_DDR4_LPDDR3_RTT_99ohm, 99}, 682 {PHY_DDR4_LPDDR3_RTT_85ohm, 58}, 683 {PHY_DDR4_LPDDR3_RTT_76ohm, 76}, 684 {PHY_DDR4_LPDDR3_RTT_67ohm, 67}, 685 {PHY_DDR4_LPDDR3_RTT_60ohm, 60}, 686 {PHY_DDR4_LPDDR3_RTT_55ohm, 55}, 687 {PHY_DDR4_LPDDR3_RTT_50ohm, 50}, 688 {PHY_DDR4_LPDDR3_RTT_46ohm, 46}, 689 {PHY_DDR4_LPDDR3_RTT_43ohm, 43}, 690 {PHY_DDR4_LPDDR3_RTT_40ohm, 40}, 691 {PHY_DDR4_LPDDR3_RTT_38ohm, 38}, 692 {PHY_DDR4_LPDDR3_RTT_36ohm, 36}, 693 {PHY_DDR4_LPDDR3_RTT_34ohm, 34}, 694 {PHY_DDR4_LPDDR3_RTT_32ohm, 32}, 695 {PHY_DDR4_LPDDR3_RTT_31ohm, 31}, 696 {PHY_DDR4_LPDDR3_RTT_29ohm, 29}, 697 {PHY_DDR4_LPDDR3_RTT_28ohm, 28}, 698 {PHY_DDR4_LPDDR3_RTT_27ohm, 27} 699 }; 700 701 
/* LPDDR4 PHY drive/ODT tables; descending ohm order (see note above tables). */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

/* Map a requested DQ ODT (ohm) to the LPDDR4 MR DQ-ODT field value. */
static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

/*
 * Return a pointer into common_info[] at the drive/ODT parameter block
 * for @dramtype, or NULL (0) with a console message if unsupported.
 */
static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else if (dramtype == LPDDR4X)
		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

/*
 * Program the DRAM-side CA (MR12) and DQ (MR14) VREF for LPDDR4/4X in
 * the INIT6/INIT7 registers of @dst_fsp. Raw mV-style values from the
 * parameter blob are clamped and converted to the MR range/step encoding
 * (range bit in bit6); LPDDR4X uses a scaled window (the *11/6 branch).
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	/* odt-off vs odt-on values switch at the configured enable freq */
	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		/* bit6 selects VREF range; 4 units per step */
		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref
			= 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* MR12/MR14 live in quasi-dynamic INIT6/INIT7: gate with sw_done */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

/*
 * Configure drive strength, ODT and slew rate on both the PHY and the
 * DRAM (via mode registers) for @dst_fsp, based on the per-type
 * parameter blob and the target frequency.
 */
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;	/* same blob, lp4 view */

	if (!ddr_info)
		return;

	/* dram odt en freq control phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info); 904 } else { 905 drv_info = ddr_info->drv_when_odten; 906 dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info); 907 sr_info = ddr_info->sr_when_odten; 908 phy_lp4_drv_pd_en = 909 PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info); 910 } 911 phy_dq_drv_ohm = 912 DRV_INFO_PHY_DQ_DRV(drv_info); 913 phy_clk_drv_ohm = 914 DRV_INFO_PHY_CLK_DRV(drv_info); 915 phy_ca_drv_ohm = 916 DRV_INFO_PHY_CA_DRV(drv_info); 917 918 sr_dq = DQ_SR_INFO(sr_info); 919 sr_clk = CLK_SR_INFO(sr_info); 920 921 /* phy odt en freq control dram drv and phy odt */ 922 if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) { 923 dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff); 924 lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info); 925 phy_odt_ohm = 0; 926 phy_odt_up_en = 0; 927 phy_odt_dn_en = 0; 928 } else { 929 dram_drv_ohm = 930 DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten); 931 phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info); 932 phy_odt_up_en = 933 ODT_INFO_PULLUP_EN(ddr_info->odt_info); 934 phy_odt_dn_en = 935 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info); 936 lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info); 937 } 938 939 if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 940 if (phy_odt_ohm) { 941 phy_odt_up_en = 0; 942 phy_odt_dn_en = 1; 943 } 944 if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq)) 945 dram_caodt_ohm = 0; 946 else 947 dram_caodt_ohm = 948 ODT_INFO_LP4_CA_ODT(lp4_info->odt_info); 949 } 950 951 if (dramtype == DDR3) { 952 p_drv = d3_phy_drv_2_ohm; 953 p_odt = d3_phy_odt_2_ohm; 954 } else if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 955 p_drv = lp4_phy_drv_2_ohm; 956 p_odt = lp4_phy_odt_2_ohm; 957 } else { 958 p_drv = d4lp3_phy_drv_2_ohm; 959 p_odt = d4lp3_phy_odt_2_ohm; 960 } 961 962 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) { 963 if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) { 964 phy_dq_drv = **(p_drv + i); 965 break; 966 } 967 if (i == 0) 968 break; 969 } 970 for (i = 
ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) { 971 if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) { 972 phy_clk_drv = **(p_drv + i); 973 break; 974 } 975 if (i == 0) 976 break; 977 } 978 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) { 979 if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) { 980 phy_ca_drv = **(p_drv + i); 981 break; 982 } 983 if (i == 0) 984 break; 985 } 986 if (!phy_odt_ohm) 987 phy_odt = 0; 988 else 989 for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) { 990 if (phy_odt_ohm <= *(*(p_odt + i) + 1)) { 991 phy_odt = **(p_odt + i); 992 break; 993 } 994 if (i == 0) 995 break; 996 } 997 998 if (dramtype != LPDDR4 && dramtype != LPDDR4X) { 999 if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en)) 1000 vref_inner = 0x80; 1001 else if (phy_odt_up_en) 1002 vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 / 1003 (dram_drv_ohm + phy_odt_ohm); 1004 else 1005 vref_inner = phy_odt_ohm * 128 / 1006 (phy_odt_ohm + dram_drv_ohm); 1007 1008 if (dramtype != DDR3 && dram_odt_ohm) 1009 vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 / 1010 (phy_dq_drv_ohm + dram_odt_ohm); 1011 else 1012 vref_out = 0x80; 1013 } else { 1014 /* for lp4 and lp4x*/ 1015 if (phy_odt_ohm) 1016 vref_inner = 1017 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) * 1018 256) / 1000; 1019 else 1020 vref_inner = 1021 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) * 1022 256) / 1000; 1023 1024 vref_out = 0x80; 1025 } 1026 1027 /* default ZQCALIB bypass mode */ 1028 clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv); 1029 clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv); 1030 clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv); 1031 clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv); 1032 if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 1033 clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv); 1034 clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv); 1035 } else { 1036 clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv); 1037 
clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv); 1038 } 1039 /* clk / cmd slew rate */ 1040 clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk); 1041 1042 phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1; 1043 if (phy_odt_up_en) 1044 phy_odt_up = phy_odt; 1045 if (phy_odt_dn_en) 1046 phy_odt_dn = phy_odt; 1047 1048 for (i = 0; i < 4; i++) { 1049 j = 0x110 + i * 0x10; 1050 clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up); 1051 clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn); 1052 clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv); 1053 clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv); 1054 writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10)); 1055 1056 clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), 1057 1 << 3, phy_lp4_drv_pd_en << 3); 1058 if (dramtype == LPDDR4 || dramtype == LPDDR4X) 1059 clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5)); 1060 /* dq slew rate */ 1061 clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10), 1062 0x1f, sr_dq); 1063 } 1064 1065 /* reg_rx_vref_value_update */ 1066 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1067 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 1068 1069 /* RAM VREF */ 1070 writel(vref_out, PHY_REG(phy_base, 0x105)); 1071 if (dramtype == LPDDR3) 1072 udelay(100); 1073 1074 if (dramtype == LPDDR4 || dramtype == LPDDR4X) 1075 set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype); 1076 1077 if (dramtype == DDR3 || dramtype == DDR4) { 1078 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1079 DDR_PCTL2_INIT3); 1080 mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK; 1081 } else { 1082 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1083 DDR_PCTL2_INIT4); 1084 mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK; 1085 } 1086 1087 if (dramtype == DDR3) { 1088 mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK); 1089 if (dram_drv_ohm == 34) 1090 mr1_mr3 |= DDR3_DS_34; 1091 1092 if (dram_odt_ohm == 0) 1093 mr1_mr3 |= DDR3_RTT_NOM_DIS; 
1094 else if (dram_odt_ohm <= 40) 1095 mr1_mr3 |= DDR3_RTT_NOM_40; 1096 else if (dram_odt_ohm <= 60) 1097 mr1_mr3 |= DDR3_RTT_NOM_60; 1098 else 1099 mr1_mr3 |= DDR3_RTT_NOM_120; 1100 1101 } else if (dramtype == DDR4) { 1102 mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK); 1103 if (dram_drv_ohm == 48) 1104 mr1_mr3 |= DDR4_DS_48; 1105 1106 if (dram_odt_ohm == 0) 1107 mr1_mr3 |= DDR4_RTT_NOM_DIS; 1108 else if (dram_odt_ohm <= 34) 1109 mr1_mr3 |= DDR4_RTT_NOM_34; 1110 else if (dram_odt_ohm <= 40) 1111 mr1_mr3 |= DDR4_RTT_NOM_40; 1112 else if (dram_odt_ohm <= 48) 1113 mr1_mr3 |= DDR4_RTT_NOM_48; 1114 else if (dram_odt_ohm <= 60) 1115 mr1_mr3 |= DDR4_RTT_NOM_60; 1116 else 1117 mr1_mr3 |= DDR4_RTT_NOM_120; 1118 1119 } else if (dramtype == LPDDR3) { 1120 if (dram_drv_ohm <= 34) 1121 mr1_mr3 |= LPDDR3_DS_34; 1122 else if (dram_drv_ohm <= 40) 1123 mr1_mr3 |= LPDDR3_DS_40; 1124 else if (dram_drv_ohm <= 48) 1125 mr1_mr3 |= LPDDR3_DS_48; 1126 else if (dram_drv_ohm <= 60) 1127 mr1_mr3 |= LPDDR3_DS_60; 1128 else if (dram_drv_ohm <= 80) 1129 mr1_mr3 |= LPDDR3_DS_80; 1130 1131 if (dram_odt_ohm == 0) 1132 lp3_odt_value = LPDDR3_ODT_DIS; 1133 else if (dram_odt_ohm <= 60) 1134 lp3_odt_value = LPDDR3_ODT_60; 1135 else if (dram_odt_ohm <= 120) 1136 lp3_odt_value = LPDDR3_ODT_120; 1137 else 1138 lp3_odt_value = LPDDR3_ODT_240; 1139 } else {/* for lpddr4 and lpddr4x */ 1140 /* MR3 for lp4 PU-CAL and PDDS */ 1141 mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK); 1142 mr1_mr3 |= lp4_pu_cal; 1143 1144 tmp = lp4_odt_calc(dram_drv_ohm); 1145 if (!tmp) 1146 tmp = LPDDR4_PDDS_240; 1147 mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT); 1148 1149 /* MR11 for lp4 ca odt, dq odt set */ 1150 mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1151 DDR_PCTL2_INIT6); 1152 mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK; 1153 1154 mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK); 1155 1156 tmp = lp4_odt_calc(dram_odt_ohm); 1157 mr11 |= (tmp << LPDDR4_DQODT_SHIFT); 1158 1159 tmp = 
		lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	/* Write the selected MR1(DDR3/4) or MR3(LPDDRx) value back to the
	 * controller INIT register for the target frequency set point.
	 */
	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

/*
 * Program the PHY command/DQ byte-lane remap (PHY reg 0x4f) from the
 * byte_map table stored in the global common_info blob.  LPDDR4X shares
 * the LPDDR4 map.  Always returns 0.
 */
static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* dq_map_index.offset is in 32-bit words relative to common_info */
	map_info = (struct dq_map_info *)((void *)common_info
		   + index->dq_map_index.offset * 4);

	if (dramtype == LPDDR4X)
		dramtype = LPDDR4;

	/* one packed byte-map entry per dram type, 8 bits each */
	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

/*
 * Apply the static PHY register table for the target frequency, derive the
 * active byte lanes from the remap value and the configured bus width, and
 * set up training-related vref/ODT defaults.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* phy_regs table is terminated by an 0xFFFFFFFF address entry */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes carry logical bytes 0 and 1 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* enable byte lanes per bus width: bw==2 -> 32bit, 1 -> 16bit, else 8bit */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

/* Toggle RFSHCTL3 bit 1 so the controller reloads refresh parameters. */
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	/* XOR toggles the refresh-update bit; value itself is irrelevant */
	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 *
 * Issue an MRR through the controller and return the raw MR byte.  For
 * LPDDR2/LPDDR3 the bits arrive in pad order, so they are re-shuffled
 * through the board dq map from common_info; LPDDR4 data is taken
 * directly from the second grf status register.
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		   index->dq_map_index.offset * 4);

	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		/* remap each result bit i to its board-routed position */
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}

/* before call this function autorefresh should be disabled */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait until no rank-refresh command is pending, then trigger both */
	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

/*
 * Software-controlled self refresh entry (en != 0) / exit (en == 0).
 * Busy-waits on PCTL2_STAT until the controller reports the requested
 * operating mode; no timeout (hang here indicates a hardware problem).
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
			PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

/*
 * Snapshot the per-group left/right loop inverse-delay selects into their
 * holding registers for later prebit updates, for all four byte groups.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

/* Pulse the RX prebit update strobe (PHY 0x70 bit 4). */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

/* Pulse the TX prebit update strobe (PHY 0xc bit 6). */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

/* Pulse the CA prebit update strobe (PHY 0x22 bit 6). */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val - delta_*
 * delta_dir: value for differential signal: clk/
 * delta_sig: value for single signal: ca/cmd
 */
static void
modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
		 int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	/* cs: 0 -> cs0 only, 2 -> cs1 only, otherwise both ranks */
	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/* temporarily block DFI low-power entry while skews change (lp4/lp4x) */
	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	/* 0x150..0x16f: per-signal CA/CMD de-skew registers */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/* differential clock pair (0x150+0x17/0x18) uses delta_dif */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

/*
 * Return the minimum de-skew value currently programmed for the given
 * signal class (rx/tx/ca) across the enabled byte lanes of all ranks.
 */
static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

/*
 * en != 0: enable the given low-power bits in PWRCTL and return 0.
 * en == 0: disable all low-power bits and return the previous setting so
 *          the caller can restore it later.
 */
static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal:
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val - delta_*
 * delta_dir: value for differential signal: dqs
 * delta_sig: value for single signal: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		/* entries 0..8: dq0-7 + dm */
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
								dqs_dq_skew_adr[j] +
								i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		/* entries 9/0xa: differential dqs pair */
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

/*
 * Read-gate training for one chip select.  Returns 0 on success, non-zero
 * if any enabled byte lane failed to lock.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	/*
	 * FIX: was "dramtype != LPDDR4 || dramtype != LPDDR4X", which is a
	 * tautology (always true), so the temporary RTT override was also
	 * applied to LPDDR4/LPDDR4X.  Use && so only non-LPDDR4 types get
	 * the training-time ODT values.
	 */
	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* non-zero iff an enabled lane did not report done */
	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	/* FIX: same tautological condition as above, restore site */
	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}

/*
 * Write-leveling for one chip select.  Hangs on purpose if the PHY never
 * reports completion (fatal in TPL).  Always returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff,
	       PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until every enabled byte lane reports leveling done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			/* deliberate hang: DRAM is unusable at this point */
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

/* Test pattern used by the read/write training engines. */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};

/*
 * Automatic read training for one chip select (cs must be 0 or 1).
 * Returns 0 on success, -1 on invalid cs, timeout, or a reported
 * training error.  For DDR3 with the default vref the receiver vref is
 * temporarily lowered by 0xa and restored afterwards.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		   index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the temporarily lowered DDR3 receiver vref */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}

/*
 * Automatic write training for one chip select.  Returns 0 on success,
 * -1 on a reported training error; hangs on timeout.  For low-speed
 * LPDDR3 the CL/CWL are temporarily forced to 8/4 and restored at the
 * end.  The resulting LPDDR4 write vref is saved into fsp_param for DFS.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			/* deliberate hang: DRAM is unusable at this point */
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the temporarily forced LPDDR3 CL/CWL and mode register */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}

/*
 * Run the requested training steps (bitmask, FULL_TRAINING expands to
 * everything except CA training) for one chip select.  Stops at the
 * first failing step and returns its result.
 */
static int data_training(struct dram_info *dram, u32 cs,
			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
			 u32 training_flag)
{
	u32 ret = 0;

	if (training_flag == FULL_TRAINING)
		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
				WRITE_TRAINING | READ_TRAINING;

	if ((training_flag &
WRITE_LEVELING) == WRITE_LEVELING) { 1927 ret = data_training_wl(dram, cs, 1928 sdram_params->base.dramtype, 1929 sdram_params->ch.cap_info.rank); 1930 if (ret != 0) 1931 goto out; 1932 } 1933 1934 if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) { 1935 ret = data_training_rg(dram, cs, 1936 sdram_params->base.dramtype); 1937 if (ret != 0) 1938 goto out; 1939 } 1940 1941 if ((training_flag & READ_TRAINING) == READ_TRAINING) { 1942 ret = data_training_rd(dram, cs, 1943 sdram_params->base.dramtype, 1944 sdram_params->base.ddr_freq); 1945 if (ret != 0) 1946 goto out; 1947 } 1948 1949 if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) { 1950 ret = data_training_wr(dram, cs, 1951 sdram_params->base.dramtype, 1952 sdram_params->base.ddr_freq, dst_fsp); 1953 if (ret != 0) 1954 goto out; 1955 } 1956 1957 out: 1958 return ret; 1959 } 1960 1961 static int get_wrlvl_val(struct dram_info *dram, 1962 struct rv1126_sdram_params *sdram_params) 1963 { 1964 int i, j, clk_skew; 1965 void __iomem *phy_base = dram->phy; 1966 u32 lp_stat; 1967 int ret; 1968 1969 lp_stat = low_power_update(dram, 0); 1970 1971 clk_skew = 0x1f; 1972 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3, 1973 sdram_params->base.dramtype); 1974 1975 ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING); 1976 if (sdram_params->ch.cap_info.rank == 2) 1977 ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING); 1978 1979 for (j = 0; j < 2; j++) 1980 for (i = 0; i < 4; i++) 1981 wrlvl_result[j][i] = 1982 (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) - 1983 clk_skew; 1984 1985 low_power_update(dram, lp_stat); 1986 1987 return ret; 1988 } 1989 1990 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 1991 static void init_rw_trn_result_struct(struct rw_trn_result *result, 1992 void __iomem *phy_base, u8 cs_num) 1993 { 1994 int i; 1995 1996 result->cs_num = cs_num; 1997 result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) & 1998 PHY_DQ_WIDTH_MASK; 1999 for (i = 0; i < 
FSP_NUM; i++) 2000 result->fsp_mhz[i] = 0; 2001 } 2002 2003 static void save_rw_trn_min_max(void __iomem *phy_base, 2004 struct cs_rw_trn_result *rd_result, 2005 struct cs_rw_trn_result *wr_result, 2006 u8 byte_en) 2007 { 2008 u16 phy_ofs; 2009 u8 dqs; 2010 u8 dq; 2011 2012 for (dqs = 0; dqs < BYTE_NUM; dqs++) { 2013 if ((byte_en & BIT(dqs)) == 0) 2014 continue; 2015 2016 /* Channel A or B (low or high 16 bit) */ 2017 phy_ofs = dqs < 2 ? 0x230 : 0x2b0; 2018 /* low or high 8 bit */ 2019 phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9; 2020 for (dq = 0; dq < 8; dq++) { 2021 rd_result->dqs[dqs].dq_min[dq] = 2022 readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq)); 2023 rd_result->dqs[dqs].dq_max[dq] = 2024 readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq)); 2025 wr_result->dqs[dqs].dq_min[dq] = 2026 readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq)); 2027 wr_result->dqs[dqs].dq_max[dq] = 2028 readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq)); 2029 } 2030 } 2031 } 2032 2033 static void save_rw_trn_deskew(void __iomem *phy_base, 2034 struct fsp_rw_trn_result *result, u8 cs_num, 2035 int min_val, bool rw) 2036 { 2037 u16 phy_ofs; 2038 u8 cs; 2039 u8 dq; 2040 2041 result->min_val = min_val; 2042 2043 for (cs = 0; cs < cs_num; cs++) { 2044 phy_ofs = cs == 0 ? 0x170 : 0x1a0; 2045 phy_ofs += rw == SKEW_RX_SIGNAL ? 
0x1 : 0x17; 2046 for (dq = 0; dq < 8; dq++) { 2047 result->cs[cs].dqs[0].dq_deskew[dq] = 2048 readb(PHY_REG(phy_base, phy_ofs + dq)); 2049 result->cs[cs].dqs[1].dq_deskew[dq] = 2050 readb(PHY_REG(phy_base, phy_ofs + 0xb + dq)); 2051 result->cs[cs].dqs[2].dq_deskew[dq] = 2052 readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq)); 2053 result->cs[cs].dqs[3].dq_deskew[dq] = 2054 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq)); 2055 } 2056 2057 result->cs[cs].dqs[0].dqs_deskew = 2058 readb(PHY_REG(phy_base, phy_ofs + 0x8)); 2059 result->cs[cs].dqs[1].dqs_deskew = 2060 readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8)); 2061 result->cs[cs].dqs[2].dqs_deskew = 2062 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8)); 2063 result->cs[cs].dqs[3].dqs_deskew = 2064 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8)); 2065 } 2066 } 2067 2068 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result) 2069 { 2070 result->flag = DDR_DQ_EYE_FLAG; 2071 memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result)); 2072 } 2073 #endif 2074 2075 static int high_freq_training(struct dram_info *dram, 2076 struct rv1126_sdram_params *sdram_params, 2077 u32 fsp) 2078 { 2079 u32 i, j; 2080 void __iomem *phy_base = dram->phy; 2081 u32 dramtype = sdram_params->base.dramtype; 2082 int min_val; 2083 int dqs_skew, clk_skew, ca_skew; 2084 u8 byte_en; 2085 int ret; 2086 2087 byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK; 2088 dqs_skew = 0; 2089 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) { 2090 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) { 2091 if ((byte_en & BIT(i)) != 0) 2092 dqs_skew += wrlvl_result[j][i]; 2093 } 2094 } 2095 dqs_skew = dqs_skew / 2096 (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw)); 2097 2098 clk_skew = 0x20 - dqs_skew; 2099 dqs_skew = 0x20; 2100 2101 if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 2102 min_val = 0xff; 2103 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) 2104 for (i = 0; i < 
sdram_params->ch.cap_info.bw; i++) 2105 min_val = MIN(wrlvl_result[j][i], min_val); 2106 2107 if (min_val < 0) { 2108 clk_skew = -min_val; 2109 ca_skew = -min_val; 2110 } else { 2111 clk_skew = 0; 2112 ca_skew = 0; 2113 } 2114 } else if (dramtype == LPDDR3) { 2115 ca_skew = clk_skew - 4; 2116 } else { 2117 ca_skew = clk_skew; 2118 } 2119 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3, 2120 dramtype); 2121 2122 writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233)); 2123 writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237)); 2124 writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 2125 writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 2126 ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING | 2127 READ_TRAINING | WRITE_TRAINING); 2128 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 2129 rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq; 2130 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0], 2131 &rw_trn_result.wr_fsp[fsp].cs[0], 2132 rw_trn_result.byte_en); 2133 #endif 2134 if (sdram_params->ch.cap_info.rank == 2) { 2135 writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233)); 2136 writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237)); 2137 writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 2138 writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 2139 ret |= data_training(dram, 1, sdram_params, fsp, 2140 READ_GATE_TRAINING | READ_TRAINING | 2141 WRITE_TRAINING); 2142 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 2143 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1], 2144 &rw_trn_result.wr_fsp[fsp].cs[1], 2145 rw_trn_result.byte_en); 2146 #endif 2147 } 2148 if (ret) 2149 goto out; 2150 2151 record_dq_prebit(dram); 2152 2153 min_val = get_min_value(dram, SKEW_RX_SIGNAL, 2154 sdram_params->ch.cap_info.rank) * -1; 2155 modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL, 2156 min_val, min_val, 
sdram_params->ch.cap_info.rank); 2157 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 2158 save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp], 2159 rw_trn_result.cs_num, (u8)(min_val * (-1)), 2160 SKEW_RX_SIGNAL); 2161 #endif 2162 2163 min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL, 2164 sdram_params->ch.cap_info.rank), 2165 get_min_value(dram, SKEW_CA_SIGNAL, 2166 sdram_params->ch.cap_info.rank)) * -1; 2167 2168 /* clk = 0, rx all skew -7, tx - min_value */ 2169 modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3, 2170 dramtype); 2171 2172 modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL, 2173 min_val, min_val, sdram_params->ch.cap_info.rank); 2174 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 2175 save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp], 2176 rw_trn_result.cs_num, (u8)(min_val * (-1)), 2177 SKEW_TX_SIGNAL); 2178 #endif 2179 2180 ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING); 2181 if (sdram_params->ch.cap_info.rank == 2) 2182 ret |= data_training(dram, 1, sdram_params, 0, 2183 READ_GATE_TRAINING); 2184 out: 2185 return ret; 2186 } 2187 2188 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig) 2189 { 2190 writel(ddrconfig, &dram->msch->deviceconf); 2191 clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0); 2192 } 2193 2194 static void update_noc_timing(struct dram_info *dram, 2195 struct rv1126_sdram_params *sdram_params) 2196 { 2197 void __iomem *pctl_base = dram->pctl; 2198 u32 bw, bl; 2199 2200 bw = 8 << sdram_params->ch.cap_info.bw; 2201 bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2; 2202 2203 /* update the noc timing related to data bus width */ 2204 if ((bw / 8 * bl) <= 16) 2205 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0; 2206 else if ((bw / 8 * bl) == 32) 2207 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1; 2208 else if ((bw / 8 * bl) == 64) 2209 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2; 2210 else 2211 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3; 
2212 2213 sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty = 2214 (bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4; 2215 2216 if (sdram_params->base.dramtype == LPDDR4 || 2217 sdram_params->base.dramtype == LPDDR4X) { 2218 sdram_params->ch.noc_timings.ddrmode.b.mwrsize = 2219 (bw == 16) ? 0x1 : 0x2; 2220 sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr = 2221 3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty; 2222 } 2223 2224 writel(sdram_params->ch.noc_timings.ddrtiminga0.d32, 2225 &dram->msch->ddrtiminga0); 2226 writel(sdram_params->ch.noc_timings.ddrtimingb0.d32, 2227 &dram->msch->ddrtimingb0); 2228 writel(sdram_params->ch.noc_timings.ddrtimingc0.d32, 2229 &dram->msch->ddrtimingc0); 2230 writel(sdram_params->ch.noc_timings.devtodev0.d32, 2231 &dram->msch->devtodev0); 2232 writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode); 2233 writel(sdram_params->ch.noc_timings.ddr4timing.d32, 2234 &dram->msch->ddr4timing); 2235 } 2236 2237 static int split_setup(struct dram_info *dram, 2238 struct rv1126_sdram_params *sdram_params) 2239 { 2240 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2241 u32 dramtype = sdram_params->base.dramtype; 2242 u32 split_size, split_mode; 2243 u64 cs_cap[2], cap; 2244 2245 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype); 2246 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype); 2247 /* only support the larger cap is in low 16bit */ 2248 if (cap_info->cs0_high16bit_row < cap_info->cs0_row) { 2249 cap = cs_cap[0] / (1 << (cap_info->cs0_row - 2250 cap_info->cs0_high16bit_row)); 2251 } else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) && 2252 (cap_info->rank == 2)) { 2253 if (!cap_info->cs1_high16bit_row) 2254 cap = cs_cap[0]; 2255 else 2256 cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row - 2257 cap_info->cs1_high16bit_row)); 2258 } else { 2259 goto out; 2260 } 2261 split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK; 2262 if (cap_info->bw == 2) 2263 split_mode = 
SPLIT_MODE_32_L16_VALID; 2264 else 2265 split_mode = SPLIT_MODE_16_L8_VALID; 2266 2267 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con, 2268 (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) | 2269 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) | 2270 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET), 2271 (split_mode << SPLIT_MODE_OFFSET) | 2272 (0x0 << SPLIT_BYPASS_OFFSET) | 2273 (split_size << SPLIT_SIZE_OFFSET)); 2274 2275 rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2, 2276 MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT, 2277 0x0 << MSCH_AXI_BYPASS_ALL_SHIFT); 2278 2279 out: 2280 return 0; 2281 } 2282 2283 static void split_bypass(struct dram_info *dram) 2284 { 2285 if ((readl(&dram->ddrgrf->grf_ddrsplit_con) & 2286 (1 << SPLIT_BYPASS_OFFSET)) != 0) 2287 return; 2288 2289 /* bypass split */ 2290 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con, 2291 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) | 2292 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET), 2293 (0x1 << SPLIT_BYPASS_OFFSET) | 2294 (0x0 << SPLIT_SIZE_OFFSET)); 2295 } 2296 2297 static void dram_all_config(struct dram_info *dram, 2298 struct rv1126_sdram_params *sdram_params) 2299 { 2300 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2301 u32 dram_type = sdram_params->base.dramtype; 2302 void __iomem *pctl_base = dram->pctl; 2303 u32 sys_reg2 = 0; 2304 u32 sys_reg3 = 0; 2305 u64 cs_cap[2]; 2306 u32 cs_pst; 2307 2308 set_ddrconfig(dram, cap_info->ddrconfig); 2309 sdram_org_config(cap_info, &sdram_params->base, &sys_reg2, 2310 &sys_reg3, 0); 2311 writel(sys_reg2, &dram->pmugrf->os_reg[2]); 2312 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 2313 2314 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type); 2315 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type); 2316 2317 if (cap_info->rank == 2) { 2318 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2319 6 + 2; 2320 if (cs_pst > 28) 2321 cs_cap[0] = 1llu << cs_pst; 2322 } 2323 2324 writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) | 2325 (((cs_cap[0] >> 20) / 64) & 0xff), 
2326 &dram->msch->devicesize); 2327 update_noc_timing(dram, sdram_params); 2328 } 2329 2330 static void enable_low_power(struct dram_info *dram, 2331 struct rv1126_sdram_params *sdram_params) 2332 { 2333 void __iomem *pctl_base = dram->pctl; 2334 u32 grf_lp_con; 2335 2336 writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]); 2337 2338 if (sdram_params->base.dramtype == DDR4) 2339 grf_lp_con = (0x7 << 16) | (1 << 1); 2340 else if (sdram_params->base.dramtype == DDR3) 2341 grf_lp_con = (0x7 << 16) | (1 << 0); 2342 else 2343 grf_lp_con = (0x7 << 16) | (1 << 2); 2344 2345 /* en lpckdis_en */ 2346 grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9); 2347 writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con); 2348 2349 /* enable sr, pd */ 2350 if (dram->pd_idle == 0) 2351 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); 2352 else 2353 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1)); 2354 if (dram->sr_idle == 0) 2355 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); 2356 else 2357 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1); 2358 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3)); 2359 } 2360 2361 static void ddr_set_atags(struct dram_info *dram, 2362 struct rv1126_sdram_params *sdram_params) 2363 { 2364 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2365 u32 dram_type = sdram_params->base.dramtype; 2366 void __iomem *pctl_base = dram->pctl; 2367 struct tag_serial t_serial; 2368 struct tag_ddr_mem t_ddrmem; 2369 struct tag_soc_info t_socinfo; 2370 u64 cs_cap[2]; 2371 u32 cs_pst = 0; 2372 u32 split, split_size; 2373 u64 reduce_cap = 0; 2374 2375 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type); 2376 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type); 2377 2378 memset(&t_serial, 0, sizeof(struct tag_serial)); 2379 2380 t_serial.version = 0; 2381 t_serial.enable = 1; 2382 t_serial.addr = CONFIG_DEBUG_UART_BASE; 2383 t_serial.baudrate = CONFIG_BAUDRATE; 2384 t_serial.m_mode = SERIAL_M_MODE_M0; 2385 t_serial.id = 2; 2386 2387 atags_destroy(); 2388 
atags_set_tag(ATAG_SERIAL, &t_serial); 2389 2390 split = readl(&dram->ddrgrf->grf_ddrsplit_con); 2391 memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem)); 2392 if (cap_info->row_3_4) { 2393 cs_cap[0] = cs_cap[0] * 3 / 4; 2394 cs_cap[1] = cs_cap[1] * 3 / 4; 2395 } else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) { 2396 split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK; 2397 reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2; 2398 } 2399 t_ddrmem.version = 0; 2400 t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE; 2401 if (cs_cap[1]) { 2402 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2403 6 + 2; 2404 } 2405 2406 if (cs_cap[1] && cs_pst > 27) { 2407 t_ddrmem.count = 2; 2408 t_ddrmem.bank[1] = 1 << cs_pst; 2409 t_ddrmem.bank[2] = cs_cap[0]; 2410 t_ddrmem.bank[3] = cs_cap[1] - reduce_cap; 2411 } else { 2412 t_ddrmem.count = 1; 2413 t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap; 2414 } 2415 2416 atags_set_tag(ATAG_DDR_MEM, &t_ddrmem); 2417 2418 memset(&t_socinfo, 0, sizeof(struct tag_soc_info)); 2419 t_socinfo.version = 0; 2420 t_socinfo.name = 0x1126; 2421 } 2422 2423 static void print_ddr_info(struct rv1126_sdram_params *sdram_params) 2424 { 2425 u32 split; 2426 2427 if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & 2428 (1 << SPLIT_BYPASS_OFFSET)) != 0) 2429 split = 0; 2430 else 2431 split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) & 2432 SPLIT_SIZE_MASK; 2433 2434 sdram_print_ddr_info(&sdram_params->ch.cap_info, 2435 &sdram_params->base, split); 2436 } 2437 2438 static int sdram_init_(struct dram_info *dram, 2439 struct rv1126_sdram_params *sdram_params, u32 post_init) 2440 { 2441 void __iomem *pctl_base = dram->pctl; 2442 void __iomem *phy_base = dram->phy; 2443 u32 ddr4_vref; 2444 u32 mr_tmp; 2445 2446 rkclk_configure_ddr(dram, sdram_params); 2447 2448 rkclk_ddr_reset(dram, 1, 1, 1, 1); 2449 udelay(10); 2450 2451 rkclk_ddr_reset(dram, 1, 1, 1, 0); 2452 phy_cfg(dram, sdram_params); 2453 2454 rkclk_ddr_reset(dram, 1, 1, 
0, 0); 2455 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1); 2456 2457 rkclk_ddr_reset(dram, 1, 0, 0, 0); 2458 pctl_cfg(dram->pctl, &sdram_params->pctl_regs, 2459 dram->sr_idle, dram->pd_idle); 2460 2461 if (sdram_params->ch.cap_info.bw == 2) { 2462 /* 32bit interface use pageclose */ 2463 setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2); 2464 /* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */ 2465 clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0); 2466 } else { 2467 clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2); 2468 } 2469 2470 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT 2471 u32 tmp, trefi; 2472 2473 tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG); 2474 trefi = (tmp >> 16) & 0xfff; 2475 writel((tmp & 0xf000ffff) | (trefi / 2) << 16, 2476 pctl_base + DDR_PCTL2_RFSHTMG); 2477 #endif 2478 2479 /* set frequency_mode */ 2480 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); 2481 /* set target_frequency to Frequency 0 */ 2482 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0); 2483 2484 set_ds_odt(dram, sdram_params, 0); 2485 sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params); 2486 set_ctl_address_map(dram, sdram_params); 2487 2488 setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4)); 2489 2490 rkclk_ddr_reset(dram, 0, 0, 0, 0); 2491 2492 while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) 2493 continue; 2494 2495 if (sdram_params->base.dramtype == LPDDR3) { 2496 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3); 2497 } else if (sdram_params->base.dramtype == LPDDR4 || 2498 sdram_params->base.dramtype == LPDDR4X) { 2499 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6); 2500 /* MR11 */ 2501 pctl_write_mr(dram->pctl, 3, 11, 2502 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 2503 LPDDR4); 2504 /* MR12 */ 2505 pctl_write_mr(dram->pctl, 3, 12, 2506 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, 2507 LPDDR4); 2508 2509 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7); 2510 /* MR22 */ 2511 
pctl_write_mr(dram->pctl, 3, 22, 2512 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 2513 LPDDR4); 2514 } 2515 2516 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) { 2517 if (post_init != 0) 2518 printascii("DTT cs0 error\n"); 2519 return -1; 2520 } 2521 2522 if (sdram_params->base.dramtype == LPDDR4) { 2523 mr_tmp = read_mr(dram, 1, 14, LPDDR4); 2524 2525 if (mr_tmp != 0x4d) 2526 return -1; 2527 } 2528 2529 if (sdram_params->base.dramtype == LPDDR4 || 2530 sdram_params->base.dramtype == LPDDR4X) { 2531 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7); 2532 /* MR14 */ 2533 pctl_write_mr(dram->pctl, 3, 14, 2534 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 2535 LPDDR4); 2536 } 2537 if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) { 2538 if (data_training(dram, 1, sdram_params, 0, 2539 READ_GATE_TRAINING) != 0) { 2540 printascii("DTT cs1 error\n"); 2541 return -1; 2542 } 2543 } 2544 2545 if (sdram_params->base.dramtype == DDR4) { 2546 ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39; 2547 pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref, 2548 sdram_params->base.dramtype); 2549 } 2550 2551 dram_all_config(dram, sdram_params); 2552 enable_low_power(dram, sdram_params); 2553 2554 return 0; 2555 } 2556 2557 static u64 dram_detect_cap(struct dram_info *dram, 2558 struct rv1126_sdram_params *sdram_params, 2559 unsigned char channel) 2560 { 2561 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2562 void __iomem *pctl_base = dram->pctl; 2563 void __iomem *phy_base = dram->phy; 2564 u32 mr8; 2565 2566 u32 bktmp; 2567 u32 coltmp; 2568 u32 rowtmp; 2569 u32 cs; 2570 u32 dram_type = sdram_params->base.dramtype; 2571 u32 pwrctl; 2572 u32 i, dq_map; 2573 u32 byte1 = 0, byte0 = 0; 2574 u32 tmp, byte; 2575 struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info; 2576 struct dq_map_info *map_info = (struct dq_map_info *) 2577 ((void *)common_info + index->dq_map_index.offset * 4); 2578 2579 cap_info->bw = 
dram_type == DDR3 ? 0 : 1; 2580 if (dram_type != LPDDR4 && dram_type != LPDDR4X) { 2581 if (dram_type != DDR4) { 2582 coltmp = 12; 2583 bktmp = 3; 2584 if (dram_type == LPDDR2) 2585 rowtmp = 15; 2586 else 2587 rowtmp = 16; 2588 2589 if (sdram_detect_col(cap_info, coltmp) != 0) 2590 goto cap_err; 2591 2592 sdram_detect_bank(cap_info, coltmp, bktmp); 2593 sdram_detect_dbw(cap_info, dram_type); 2594 } else { 2595 coltmp = 10; 2596 bktmp = 4; 2597 rowtmp = 17; 2598 2599 cap_info->col = 10; 2600 cap_info->bk = 2; 2601 sdram_detect_bg(cap_info, coltmp); 2602 } 2603 2604 if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0) 2605 goto cap_err; 2606 2607 sdram_detect_row_3_4(cap_info, coltmp, bktmp); 2608 } else { 2609 cap_info->col = 10; 2610 cap_info->bk = 3; 2611 mr8 = read_mr(dram, 1, 8, dram_type); 2612 cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0; 2613 mr8 = (mr8 >> 2) & 0xf; 2614 if (mr8 >= 0 && mr8 <= 6) { 2615 cap_info->cs0_row = 14 + (mr8 + 1) / 2; 2616 } else if (mr8 == 0xc) { 2617 cap_info->cs0_row = 13; 2618 } else { 2619 printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n"); 2620 goto cap_err; 2621 } 2622 if (cap_info->dbw == 0) 2623 cap_info->cs0_row++; 2624 cap_info->row_3_4 = mr8 % 2 == 1 ? 
1 : 0; 2625 if (cap_info->cs0_row >= 17) { 2626 printascii("Cap ERR: "); 2627 printascii("RV1126 LPDDR4/X cannot support row >= 17\n"); 2628 goto cap_err; 2629 // cap_info->cs0_row = 16; 2630 // cap_info->row_3_4 = 0; 2631 } 2632 } 2633 2634 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL); 2635 writel(0, pctl_base + DDR_PCTL2_PWRCTL); 2636 2637 if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0) 2638 cs = 1; 2639 else 2640 cs = 0; 2641 cap_info->rank = cs + 1; 2642 2643 setbits_le32(PHY_REG(phy_base, 0xf), 0xf); 2644 2645 tmp = data_training_rg(dram, 0, dram_type) & 0xf; 2646 2647 if (tmp == 0) { 2648 cap_info->bw = 2; 2649 } else { 2650 if (dram_type == DDR3 || dram_type == DDR4) { 2651 dq_map = 0; 2652 byte = 0; 2653 for (i = 0; i < 4; i++) { 2654 if ((tmp & BIT(i)) == 0) { 2655 dq_map |= byte << (i * 2); 2656 byte++; 2657 } 2658 } 2659 cap_info->bw = byte / 2; 2660 for (i = 0; i < 4; i++) { 2661 if ((tmp & BIT(i)) != 0) { 2662 dq_map |= byte << (i * 2); 2663 byte++; 2664 } 2665 } 2666 clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24); 2667 } else { 2668 dq_map = readl(PHY_REG(phy_base, 0x4f)); 2669 for (i = 0; i < 4; i++) { 2670 if (((dq_map >> (i * 2)) & 0x3) == 0) 2671 byte0 = i; 2672 if (((dq_map >> (i * 2)) & 0x3) == 1) 2673 byte1 = i; 2674 } 2675 clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK, 2676 BIT(byte0) | BIT(byte1)); 2677 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0) 2678 cap_info->bw = 1; 2679 else 2680 cap_info->bw = 0; 2681 } 2682 } 2683 if (cap_info->bw > 0) 2684 cap_info->dbw = 1; 2685 2686 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL); 2687 2688 cap_info->cs0_high16bit_row = cap_info->cs0_row; 2689 if (cs) { 2690 cap_info->cs1_row = cap_info->cs0_row; 2691 cap_info->cs1_high16bit_row = cap_info->cs0_row; 2692 } else { 2693 cap_info->cs1_row = 0; 2694 cap_info->cs1_high16bit_row = 0; 2695 } 2696 2697 return 0; 2698 cap_err: 2699 return -1; 2700 } 2701 2702 static int 
dram_detect_cs1_row(struct dram_info *dram, 2703 struct rv1126_sdram_params *sdram_params, 2704 unsigned char channel) 2705 { 2706 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2707 void __iomem *pctl_base = dram->pctl; 2708 u32 ret = 0; 2709 void __iomem *test_addr; 2710 u32 row, bktmp, coltmp, bw; 2711 u64 cs0_cap; 2712 u32 byte_mask; 2713 u32 cs_pst; 2714 u32 cs_add = 0; 2715 u32 max_row; 2716 2717 if (cap_info->rank == 2) { 2718 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2719 6 + 2; 2720 if (cs_pst < 28) 2721 cs_add = 1; 2722 2723 cs0_cap = 1 << cs_pst; 2724 2725 if (sdram_params->base.dramtype == DDR4) { 2726 if (cap_info->dbw == 0) 2727 bktmp = cap_info->bk + 2; 2728 else 2729 bktmp = cap_info->bk + 1; 2730 } else { 2731 bktmp = cap_info->bk; 2732 } 2733 bw = cap_info->bw; 2734 coltmp = cap_info->col; 2735 2736 if (bw == 2) 2737 byte_mask = 0xFFFF; 2738 else 2739 byte_mask = 0xFF; 2740 2741 max_row = (cs_pst == 31) ? 30 : 31; 2742 2743 max_row = max_row - bktmp - coltmp - bw - cs_add + 1; 2744 2745 row = (cap_info->cs0_row > max_row) ? 
max_row : 2746 cap_info->cs0_row; 2747 2748 for (; row > 12; row--) { 2749 test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE + 2750 (u32)cs0_cap + 2751 (1ul << (row + bktmp + coltmp + 2752 cs_add + bw - 1ul))); 2753 2754 writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap); 2755 writel(PATTERN, test_addr); 2756 2757 if (((readl(test_addr) & byte_mask) == 2758 (PATTERN & byte_mask)) && 2759 ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) & 2760 byte_mask) == 0)) { 2761 ret = row; 2762 break; 2763 } 2764 } 2765 } 2766 2767 return ret; 2768 } 2769 2770 /* return: 0 = success, other = fail */ 2771 static int sdram_init_detect(struct dram_info *dram, 2772 struct rv1126_sdram_params *sdram_params) 2773 { 2774 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2775 u32 ret; 2776 u32 sys_reg = 0; 2777 u32 sys_reg3 = 0; 2778 struct sdram_head_info_index_v2 *index = 2779 (struct sdram_head_info_index_v2 *)common_info; 2780 struct dq_map_info *map_info; 2781 2782 map_info = (struct dq_map_info *)((void *)common_info + 2783 index->dq_map_index.offset * 4); 2784 2785 if (sdram_init_(dram, sdram_params, 0)) { 2786 if (sdram_params->base.dramtype == DDR3) { 2787 clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, 2788 ((0x1 << 6) | (0x3 << 4) | (0x2 << 2) | 2789 (0x0 << 0)) << 24); 2790 if (sdram_init_(dram, sdram_params, 0)) 2791 return -1; 2792 } else { 2793 return -1; 2794 } 2795 } 2796 2797 if (sdram_params->base.dramtype == DDR3) { 2798 writel(PATTERN, CONFIG_SYS_SDRAM_BASE); 2799 if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN) 2800 return -1; 2801 } 2802 2803 split_bypass(dram); 2804 if (dram_detect_cap(dram, sdram_params, 0) != 0) 2805 return -1; 2806 2807 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, 2808 sdram_params->base.dramtype); 2809 ret = sdram_init_(dram, sdram_params, 1); 2810 if (ret != 0) 2811 goto out; 2812 2813 cap_info->cs1_row = 2814 dram_detect_cs1_row(dram, sdram_params, 0); 2815 if (cap_info->cs1_row) { 2816 sys_reg = 
readl(&dram->pmugrf->os_reg[2]); 2817 sys_reg3 = readl(&dram->pmugrf->os_reg[3]); 2818 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row, 2819 sys_reg, sys_reg3, 0); 2820 writel(sys_reg, &dram->pmugrf->os_reg[2]); 2821 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 2822 } 2823 2824 sdram_detect_high_row(cap_info, sdram_params->base.dramtype); 2825 split_setup(dram, sdram_params); 2826 out: 2827 return ret; 2828 } 2829 2830 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz) 2831 { 2832 u32 i; 2833 u32 offset = 0; 2834 struct ddr2_3_4_lp2_3_info *ddr_info; 2835 2836 if (!freq_mhz) { 2837 ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype); 2838 if (ddr_info) 2839 freq_mhz = 2840 (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) & 2841 DDR_FREQ_MASK; 2842 else 2843 freq_mhz = 0; 2844 } 2845 2846 for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) { 2847 if (sdram_configs[i].base.ddr_freq == 0 || 2848 freq_mhz < sdram_configs[i].base.ddr_freq) 2849 break; 2850 } 2851 offset = i == 0 ? 0 : i - 1; 2852 2853 return &sdram_configs[offset]; 2854 } 2855 2856 static const u16 pctl_need_update_reg[] = { 2857 DDR_PCTL2_RFSHTMG, 2858 DDR_PCTL2_INIT3, 2859 DDR_PCTL2_INIT4, 2860 DDR_PCTL2_INIT6, 2861 DDR_PCTL2_INIT7, 2862 DDR_PCTL2_DRAMTMG0, 2863 DDR_PCTL2_DRAMTMG1, 2864 DDR_PCTL2_DRAMTMG2, 2865 DDR_PCTL2_DRAMTMG3, 2866 DDR_PCTL2_DRAMTMG4, 2867 DDR_PCTL2_DRAMTMG5, 2868 DDR_PCTL2_DRAMTMG6, 2869 DDR_PCTL2_DRAMTMG7, 2870 DDR_PCTL2_DRAMTMG8, 2871 DDR_PCTL2_DRAMTMG9, 2872 DDR_PCTL2_DRAMTMG12, 2873 DDR_PCTL2_DRAMTMG13, 2874 DDR_PCTL2_DRAMTMG14, 2875 DDR_PCTL2_ZQCTL0, 2876 DDR_PCTL2_DFITMG0, 2877 DDR_PCTL2_ODTCFG 2878 }; 2879 2880 static const u16 phy_need_update_reg[] = { 2881 0x14, 2882 0x18, 2883 0x1c 2884 }; 2885 2886 static void pre_set_rate(struct dram_info *dram, 2887 struct rv1126_sdram_params *sdram_params, 2888 u32 dst_fsp, u32 dst_fsp_lp4) 2889 { 2890 u32 i, j, find; 2891 void __iomem *pctl_base = dram->pctl; 2892 void __iomem *phy_base = dram->phy; 2893 u32 phy_offset; 
2894 u32 mr_tmp; 2895 u32 dramtype = sdram_params->base.dramtype; 2896 2897 sw_set_req(dram); 2898 /* pctl timing update */ 2899 for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) { 2900 for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF; 2901 j++) { 2902 if (sdram_params->pctl_regs.pctl[j][0] == 2903 pctl_need_update_reg[i]) { 2904 writel(sdram_params->pctl_regs.pctl[j][1], 2905 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2906 pctl_need_update_reg[i]); 2907 find = j; 2908 break; 2909 } 2910 } 2911 } 2912 2913 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT 2914 u32 tmp, trefi; 2915 2916 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG); 2917 trefi = (tmp >> 16) & 0xfff; 2918 writel((tmp & 0xf000ffff) | (trefi / 2) << 16, 2919 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG); 2920 #endif 2921 2922 sw_set_ack(dram); 2923 2924 /* phy timing update */ 2925 if (dst_fsp == 0) 2926 phy_offset = 0; 2927 else 2928 phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3); 2929 /* cl cwl al update */ 2930 for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) { 2931 for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF; 2932 j++) { 2933 if (sdram_params->phy_regs.phy[j][0] == 2934 phy_need_update_reg[i]) { 2935 writel(sdram_params->phy_regs.phy[j][1], 2936 phy_base + phy_offset + 2937 phy_need_update_reg[i]); 2938 find = j; 2939 break; 2940 } 2941 } 2942 } 2943 2944 set_ds_odt(dram, sdram_params, dst_fsp); 2945 if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 2946 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2947 DDR_PCTL2_INIT4); 2948 /* MR13 */ 2949 pctl_write_mr(dram->pctl, 3, 13, 2950 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 2951 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | 2952 ((0x2 << 6) >> dst_fsp_lp4), dramtype); 2953 writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 2954 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | 2955 ((0x2 << 6) >> dst_fsp_lp4), 2956 PHY_REG(phy_base, 0x1b)); 2957 /* MR3 */ 2958 
pctl_write_mr(dram->pctl, 3, 3, 2959 mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & 2960 PCTL2_MR_MASK, 2961 dramtype); 2962 writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK, 2963 PHY_REG(phy_base, 0x19)); 2964 2965 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2966 DDR_PCTL2_INIT3); 2967 /* MR1 */ 2968 pctl_write_mr(dram->pctl, 3, 1, 2969 mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & 2970 PCTL2_MR_MASK, 2971 dramtype); 2972 writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK, 2973 PHY_REG(phy_base, 0x17)); 2974 /* MR2 */ 2975 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK, 2976 dramtype); 2977 writel(mr_tmp & PCTL2_MR_MASK, 2978 PHY_REG(phy_base, 0x18)); 2979 2980 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2981 DDR_PCTL2_INIT6); 2982 /* MR11 */ 2983 pctl_write_mr(dram->pctl, 3, 11, 2984 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 2985 dramtype); 2986 writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 2987 PHY_REG(phy_base, 0x1a)); 2988 /* MR12 */ 2989 pctl_write_mr(dram->pctl, 3, 12, 2990 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, 2991 dramtype); 2992 2993 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2994 DDR_PCTL2_INIT7); 2995 /* MR22 */ 2996 pctl_write_mr(dram->pctl, 3, 22, 2997 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 2998 dramtype); 2999 writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 3000 PHY_REG(phy_base, 0x1d)); 3001 /* MR14 */ 3002 pctl_write_mr(dram->pctl, 3, 14, 3003 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 3004 dramtype); 3005 writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 3006 PHY_REG(phy_base, 0x1c)); 3007 } 3008 3009 update_noc_timing(dram, sdram_params); 3010 } 3011 3012 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp, 3013 struct rv1126_sdram_params *sdram_params) 3014 { 3015 void __iomem *pctl_base = dram->pctl; 3016 void __iomem *phy_base = dram->phy; 3017 struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp]; 3018 u32 temp, 
temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/*
	 * LPDDR4/LPDDR4X use pull-down-only read termination here; for the
	 * other dram types the pull-up/pull-down enables come from the
	 * board's ODT info table.
	 */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* Snapshot the PHY-side drive/ODT/vref settings currently in effect */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/*
	 * Recover the dram-side drive strength and ODT from the mode
	 * register image the controller holds for dst_fsp (INIT3/INIT4/
	 * INIT6 per dram type).
	 */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * CA vref per channel: midpoint of max(lower edges) and
		 * min(upper edges) read back from PHY regs 0x3ae..0x3df.
		 * NOTE(review): the pairing of these registers with the
		 * two window edges is assumed from the trained-result
		 * layout — confirm against the PHY databook.
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		/* carry over bit 6 of PHY reg 0x1e into both vref values */
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		/* bit 3 of PHY reg 0x114 -> LP4 drive pull-down enable */
		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* Mirror the NoC scheduler timings so they can be restored per FSP */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark the entry as valid */
	p_fsp_param->flag = FSP_FLAG;
}

#ifndef CONFIG_SPL_KERNEL_BOOT
/*
 * Copy the collected frequency-set-point parameters to the fixed dram
 * address FSP_PARAM_STORE_ADDR so later boot stages can pick them up.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
#endif

/*
 * Patch the tRFC-derived fields of a pctl register table in place for
 * the detected die capacity and the target frequency.
 *
 * @pctl_regs: register/value table, terminated by reg == 0xffffffff
 * @cap_info:  detected capacity info (used for per-die capacity)
 * @dram_type: DDR3/DDR4/LPDDR3/LPDDR4/LPDDR4X
 * @freq:      target frequency in MHz
 *
 * tRFC (and DDR4 tRFC4) values in ns are looked up per die density and
 * converted to clock cycles; RFSHTMG.t_rfc_min, DRAMTMG8.t_xs_* and
 * DRAMTMG14.t_xsr are rewritten accordingly.
 */
static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
			     struct sdram_cap_info *cap_info, u32 dram_type,
			     u32 freq)
{
	u64 cs0_cap;
	u32 die_cap;
	u32 trfc_ns, trfc4_ns;
	u32 trfc, txsnr;
	u32 txs_abort_fast = 0;
	u32 tmp;

	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
	/* per-die capacity in Mbit-class units (shift by bus/die width gap) */
	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));

	switch (dram_type) {
	case DDR3:
		if (die_cap <= DIE_CAP_512MBIT)
			trfc_ns = 90;
		else if (die_cap <= DIE_CAP_1GBIT)
			trfc_ns = 110;
		else if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 160;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 260;
		else
			trfc_ns = 350;
		/* ns -> cycles, rounded up, with 10ns margin; min 5 */
		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	case DDR4:
		if (die_cap <= DIE_CAP_2GBIT) {
			trfc_ns = 160;
			trfc4_ns = 90;
		} else if (die_cap <= DIE_CAP_4GBIT) {
			trfc_ns = 260;
			trfc4_ns = 110;
		} else if (die_cap <= DIE_CAP_8GBIT) {
			trfc_ns = 350;
			trfc4_ns = 160;
		} else {
			trfc_ns = 550;
			trfc4_ns = 260;
		}
		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
		break;

	case LPDDR3:
		if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 130;
		else
			trfc_ns = 210;
		txsnr = MAX(2, ((trfc_ns + 10)
* freq + 999) / 1000); 3189 break; 3190 3191 case LPDDR4: 3192 case LPDDR4X: 3193 if (die_cap <= DIE_CAP_2GBIT) 3194 trfc_ns = 130; 3195 else if (die_cap <= DIE_CAP_4GBIT) 3196 trfc_ns = 180; 3197 else if (die_cap <= DIE_CAP_8GBIT) 3198 trfc_ns = 280; 3199 else 3200 trfc_ns = 380; 3201 txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000); 3202 break; 3203 3204 default: 3205 return; 3206 } 3207 trfc = (trfc_ns * freq + 999) / 1000; 3208 3209 for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) { 3210 switch (pctl_regs->pctl[i][0]) { 3211 case DDR_PCTL2_RFSHTMG: 3212 tmp = pctl_regs->pctl[i][1]; 3213 /* t_rfc_min */ 3214 tmp &= ~((u32)0x3ff); 3215 tmp |= ((trfc + 1) / 2) & 0x3ff; 3216 pctl_regs->pctl[i][1] = tmp; 3217 break; 3218 3219 case DDR_PCTL2_DRAMTMG8: 3220 if (dram_type == DDR3 || dram_type == DDR4) { 3221 tmp = pctl_regs->pctl[i][1]; 3222 /* t_xs_x32 */ 3223 tmp &= ~((u32)0x7f); 3224 tmp |= ((txsnr + 63) / 64) & 0x7f; 3225 3226 if (dram_type == DDR4) { 3227 /* t_xs_abort_x32 */ 3228 tmp &= ~((u32)(0x7f << 16)); 3229 tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16; 3230 /* t_xs_fast_x32 */ 3231 tmp &= ~((u32)(0x7f << 24)); 3232 tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24; 3233 } 3234 3235 pctl_regs->pctl[i][1] = tmp; 3236 } 3237 break; 3238 3239 case DDR_PCTL2_DRAMTMG14: 3240 if (dram_type == LPDDR3 || 3241 dram_type == LPDDR4 || dram_type == LPDDR4X) { 3242 tmp = pctl_regs->pctl[i][1]; 3243 /* t_xsr */ 3244 tmp &= ~((u32)0xfff); 3245 tmp |= ((txsnr + 1) / 2) & 0xfff; 3246 pctl_regs->pctl[i][1] = tmp; 3247 } 3248 break; 3249 3250 default: 3251 break; 3252 } 3253 } 3254 } 3255 3256 void ddr_set_rate(struct dram_info *dram, 3257 struct rv1126_sdram_params *sdram_params, 3258 u32 freq, u32 cur_freq, u32 dst_fsp, 3259 u32 dst_fsp_lp4, u32 training_en) 3260 { 3261 u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off; 3262 u32 mr_tmp; 3263 u32 lp_stat; 3264 u32 dramtype = sdram_params->base.dramtype; 3265 struct rv1126_sdram_params 
*sdram_params_new; 3266 void __iomem *pctl_base = dram->pctl; 3267 void __iomem *phy_base = dram->phy; 3268 3269 lp_stat = low_power_update(dram, 0); 3270 sdram_params_new = get_default_sdram_config(freq); 3271 sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank; 3272 sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw; 3273 3274 pctl_modify_trfc(&sdram_params_new->pctl_regs, 3275 &sdram_params->ch.cap_info, dramtype, freq); 3276 pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4); 3277 3278 while ((readl(pctl_base + DDR_PCTL2_STAT) & 3279 PCTL2_OPERATING_MODE_MASK) == 3280 PCTL2_OPERATING_MODE_SR) 3281 continue; 3282 3283 dest_dll_off = 0; 3284 dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 3285 DDR_PCTL2_INIT3); 3286 if ((dramtype == DDR3 && (dst_init3 & 1)) || 3287 (dramtype == DDR4 && !(dst_init3 & 1))) 3288 dest_dll_off = 1; 3289 3290 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 3291 cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 3292 DDR_PCTL2_INIT3); 3293 cur_init3 &= PCTL2_MR_MASK; 3294 cur_dll_off = 1; 3295 if ((dramtype == DDR3 && !(cur_init3 & 1)) || 3296 (dramtype == DDR4 && (cur_init3 & 1))) 3297 cur_dll_off = 0; 3298 3299 if (!cur_dll_off) { 3300 if (dramtype == DDR3) 3301 cur_init3 |= 1; 3302 else 3303 cur_init3 &= ~1; 3304 pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype); 3305 } 3306 3307 setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 3308 PCTL2_DIS_AUTO_REFRESH); 3309 update_refresh_reg(dram); 3310 3311 enter_sr(dram, 1); 3312 3313 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK | 3314 PMUGRF_CON_DDRPHY_BUFFEREN_EN, 3315 &dram->pmugrf->soc_con[0]); 3316 sw_set_req(dram); 3317 clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC, 3318 PCTL2_DFI_INIT_COMPLETE_EN); 3319 sw_set_ack(dram); 3320 3321 sw_set_req(dram); 3322 if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off) 3323 setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE); 3324 else 3325 clrbits_le32(pctl_base + DDR_PCTL2_MSTR, 
PCTL2_DLL_OFF_MODE); 3326 3327 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0, 3328 PCTL2_DIS_SRX_ZQCL); 3329 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0, 3330 PCTL2_DIS_SRX_ZQCL); 3331 sw_set_ack(dram); 3332 3333 writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT), 3334 &dram->cru->clkgate_con[21]); 3335 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK | 3336 (0x1 << CLK_DDR_UPCTL_EN_SHIFT) | 3337 (0x1 << ACLK_DDR_UPCTL_EN_SHIFT), 3338 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12); 3339 3340 clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); 3341 rkclk_set_dpll(dram, freq * MHz / 2); 3342 phy_pll_set(dram, freq * MHz, 0); 3343 phy_pll_set(dram, freq * MHz, 1); 3344 setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET); 3345 3346 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK | 3347 PMUGRF_CON_DDRPHY_BUFFEREN_DIS, 3348 &dram->pmugrf->soc_con[0]); 3349 writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT), 3350 &dram->cru->clkgate_con[21]); 3351 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK | 3352 (0x0 << CLK_DDR_UPCTL_EN_SHIFT) | 3353 (0x0 << ACLK_DDR_UPCTL_EN_SHIFT), 3354 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12); 3355 while ((readl(pctl_base + DDR_PCTL2_DFISTAT) & 3356 PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) 3357 continue; 3358 3359 sw_set_req(dram); 3360 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); 3361 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp); 3362 sw_set_ack(dram); 3363 update_refresh_reg(dram); 3364 clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2); 3365 3366 enter_sr(dram, 0); 3367 3368 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 3369 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5); 3370 3371 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4); 3372 if (dramtype == LPDDR3) { 3373 pctl_write_mr(dram->pctl, 3, 1, 3374 (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) & 3375 PCTL2_MR_MASK, 3376 dramtype); 3377 
pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK, 3378 dramtype); 3379 pctl_write_mr(dram->pctl, 3, 3, 3380 (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) & 3381 PCTL2_MR_MASK, 3382 dramtype); 3383 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype); 3384 } else if ((dramtype == DDR3) || (dramtype == DDR4)) { 3385 pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK, 3386 dramtype); 3387 if (!dest_dll_off) { 3388 pctl_write_mr(dram->pctl, 3, 0, 3389 ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) & 3390 PCTL2_MR_MASK) | DDR3_DLL_RESET, 3391 dramtype); 3392 udelay(2); 3393 } 3394 pctl_write_mr(dram->pctl, 3, 0, 3395 (dst_init3 >> PCTL2_DDR34_MR0_SHIFT & 3396 PCTL2_MR_MASK) & (~DDR3_DLL_RESET), 3397 dramtype); 3398 pctl_write_mr(dram->pctl, 3, 2, 3399 ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) & 3400 PCTL2_MR_MASK), dramtype); 3401 if (dramtype == DDR4) { 3402 pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK, 3403 dramtype); 3404 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 3405 DDR_PCTL2_INIT6); 3406 pctl_write_mr(dram->pctl, 3, 4, 3407 (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) & 3408 PCTL2_MR_MASK, 3409 dramtype); 3410 pctl_write_mr(dram->pctl, 3, 5, 3411 mr_tmp >> PCTL2_DDR4_MR5_SHIFT & 3412 PCTL2_MR_MASK, 3413 dramtype); 3414 3415 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 3416 DDR_PCTL2_INIT7); 3417 pctl_write_mr(dram->pctl, 3, 6, 3418 mr_tmp >> PCTL2_DDR4_MR6_SHIFT & 3419 PCTL2_MR_MASK, 3420 dramtype); 3421 } 3422 } else if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 3423 pctl_write_mr(dram->pctl, 3, 13, 3424 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 3425 PCTL2_MR_MASK) & (~(BIT(7)))) | 3426 dst_fsp_lp4 << 7, dramtype); 3427 } 3428 clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 3429 PCTL2_DIS_AUTO_REFRESH); 3430 update_refresh_reg(dram); 3431 3432 /* training */ 3433 high_freq_training(dram, sdram_params_new, dst_fsp); 3434 low_power_update(dram, lp_stat); 3435 3436 save_fsp_param(dram, dst_fsp, sdram_params_new); 3437 } 3438 3439 static void 
ddr_set_rate_for_fsp(struct dram_info *dram, 3440 struct rv1126_sdram_params *sdram_params) 3441 { 3442 struct ddr2_3_4_lp2_3_info *ddr_info; 3443 u32 f0; 3444 u32 dramtype = sdram_params->base.dramtype; 3445 #ifndef CONFIG_SPL_KERNEL_BOOT 3446 u32 f1, f2, f3; 3447 #endif 3448 3449 ddr_info = get_ddr_drv_odt_info(dramtype); 3450 if (!ddr_info) 3451 return; 3452 3453 f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) & 3454 DDR_FREQ_MASK; 3455 3456 #ifndef CONFIG_SPL_KERNEL_BOOT 3457 memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param)); 3458 memset((void *)&fsp_param, 0, sizeof(fsp_param)); 3459 3460 f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) & 3461 DDR_FREQ_MASK; 3462 f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) & 3463 DDR_FREQ_MASK; 3464 f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) & 3465 DDR_FREQ_MASK; 3466 #endif 3467 3468 if (get_wrlvl_val(dram, sdram_params)) 3469 printascii("get wrlvl value fail\n"); 3470 3471 #ifndef CONFIG_SPL_KERNEL_BOOT 3472 printascii("change to: "); 3473 printdec(f1); 3474 printascii("MHz\n"); 3475 ddr_set_rate(&dram_info, sdram_params, f1, 3476 sdram_params->base.ddr_freq, 1, 1, 1); 3477 printascii("change to: "); 3478 printdec(f2); 3479 printascii("MHz\n"); 3480 ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1); 3481 printascii("change to: "); 3482 printdec(f3); 3483 printascii("MHz\n"); 3484 ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1); 3485 #endif 3486 printascii("change to: "); 3487 printdec(f0); 3488 printascii("MHz(final freq)\n"); 3489 #ifndef CONFIG_SPL_KERNEL_BOOT 3490 ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1); 3491 #else 3492 ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1); 3493 #endif 3494 } 3495 3496 int get_uart_config(void) 3497 { 3498 struct sdram_head_info_index_v2 *index = 3499 (struct sdram_head_info_index_v2 *)common_info; 3500 struct global_info *gbl_info; 3501 3502 gbl_info = (struct global_info *)((void *)common_info + 3503 
index->global_index.offset * 4); 3504 3505 return gbl_info->uart_info; 3506 } 3507 3508 /* return: 0 = success, other = fail */ 3509 int sdram_init(void) 3510 { 3511 struct rv1126_sdram_params *sdram_params; 3512 int ret = 0; 3513 struct sdram_head_info_index_v2 *index = 3514 (struct sdram_head_info_index_v2 *)common_info; 3515 struct global_info *gbl_info; 3516 3517 dram_info.phy = (void *)DDR_PHY_BASE_ADDR; 3518 dram_info.pctl = (void *)UPCTL2_BASE_ADDR; 3519 dram_info.grf = (void *)GRF_BASE_ADDR; 3520 dram_info.cru = (void *)CRU_BASE_ADDR; 3521 dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR; 3522 dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR; 3523 dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR; 3524 3525 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT 3526 printascii("extended temp support\n"); 3527 #endif 3528 if (index->version_info != 2 || 3529 (index->global_index.size != sizeof(struct global_info) / 4) || 3530 (index->ddr3_index.size != 3531 sizeof(struct ddr2_3_4_lp2_3_info) / 4) || 3532 (index->ddr4_index.size != 3533 sizeof(struct ddr2_3_4_lp2_3_info) / 4) || 3534 (index->lp3_index.size != 3535 sizeof(struct ddr2_3_4_lp2_3_info) / 4) || 3536 (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) || 3537 (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) || 3538 index->global_index.offset == 0 || 3539 index->ddr3_index.offset == 0 || 3540 index->ddr4_index.offset == 0 || 3541 index->lp3_index.offset == 0 || 3542 index->lp4_index.offset == 0 || 3543 index->lp4x_index.offset == 0) { 3544 printascii("common info error\n"); 3545 goto error; 3546 } 3547 3548 gbl_info = (struct global_info *)((void *)common_info + 3549 index->global_index.offset * 4); 3550 3551 dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info); 3552 dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info); 3553 3554 sdram_params = &sdram_configs[0]; 3555 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8) 3556 for (j = 0; j < ARRAY_SIZE(sdram_configs); j++) 3557 sdram_configs[j].base.dramtype = 
LPDDR4X; 3558 #endif 3559 if (sdram_params->base.dramtype == DDR3 || 3560 sdram_params->base.dramtype == DDR4) { 3561 if (DDR_2T_INFO(gbl_info->info_2t)) 3562 sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10; 3563 else 3564 sdram_params->pctl_regs.pctl[0][1] &= 3565 ~(0x1 << 10); 3566 } 3567 ret = sdram_init_detect(&dram_info, sdram_params); 3568 if (ret) { 3569 sdram_print_dram_type(sdram_params->base.dramtype); 3570 printascii(", "); 3571 printdec(sdram_params->base.ddr_freq); 3572 printascii("MHz\n"); 3573 goto error; 3574 } 3575 print_ddr_info(sdram_params); 3576 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 3577 init_rw_trn_result_struct(&rw_trn_result, dram_info.phy, 3578 (u8)sdram_params->ch.cap_info.rank); 3579 #endif 3580 3581 ddr_set_rate_for_fsp(&dram_info, sdram_params); 3582 #ifndef CONFIG_SPL_KERNEL_BOOT 3583 copy_fsp_param_to_ddr(); 3584 #endif 3585 3586 ddr_set_atags(&dram_info, sdram_params); 3587 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 3588 save_rw_trn_result_to_ddr(&rw_trn_result); 3589 #endif 3590 3591 printascii("out\n"); 3592 3593 return ret; 3594 error: 3595 printascii("error\n"); 3596 return (-1); 3597 } 3598 #endif /* CONFIG_TPL_BUILD */ 3599