1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * (C) Copyright 2020 Rockchip Electronics Co., Ltd. 4 */ 5 6 #include <common.h> 7 #include <debug_uart.h> 8 #include <dm.h> 9 #include <ram.h> 10 #include <syscon.h> 11 #include <asm/io.h> 12 #include <asm/arch/clock.h> 13 #include <asm/arch/hardware.h> 14 #include <asm/arch/rk_atags.h> 15 #include <asm/arch/cru_rv1126.h> 16 #include <asm/arch/grf_rv1126.h> 17 #include <asm/arch/sdram_common.h> 18 #include <asm/arch/sdram_rv1126.h> 19 20 /* define training flag */ 21 #define CA_TRAINING (0x1 << 0) 22 #define READ_GATE_TRAINING (0x1 << 1) 23 #define WRITE_LEVELING (0x1 << 2) 24 #define WRITE_TRAINING (0x1 << 3) 25 #define READ_TRAINING (0x1 << 4) 26 #define FULL_TRAINING (0xff) 27 28 #define SKEW_RX_SIGNAL (0) 29 #define SKEW_TX_SIGNAL (1) 30 #define SKEW_CA_SIGNAL (2) 31 32 #define DESKEW_MDF_ABS_VAL (0) 33 #define DESKEW_MDF_DIFF_VAL (1) 34 35 #ifdef CONFIG_TPL_BUILD 36 #ifndef CONFIG_TPL_TINY_FRAMEWORK 37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!! 
38 #endif 39 #endif 40 41 #ifdef CONFIG_TPL_BUILD 42 43 struct dram_info { 44 void __iomem *pctl; 45 void __iomem *phy; 46 struct rv1126_cru *cru; 47 struct msch_regs *msch; 48 struct rv1126_ddrgrf *ddrgrf; 49 struct rv1126_grf *grf; 50 struct ram_info info; 51 struct rv1126_pmugrf *pmugrf; 52 u32 sr_idle; 53 u32 pd_idle; 54 }; 55 56 #define GRF_BASE_ADDR 0xfe000000 57 #define PMU_GRF_BASE_ADDR 0xfe020000 58 #define DDR_GRF_BASE_ADDR 0xfe030000 59 #define BUS_SGRF_BASE_ADDR 0xfe0a0000 60 #define SERVER_MSCH_BASE_ADDR 0xfe800000 61 #define CRU_BASE_ADDR 0xff490000 62 #define DDR_PHY_BASE_ADDR 0xff4a0000 63 #define UPCTL2_BASE_ADDR 0xffa50000 64 65 #define SGRF_SOC_CON2 0x8 66 #define SGRF_SOC_CON12 0x30 67 #define SGRF_SOC_CON13 0x34 68 69 struct dram_info dram_info; 70 71 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3) 72 struct rv1126_sdram_params sdram_configs[] = { 73 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc" 74 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc" 75 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc" 76 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc" 77 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc" 78 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc" 79 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc" 80 }; 81 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0) 82 struct rv1126_sdram_params sdram_configs[] = { 83 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc" 84 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc" 85 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc" 86 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc" 87 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc" 88 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc" 89 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc" 90 }; 91 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6) 92 struct rv1126_sdram_params sdram_configs[] = { 93 
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

/* loader parameter blob; indexed via struct sdram_head_info_index_v2 */
u32 common_info[] = {
#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

/* DDR configuration 0-9 */
/*
 * NOTE(review): bit layout appears to be (cs-1)<<8 | (row-13)<<5 |
 * bank_flag<<3/<<4 | (bw+col-10), matching how calculate_ddrconfig()
 * builds its "tmp" key below — confirm against the TRM.
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};

/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

/* {DDR4 ddrconfig index, equivalent DDR3-style index} pairs */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

/*
 * Per-ddrconfig values for the controller ADDRMAP registers; row i is
 * copied verbatim to DDR_PCTL2_ADDRMAP0.. by set_ctl_address_map().
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};

/* DQ line selection triples — NOTE(review): semantics of the three columns
 * are not visible in this file; confirm against the de-skew code.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

/* skew-group base addresses, one per rank/byte-lane group */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/* PHY register offsets where write-leveling results are stored */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/* PHY skew register base per DQS lane (RX then TX, CS0 then CS1) */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

/*
 * Drive the controller/PHY soft-reset lines: controller resets go through
 * BUS_SGRF SOC_CON13, PHY resets through CRU softrst_con[12].
 * NOTE(review): reset polarity (1 = assert?) is defined by the
 * UPCTL2_*/DDRPHY_* macros in sdram_rv1126.h — confirm there.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

/*
 * Program the DPLL to @hz: park the DPLL mux on the 24 MHz crystal,
 * write the divider set (fbdiv computed from a fixed postdiv table),
 * optionally enable spread-spectrum per the loader-params blob, poll for
 * lock (up to ~1000 us), then switch the mux back to the PLL output.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz reference: VCO = 24 * fbdiv / refdiv, out = VCO / (p1*p2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* poll for PLL lock; falls through after ~1000 us either way */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

/*
 * Map the detected DRAM geometry (rank/bw/col/row/bank) to a ddrconfig
 * index (0-28) by matching against the ddr_cfg_2_rbc / ddr4_cfg_2_rbc
 * key tables; DDR4 indices are finally translated to their DDR3-style
 * equivalents via d4_rbc_2_d3_rbc.
 */
static unsigned int
calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank: try the rank-interleaved configs first */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* ddrconf is unsigned, so the -1 "not found" value also trips this */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

/* Open a quasi-dynamic programming window (uMCTL2 SWCTL sw_done = 0) */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

/* Close the programming window and wait for SWSTAT sw_done_ack */
static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done=1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait programming done */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
		    PCTL2_SW_DONE_ACK)
			break;
	}
}

/*
 * Copy the addrmap[] row for the chosen ddrconfig into the controller
 * ADDRMAP0..8 registers, then mask out (0xf) the row address bits the
 * detected part does not have; applies the LPDDR3 3/4-row and DDR4
 * half-bus-width quirks and disables rank interleave for single rank.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0],
			  ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			     ((i - 12) * 8 / 32) * 4,
			     0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

/*
 * PHY PLL control: with @wait set, only poll reg 0x90 for PLL lock;
 * otherwise program fbdiv/prediv/postdiv (regs 0x50-0x53) for @freq.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

/* DDR3 PHY driver strength: {register encoding, ohms}, strongest last */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

/* DDR3 PHY ODT: {register encoding, ohms} */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
{PHY_DDR3_RTT_32ohm, 32}, 641 {PHY_DDR3_RTT_31ohm, 31}, 642 {PHY_DDR3_RTT_29ohm, 29}, 643 {PHY_DDR3_RTT_28ohm, 28}, 644 {PHY_DDR3_RTT_27ohm, 27}, 645 {PHY_DDR3_RTT_25ohm, 25} 646 }; 647 648 static u16 d4lp3_phy_drv_2_ohm[][2] = { 649 {PHY_DDR4_LPDDR3_RON_482ohm, 482}, 650 {PHY_DDR4_LPDDR3_RON_244ohm, 244}, 651 {PHY_DDR4_LPDDR3_RON_162ohm, 162}, 652 {PHY_DDR4_LPDDR3_RON_122ohm, 122}, 653 {PHY_DDR4_LPDDR3_RON_97ohm, 97}, 654 {PHY_DDR4_LPDDR3_RON_81ohm, 81}, 655 {PHY_DDR4_LPDDR3_RON_69ohm, 69}, 656 {PHY_DDR4_LPDDR3_RON_61ohm, 61}, 657 {PHY_DDR4_LPDDR3_RON_54ohm, 54}, 658 {PHY_DDR4_LPDDR3_RON_48ohm, 48}, 659 {PHY_DDR4_LPDDR3_RON_44ohm, 44}, 660 {PHY_DDR4_LPDDR3_RON_40ohm, 40}, 661 {PHY_DDR4_LPDDR3_RON_37ohm, 37}, 662 {PHY_DDR4_LPDDR3_RON_34ohm, 34}, 663 {PHY_DDR4_LPDDR3_RON_32ohm, 32}, 664 {PHY_DDR4_LPDDR3_RON_30ohm, 30}, 665 {PHY_DDR4_LPDDR3_RON_28ohm, 28}, 666 {PHY_DDR4_LPDDR3_RON_27ohm, 27}, 667 {PHY_DDR4_LPDDR3_RON_25ohm, 25}, 668 {PHY_DDR4_LPDDR3_RON_24ohm, 24}, 669 {PHY_DDR4_LPDDR3_RON_23ohm, 23}, 670 {PHY_DDR4_LPDDR3_RON_22ohm, 22}, 671 {PHY_DDR4_LPDDR3_RON_21ohm, 21} 672 }; 673 674 static u16 d4lp3_phy_odt_2_ohm[][2] = { 675 {PHY_DDR4_LPDDR3_RTT_DISABLE, 0}, 676 {PHY_DDR4_LPDDR3_RTT_586ohm, 586}, 677 {PHY_DDR4_LPDDR3_RTT_294ohm, 294}, 678 {PHY_DDR4_LPDDR3_RTT_196ohm, 196}, 679 {PHY_DDR4_LPDDR3_RTT_148ohm, 148}, 680 {PHY_DDR4_LPDDR3_RTT_118ohm, 118}, 681 {PHY_DDR4_LPDDR3_RTT_99ohm, 99}, 682 {PHY_DDR4_LPDDR3_RTT_85ohm, 58}, 683 {PHY_DDR4_LPDDR3_RTT_76ohm, 76}, 684 {PHY_DDR4_LPDDR3_RTT_67ohm, 67}, 685 {PHY_DDR4_LPDDR3_RTT_60ohm, 60}, 686 {PHY_DDR4_LPDDR3_RTT_55ohm, 55}, 687 {PHY_DDR4_LPDDR3_RTT_50ohm, 50}, 688 {PHY_DDR4_LPDDR3_RTT_46ohm, 46}, 689 {PHY_DDR4_LPDDR3_RTT_43ohm, 43}, 690 {PHY_DDR4_LPDDR3_RTT_40ohm, 40}, 691 {PHY_DDR4_LPDDR3_RTT_38ohm, 38}, 692 {PHY_DDR4_LPDDR3_RTT_36ohm, 36}, 693 {PHY_DDR4_LPDDR3_RTT_34ohm, 34}, 694 {PHY_DDR4_LPDDR3_RTT_32ohm, 32}, 695 {PHY_DDR4_LPDDR3_RTT_31ohm, 31}, 696 {PHY_DDR4_LPDDR3_RTT_29ohm, 29}, 697 {PHY_DDR4_LPDDR3_RTT_28ohm, 
28}, 698 {PHY_DDR4_LPDDR3_RTT_27ohm, 27} 699 }; 700 701 static u16 lp4_phy_drv_2_ohm[][2] = { 702 {PHY_LPDDR4_RON_501ohm, 501}, 703 {PHY_LPDDR4_RON_253ohm, 253}, 704 {PHY_LPDDR4_RON_168ohm, 168}, 705 {PHY_LPDDR4_RON_126ohm, 126}, 706 {PHY_LPDDR4_RON_101ohm, 101}, 707 {PHY_LPDDR4_RON_84ohm, 84}, 708 {PHY_LPDDR4_RON_72ohm, 72}, 709 {PHY_LPDDR4_RON_63ohm, 63}, 710 {PHY_LPDDR4_RON_56ohm, 56}, 711 {PHY_LPDDR4_RON_50ohm, 50}, 712 {PHY_LPDDR4_RON_46ohm, 46}, 713 {PHY_LPDDR4_RON_42ohm, 42}, 714 {PHY_LPDDR4_RON_38ohm, 38}, 715 {PHY_LPDDR4_RON_36ohm, 36}, 716 {PHY_LPDDR4_RON_33ohm, 33}, 717 {PHY_LPDDR4_RON_31ohm, 31}, 718 {PHY_LPDDR4_RON_29ohm, 29}, 719 {PHY_LPDDR4_RON_28ohm, 28}, 720 {PHY_LPDDR4_RON_26ohm, 26}, 721 {PHY_LPDDR4_RON_25ohm, 25}, 722 {PHY_LPDDR4_RON_24ohm, 24}, 723 {PHY_LPDDR4_RON_23ohm, 23}, 724 {PHY_LPDDR4_RON_22ohm, 22} 725 }; 726 727 static u16 lp4_phy_odt_2_ohm[][2] = { 728 {PHY_LPDDR4_RTT_DISABLE, 0}, 729 {PHY_LPDDR4_RTT_604ohm, 604}, 730 {PHY_LPDDR4_RTT_303ohm, 303}, 731 {PHY_LPDDR4_RTT_202ohm, 202}, 732 {PHY_LPDDR4_RTT_152ohm, 152}, 733 {PHY_LPDDR4_RTT_122ohm, 122}, 734 {PHY_LPDDR4_RTT_101ohm, 101}, 735 {PHY_LPDDR4_RTT_87ohm, 87}, 736 {PHY_LPDDR4_RTT_78ohm, 78}, 737 {PHY_LPDDR4_RTT_69ohm, 69}, 738 {PHY_LPDDR4_RTT_62ohm, 62}, 739 {PHY_LPDDR4_RTT_56ohm, 56}, 740 {PHY_LPDDR4_RTT_52ohm, 52}, 741 {PHY_LPDDR4_RTT_48ohm, 48}, 742 {PHY_LPDDR4_RTT_44ohm, 44}, 743 {PHY_LPDDR4_RTT_41ohm, 41}, 744 {PHY_LPDDR4_RTT_39ohm, 39}, 745 {PHY_LPDDR4_RTT_37ohm, 37}, 746 {PHY_LPDDR4_RTT_35ohm, 35}, 747 {PHY_LPDDR4_RTT_33ohm, 33}, 748 {PHY_LPDDR4_RTT_32ohm, 32}, 749 {PHY_LPDDR4_RTT_30ohm, 30}, 750 {PHY_LPDDR4_RTT_29ohm, 29}, 751 {PHY_LPDDR4_RTT_27ohm, 27} 752 }; 753 754 static u32 lp4_odt_calc(u32 odt_ohm) 755 { 756 u32 odt; 757 758 if (odt_ohm == 0) 759 odt = LPDDR4_DQODT_DIS; 760 else if (odt_ohm <= 40) 761 odt = LPDDR4_DQODT_40; 762 else if (odt_ohm <= 48) 763 odt = LPDDR4_DQODT_48; 764 else if (odt_ohm <= 60) 765 odt = LPDDR4_DQODT_60; 766 else if (odt_ohm <= 80) 767 odt = 
LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

/*
 * Return a pointer into the common_info loader-params blob for the
 * drive/ODT record of @dramtype, or NULL (0) with a console message for
 * unknown types. Offsets in the index header are in 32-bit words.
 */
static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else if (dramtype == LPDDR4X)
		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

/*
 * Encode the CA (MR12) and DQ (MR14) vref values for the target
 * frequency-set-point and write them into the controller INIT6/INIT7
 * registers through a quasi-dynamic programming window.
 * NOTE(review): the LPDDR4X branch scales by 11/6 and divides CA steps
 * by 4 but DQ steps by 6 — taken on trust from the original code;
 * confirm against the JEDEC MR12/MR14 vref tables.
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* clamp to the representable range, then pick vref range bit */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

/*
 * Configure PHY and DRAM drive strength / ODT for the target
 * frequency-set-point, using the per-type records from the loader
 * params blob. (Continues beyond this view.)
 */
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* dram odt en freq control phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info =
ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* phy odt en freq control dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* LP4 uses pull-down-only PHY ODT when ODT is enabled */
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	/* pick the ohm->register-encoding tables for this DRAM type */
	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	/*
	 * Scan each table from weakest (last entry) up, taking the first
	 * entry whose ohm value >= the requested one.
	 * NOTE(review): loop bounds use ARRAY_SIZE of the d3/d4lp3 tables
	 * regardless of which table p_drv/p_odt point at — all tables
	 * happen to have compatible lengths; confirm if a table changes.
	 */
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	/* receiver/transmitter vref as a fraction of 256 (0x80 = VDD/2) */
	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				     (phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x*/
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	/* per-byte-lane ODT, drive strength, vref and slew-rate setup */
	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				     BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	/* fetch the MR1 (DDR3/4) or MR3 (LPDDR) image for this set-point */
	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |=
DDR3_RTT_NOM_DIS; 1094 else if (dram_odt_ohm <= 40) 1095 mr1_mr3 |= DDR3_RTT_NOM_40; 1096 else if (dram_odt_ohm <= 60) 1097 mr1_mr3 |= DDR3_RTT_NOM_60; 1098 else 1099 mr1_mr3 |= DDR3_RTT_NOM_120; 1100 1101 } else if (dramtype == DDR4) { 1102 mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK); 1103 if (dram_drv_ohm == 48) 1104 mr1_mr3 |= DDR4_DS_48; 1105 1106 if (dram_odt_ohm == 0) 1107 mr1_mr3 |= DDR4_RTT_NOM_DIS; 1108 else if (dram_odt_ohm <= 34) 1109 mr1_mr3 |= DDR4_RTT_NOM_34; 1110 else if (dram_odt_ohm <= 40) 1111 mr1_mr3 |= DDR4_RTT_NOM_40; 1112 else if (dram_odt_ohm <= 48) 1113 mr1_mr3 |= DDR4_RTT_NOM_48; 1114 else if (dram_odt_ohm <= 60) 1115 mr1_mr3 |= DDR4_RTT_NOM_60; 1116 else 1117 mr1_mr3 |= DDR4_RTT_NOM_120; 1118 1119 } else if (dramtype == LPDDR3) { 1120 if (dram_drv_ohm <= 34) 1121 mr1_mr3 |= LPDDR3_DS_34; 1122 else if (dram_drv_ohm <= 40) 1123 mr1_mr3 |= LPDDR3_DS_40; 1124 else if (dram_drv_ohm <= 48) 1125 mr1_mr3 |= LPDDR3_DS_48; 1126 else if (dram_drv_ohm <= 60) 1127 mr1_mr3 |= LPDDR3_DS_60; 1128 else if (dram_drv_ohm <= 80) 1129 mr1_mr3 |= LPDDR3_DS_80; 1130 1131 if (dram_odt_ohm == 0) 1132 lp3_odt_value = LPDDR3_ODT_DIS; 1133 else if (dram_odt_ohm <= 60) 1134 lp3_odt_value = LPDDR3_ODT_60; 1135 else if (dram_odt_ohm <= 120) 1136 lp3_odt_value = LPDDR3_ODT_120; 1137 else 1138 lp3_odt_value = LPDDR3_ODT_240; 1139 } else {/* for lpddr4 and lpddr4x */ 1140 /* MR3 for lp4 PU-CAL and PDDS */ 1141 mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK); 1142 mr1_mr3 |= lp4_pu_cal; 1143 1144 tmp = lp4_odt_calc(dram_drv_ohm); 1145 if (!tmp) 1146 tmp = LPDDR4_PDDS_240; 1147 mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT); 1148 1149 /* MR11 for lp4 ca odt, dq odt set */ 1150 mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 1151 DDR_PCTL2_INIT6); 1152 mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK; 1153 1154 mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK); 1155 1156 tmp = lp4_odt_calc(dram_odt_ohm); 1157 mr11 |= (tmp << LPDDR4_DQODT_SHIFT); 1158 1159 tmp = 
lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);

		/* MR11 update must go through the pctl sw-request handshake */
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	/* write back the assembled MR1 (DDR3/4) or MR3 (LPDDRx) value */
	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

/*
 * Program the PHY command/DQ byte-lane remap (PHY reg 0x4f) from the
 * per-dramtype byte map stored in the global common_info blob.
 * LPDDR4X shares the LPDDR4 map. Always returns 0.
 */
static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype == LPDDR4X)
		dramtype = LPDDR4;

	/* byte_map[] packs one 8-bit map per dramtype, four per word */
	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

/*
 * Base PHY configuration: lane remap, PLL, the board's PHY register
 * table, DQ width enable per detected bus width, and training presets.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the parameter blob's PHY register table (0xFFFFFFFF ends it) */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes map to logical bytes 0 and 1 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0) {
			byte0 = i;
			break;
		}
	}
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 1) {
			byte1 = i;
			break;
		}
	}

	/* enable only the byte lanes needed for the detected bus width */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

/*
 * Toggle RFSHCTL3 bit1 (refresh_update_level) so the controller latches
 * newly written refresh timings. Always returns 0.
 */
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 *
 * Issue an MRR through the pctl and return the low byte of the result
 * from the DDR GRF status registers. For LPDDR3 the bits come back in
 * pad order and are re-shuffled through lp3_dq0_7_map.
 */
u32 read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	pctl_read_mr(pctl_base, rank, mr_num);

	if (dramtype == LPDDR3) {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
		ret = 0;
		/* undo the board DQ swizzle: 4 bits of map per DQ bit */
		for (i = 0; i < 8; i++)
			ret |= ((temp >> i) & 0x1) <<
			       ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
	} else {
		ret = readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff;
	}

	return ret;
}

/* before call this function autorefresh should be disabled */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait until no rank-refresh/zq command is pending, then issue one */
	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

/*
 * en != 0: request software self-refresh and spin until the controller
 *          reports SW self-refresh entry;
 * en == 0: release it and spin until self-refresh is exited.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
			PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

/*
 * Sample the per-group left/right loop inverse-delay values (PHY regs
 * +0x2e/+0x2f, selected via +0x2c/+0x2d) and store them into the
 * pre-bit registers named by dq_sel[].
 * NOTE(review): exact register semantics per RV1126 PHY documentation —
 * dq_sel[] and grp_addr[] are defined elsewhere in this file.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

/* Pulse PHY 0x70 bit4 to latch the RX DQ pre-bit de-skew values. */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

/* Pulse PHY 0xc bit6 to latch the TX DQ pre-bit de-skew values. */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

/* Pulse PHY 0x22 bit6 to latch the CA pre-bit de-skew values. */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0 (DESKEW_MDF_ABS_VAL):  de-skew = delta_*
 *      1 (DESKEW_MDF_DIFF_VAL): de-skew = reg val + delta_*
 * delta_dif: value for differential signal: clk
 * delta_sig: value for single-ended signals: ca/cmd
 * cs: 0 -> cs0, 2 -> cs1, else both (LPDDR4 prebit latch enable mask)
 *
 * Performed under self-refresh; on LPDDR4/4X the DFI low-power
 * interface is temporarily disabled (PHY 0x60 bit5) while modifying.
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	/* CA/cmd single-ended de-skew registers: 0x150..0x16f */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/* clk (differential) de-skew at +0x17/+0x18; undo the delta_sig
	 * just applied above before adding delta_dif in diff mode
	 */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));

}

/*
 * Return the minimum de-skew value currently programmed for the given
 * signal class (SKEW_RX_SIGNAL / SKEW_TX_SIGNAL / SKEW_CA_SIGNAL),
 * scanning only enabled byte lanes for rx/tx.
 */
static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
i))); 1494 } 1495 } 1496 1497 return min; 1498 } 1499 1500 static u32 low_power_update(struct dram_info *dram, u32 en) 1501 { 1502 void __iomem *pctl_base = dram->pctl; 1503 u32 lp_stat = 0; 1504 1505 if (en) { 1506 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf); 1507 } else { 1508 lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf; 1509 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf); 1510 } 1511 1512 return lp_stat; 1513 } 1514 1515 /* 1516 * signal: 1517 * dir: 0: de-skew = delta_* 1518 * 1: de-skew = reg val - delta_* 1519 * delta_dir: value for differential signal: dqs 1520 * delta_sig: value for single signal: dq/dm 1521 */ 1522 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir, 1523 int delta_dif, int delta_sig, u32 rank) 1524 { 1525 void __iomem *phy_base = dram->phy; 1526 u32 i, j, tmp, offset; 1527 u32 byte_en; 1528 1529 byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf; 1530 1531 if (signal == SKEW_RX_SIGNAL) 1532 offset = 0; 1533 else 1534 offset = 8; 1535 1536 for (j = offset; j < (offset + rank * 4); j++) { 1537 if (!((byte_en >> (j % 4)) & 1)) 1538 continue; 1539 for (i = 0; i < 0x9; i++) { 1540 if (dir == DESKEW_MDF_ABS_VAL) 1541 tmp = delta_sig; 1542 else 1543 tmp = delta_sig + readl(PHY_REG(phy_base, 1544 dqs_dq_skew_adr[j] + 1545 i)); 1546 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i)); 1547 } 1548 if (dir == DESKEW_MDF_ABS_VAL) 1549 tmp = delta_dif; 1550 else 1551 tmp = delta_dif + readl(PHY_REG(phy_base, 1552 dqs_dq_skew_adr[j] + 9)); 1553 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9)); 1554 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa)); 1555 } 1556 if (signal == SKEW_RX_SIGNAL) 1557 update_dq_rx_prebit(dram); 1558 else 1559 update_dq_tx_prebit(dram); 1560 } 1561 1562 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype) 1563 { 1564 void __iomem *phy_base = dram->phy; 1565 u32 ret; 1566 u32 dis_auto_zq = 0; 1567 u32 odt_val_up, odt_val_dn; 1568 u32 i, j; 1569 1570 
odt_val_dn = readl(PHY_REG(phy_base, 0x110)); 1571 odt_val_up = readl(PHY_REG(phy_base, 0x111)); 1572 1573 if (dramtype != LPDDR4 || dramtype != LPDDR4X) { 1574 for (i = 0; i < 4; i++) { 1575 j = 0x110 + i * 0x10; 1576 writel(PHY_DDR4_LPDDR3_RTT_294ohm, 1577 PHY_REG(phy_base, j)); 1578 writel(PHY_DDR4_LPDDR3_RTT_DISABLE, 1579 PHY_REG(phy_base, j + 0x1)); 1580 } 1581 } 1582 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1583 /* use normal read mode for data training */ 1584 clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); 1585 1586 if (dramtype == DDR4) 1587 setbits_le32(PHY_REG(phy_base, 0xc), BIT(1)); 1588 1589 /* choose training cs */ 1590 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs)); 1591 /* enable gate training */ 1592 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1); 1593 udelay(50); 1594 ret = readl(PHY_REG(phy_base, 0x91)); 1595 /* disable gate training */ 1596 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0); 1597 clrbits_le32(PHY_REG(phy_base, 2), 0x30); 1598 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1599 1600 ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf); 1601 1602 if (dramtype != LPDDR4 || dramtype != LPDDR4X) { 1603 for (i = 0; i < 4; i++) { 1604 j = 0x110 + i * 0x10; 1605 writel(odt_val_dn, PHY_REG(phy_base, j)); 1606 writel(odt_val_up, PHY_REG(phy_base, j + 0x1)); 1607 } 1608 } 1609 return ret; 1610 } 1611 1612 static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype, 1613 u32 rank) 1614 { 1615 void __iomem *pctl_base = dram->pctl; 1616 void __iomem *phy_base = dram->phy; 1617 u32 dis_auto_zq = 0; 1618 u32 tmp; 1619 u32 cur_fsp; 1620 u32 timeout_us = 1000; 1621 1622 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1623 1624 clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1); 1625 1626 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1627 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) & 1628 0xffff; 1629 writel(tmp & 0xff, PHY_REG(phy_base, 0x3)); 1630 1631 /* disable 
another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until every enabled byte lane reports leveling done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

/*
 * Data pattern used by training.
 * NOTE(review): values > 0x7f stored in plain char rely on
 * implementation-defined conversion when char is signed — harmless on
 * the targeted toolchain, but u8 would be cleaner; confirm users.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};

/*
 * Read (eye) training for one chip-select using the PHY's automatic
 * read-train engine. Returns 0 on success, -1 on invalid cs, timeout
 * or a reported training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* for DDR3 at the default centre vref, bias rx vref down a step
	 * during training; restored before returning
	 */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original rx vref biased at function entry */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}

/*
 * Write (eye) training for one chip-select using the PHY's automatic
 * write-train engine. Returns 0 on success, -1 on reported error;
 * hangs forever on timeout. For low-speed LPDDR3 the CL/CWL are
 * temporarily forced to 8/4 (MR2 = 0x6) and restored afterwards.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1812 offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3; 1813 cl = readl(PHY_REG(phy_base, offset)); 1814 cwl = readl(PHY_REG(phy_base, offset + 2)); 1815 1816 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8); 1817 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4); 1818 pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype); 1819 } 1820 1821 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl); 1822 1823 /* PHY_0x7b[7:0] reg_train_col_addr[7:0] */ 1824 clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0); 1825 /* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */ 1826 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2); 1827 /* PHY_0x7c[1:0] reg_train_col_addr[9:8] */ 1828 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0); 1829 /* PHY_0x7d[7:0] reg_train_row_addr[7:0] */ 1830 clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0); 1831 /* PHY_0x7e[7:0] reg_train_row_addr[15:8] */ 1832 clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0); 1833 1834 /* PHY_0x71[3] wrtrain_check_data_value_random_gen */ 1835 clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3)); 1836 1837 /* config refresh timing */ 1838 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3; 1839 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1840 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32; 1841 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1842 DDR_PCTL2_RFSHTMG) & 0x3ff; 1843 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */ 1844 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff); 1845 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f); 1846 /* reg_phy_trfc */ 1847 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x); 1848 /* reg_max_refi_cnt */ 1849 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4); 1850 1851 /* choose training cs */ 1852 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6); 1853 1854 /* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */ 1855 /* 0: Use the write-leveling value. 
*/ 1856 /* 1: use reg0x233 0x237 0x2b3 0x2b7 */ 1857 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4)); 1858 1859 /* PHY_0x7a [0] reg_dq_wr_train_auto */ 1860 setbits_le32(PHY_REG(phy_base, 0x7a), 0x1); 1861 1862 /* PHY_0x7a [1] reg_dq_wr_train_en */ 1863 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1)); 1864 1865 send_a_refresh(dram); 1866 1867 while (1) { 1868 if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1) 1869 break; 1870 1871 udelay(1); 1872 if (timeout_us-- == 0) { 1873 printascii("error: write training timeout\n"); 1874 while (1) 1875 ; 1876 } 1877 } 1878 1879 /* Check the write train state */ 1880 if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) { 1881 printascii("error: write training error\n"); 1882 return -1; 1883 } 1884 1885 /* PHY_0x7a [1] reg_dq_wr_train_en */ 1886 clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1)); 1887 1888 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq); 1889 1890 /* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */ 1891 if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 1892 fsp_param[dst_fsp].vref_dq[cs] = 1893 ((readl(PHY_REG(phy_base, 0x384)) & 0x3f) + 1894 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2; 1895 /* add range info */ 1896 fsp_param[dst_fsp].vref_dq[cs] |= 1897 ((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1); 1898 } 1899 1900 if (dramtype == LPDDR3 && mhz <= 400) { 1901 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl); 1902 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl); 1903 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + 1904 DDR_PCTL2_INIT3); 1905 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK, 1906 dramtype); 1907 } 1908 1909 return 0; 1910 } 1911 1912 static int data_training(struct dram_info *dram, u32 cs, 1913 struct rv1126_sdram_params *sdram_params, u32 dst_fsp, 1914 u32 training_flag) 1915 { 1916 u32 ret = 0; 1917 1918 if (training_flag == FULL_TRAINING) 1919 training_flag = READ_GATE_TRAINING | WRITE_LEVELING | 1920 WRITE_TRAINING | READ_TRAINING; 1921 1922 if ((training_flag & 
WRITE_LEVELING) == WRITE_LEVELING) { 1923 ret = data_training_wl(dram, cs, 1924 sdram_params->base.dramtype, 1925 sdram_params->ch.cap_info.rank); 1926 if (ret != 0) 1927 goto out; 1928 } 1929 1930 if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) { 1931 ret = data_training_rg(dram, cs, 1932 sdram_params->base.dramtype); 1933 if (ret != 0) 1934 goto out; 1935 } 1936 1937 if ((training_flag & READ_TRAINING) == READ_TRAINING) { 1938 ret = data_training_rd(dram, cs, 1939 sdram_params->base.dramtype, 1940 sdram_params->base.ddr_freq); 1941 if (ret != 0) 1942 goto out; 1943 } 1944 1945 if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) { 1946 ret = data_training_wr(dram, cs, 1947 sdram_params->base.dramtype, 1948 sdram_params->base.ddr_freq, dst_fsp); 1949 if (ret != 0) 1950 goto out; 1951 } 1952 1953 out: 1954 return ret; 1955 } 1956 1957 static int get_wrlvl_val(struct dram_info *dram, 1958 struct rv1126_sdram_params *sdram_params) 1959 { 1960 int i, j, clk_skew; 1961 void __iomem *phy_base = dram->phy; 1962 u32 lp_stat; 1963 int ret; 1964 1965 lp_stat = low_power_update(dram, 0); 1966 1967 clk_skew = 0x1f; 1968 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3, 1969 sdram_params->base.dramtype); 1970 1971 ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING); 1972 if (sdram_params->ch.cap_info.rank == 2) 1973 ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING); 1974 1975 for (j = 0; j < 2; j++) 1976 for (i = 0; i < 4; i++) 1977 wrlvl_result[j][i] = 1978 (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) - 1979 clk_skew; 1980 1981 low_power_update(dram, lp_stat); 1982 1983 return ret; 1984 } 1985 1986 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 1987 static void init_rw_trn_result_struct(struct rw_trn_result *result, 1988 void __iomem *phy_base, u8 cs_num) 1989 { 1990 int i; 1991 1992 result->cs_num = cs_num; 1993 result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) & 1994 PHY_DQ_WIDTH_MASK; 1995 for (i = 0; i < 
FSP_NUM; i++) 1996 result->fsp_mhz[i] = 0; 1997 } 1998 1999 static void save_rw_trn_min_max(void __iomem *phy_base, 2000 struct cs_rw_trn_result *rd_result, 2001 struct cs_rw_trn_result *wr_result, 2002 u8 byte_en) 2003 { 2004 u16 phy_ofs; 2005 u8 dqs; 2006 u8 dq; 2007 2008 for (dqs = 0; dqs < BYTE_NUM; dqs++) { 2009 if ((byte_en & BIT(dqs)) == 0) 2010 continue; 2011 2012 /* Channel A or B (low or high 16 bit) */ 2013 phy_ofs = dqs < 2 ? 0x230 : 0x2b0; 2014 /* low or high 8 bit */ 2015 phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9; 2016 for (dq = 0; dq < 8; dq++) { 2017 rd_result->dqs[dqs].dq_min[dq] = 2018 readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq)); 2019 rd_result->dqs[dqs].dq_max[dq] = 2020 readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq)); 2021 wr_result->dqs[dqs].dq_min[dq] = 2022 readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq)); 2023 wr_result->dqs[dqs].dq_max[dq] = 2024 readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq)); 2025 } 2026 } 2027 } 2028 2029 static void save_rw_trn_deskew(void __iomem *phy_base, 2030 struct fsp_rw_trn_result *result, u8 cs_num, 2031 int min_val, bool rw) 2032 { 2033 u16 phy_ofs; 2034 u8 cs; 2035 u8 dq; 2036 2037 result->min_val = min_val; 2038 2039 for (cs = 0; cs < cs_num; cs++) { 2040 phy_ofs = cs == 0 ? 0x170 : 0x1a0; 2041 phy_ofs += rw == SKEW_RX_SIGNAL ? 
0x1 : 0x17; 2042 for (dq = 0; dq < 8; dq++) { 2043 result->cs[cs].dqs[0].dq_deskew[dq] = 2044 readb(PHY_REG(phy_base, phy_ofs + dq)); 2045 result->cs[cs].dqs[1].dq_deskew[dq] = 2046 readb(PHY_REG(phy_base, phy_ofs + 0xb + dq)); 2047 result->cs[cs].dqs[2].dq_deskew[dq] = 2048 readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq)); 2049 result->cs[cs].dqs[3].dq_deskew[dq] = 2050 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq)); 2051 } 2052 2053 result->cs[cs].dqs[0].dqs_deskew = 2054 readb(PHY_REG(phy_base, phy_ofs + 0x8)); 2055 result->cs[cs].dqs[1].dqs_deskew = 2056 readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8)); 2057 result->cs[cs].dqs[2].dqs_deskew = 2058 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8)); 2059 result->cs[cs].dqs[3].dqs_deskew = 2060 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8)); 2061 } 2062 } 2063 2064 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result) 2065 { 2066 result->flag = DDR_DQ_EYE_FLAG; 2067 memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result)); 2068 } 2069 #endif 2070 2071 static int high_freq_training(struct dram_info *dram, 2072 struct rv1126_sdram_params *sdram_params, 2073 u32 fsp) 2074 { 2075 u32 i, j; 2076 void __iomem *phy_base = dram->phy; 2077 u32 dramtype = sdram_params->base.dramtype; 2078 int min_val; 2079 int dqs_skew, clk_skew, ca_skew; 2080 u8 byte_en; 2081 int ret; 2082 2083 byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK; 2084 dqs_skew = 0; 2085 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) { 2086 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) { 2087 if ((byte_en & BIT(i)) != 0) 2088 dqs_skew += wrlvl_result[j][i]; 2089 } 2090 } 2091 dqs_skew = dqs_skew / 2092 (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw)); 2093 2094 clk_skew = 0x20 - dqs_skew; 2095 dqs_skew = 0x20; 2096 2097 if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 2098 min_val = 0xff; 2099 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) 2100 for (i = 0; i < 
ARRAY_SIZE(wrlvl_result[0]); i++) { 2101 if ((byte_en & BIT(i)) != 0) 2102 min_val = MIN(wrlvl_result[j][i], min_val); 2103 } 2104 2105 if (min_val < 0) { 2106 clk_skew = -min_val; 2107 ca_skew = -min_val; 2108 } else { 2109 clk_skew = 0; 2110 ca_skew = 0; 2111 } 2112 } else if (dramtype == LPDDR3) { 2113 ca_skew = clk_skew - 4; 2114 } else { 2115 ca_skew = clk_skew; 2116 } 2117 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3, 2118 dramtype); 2119 2120 writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233)); 2121 writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237)); 2122 writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 2123 writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 2124 ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING | 2125 READ_TRAINING | WRITE_TRAINING); 2126 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 2127 rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq; 2128 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0], 2129 &rw_trn_result.wr_fsp[fsp].cs[0], 2130 rw_trn_result.byte_en); 2131 #endif 2132 if (sdram_params->ch.cap_info.rank == 2) { 2133 writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233)); 2134 writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237)); 2135 writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3)); 2136 writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7)); 2137 ret |= data_training(dram, 1, sdram_params, fsp, 2138 READ_GATE_TRAINING | READ_TRAINING | 2139 WRITE_TRAINING); 2140 #if defined(CONFIG_CMD_DDR_TEST_TOOL) 2141 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1], 2142 &rw_trn_result.wr_fsp[fsp].cs[1], 2143 rw_trn_result.byte_en); 2144 #endif 2145 } 2146 if (ret) 2147 goto out; 2148 2149 record_dq_prebit(dram); 2150 2151 min_val = get_min_value(dram, SKEW_RX_SIGNAL, 2152 sdram_params->ch.cap_info.rank) * -1; 2153 modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL, 2154 
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* common smallest TX/CA skew, negated so it can be added back below */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* re-run gate training now that the deskew values moved */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}

/*
 * Program the NoC deviceconf with the detected ddrconfig and clear the
 * two ddrconfig-select bits in GRF noc_con0 so the msch setting is used.
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->deviceconf);
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}

/*
 * Update server-msch (NoC) timing registers from sdram_params, first
 * recomputing burstsize/burstpenalty from the controller's current
 * bus width and burst length.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	bw = 8 << sdram_params->ch.cap_info.bw;	/* data bus width in bits */
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3; 2210 2211 sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty = 2212 (bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4; 2213 2214 if (sdram_params->base.dramtype == LPDDR4 || 2215 sdram_params->base.dramtype == LPDDR4X) { 2216 sdram_params->ch.noc_timings.ddrmode.b.mwrsize = 2217 (bw == 16) ? 0x1 : 0x2; 2218 sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr = 2219 3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty; 2220 } 2221 2222 writel(sdram_params->ch.noc_timings.ddrtiminga0.d32, 2223 &dram->msch->ddrtiminga0); 2224 writel(sdram_params->ch.noc_timings.ddrtimingb0.d32, 2225 &dram->msch->ddrtimingb0); 2226 writel(sdram_params->ch.noc_timings.ddrtimingc0.d32, 2227 &dram->msch->ddrtimingc0); 2228 writel(sdram_params->ch.noc_timings.devtodev0.d32, 2229 &dram->msch->devtodev0); 2230 writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode); 2231 writel(sdram_params->ch.noc_timings.ddr4timing.d32, 2232 &dram->msch->ddr4timing); 2233 } 2234 2235 static int split_setup(struct dram_info *dram, 2236 struct rv1126_sdram_params *sdram_params) 2237 { 2238 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2239 u32 dramtype = sdram_params->base.dramtype; 2240 u32 split_size, split_mode; 2241 u64 cs_cap[2], cap; 2242 2243 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype); 2244 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype); 2245 /* only support the larger cap is in low 16bit */ 2246 if (cap_info->cs0_high16bit_row < cap_info->cs0_row) { 2247 cap = cs_cap[0] / (1 << (cap_info->cs0_row - 2248 cap_info->cs0_high16bit_row)); 2249 } else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) && 2250 (cap_info->rank == 2)) { 2251 if (!cap_info->cs1_high16bit_row) 2252 cap = cs_cap[0]; 2253 else 2254 cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row - 2255 cap_info->cs1_high16bit_row)); 2256 } else { 2257 goto out; 2258 } 2259 split_size = (u32)(cap >> 24) & 
		     SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	/* enable the split (bypass = 0) with the computed mode and size */
	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}

/*
 * Force the ddrsplit logic into bypass mode (split size 0) unless it is
 * already bypassed.
 */
static void split_bypass(struct dram_info *dram)
{
	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
		return;

	/* bypass split */
	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (0x1 << SPLIT_BYPASS_OFFSET) |
		     (0x0 << SPLIT_SIZE_OFFSET));
}

/*
 * Apply the final system-visible DRAM configuration: ddrconfig, the
 * os_reg2/3 capacity encoding in PMUGRF, msch devicesize and NoC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* cs_pst: bit position where the CS address bit is decoded */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			 6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	writel(((((cs_cap[1] >> 20)
	       / 64) & 0xff) << 8) |
	       (((cs_cap[0] >> 20) / 64) & 0xff),
	       &dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}

/*
 * Enable controller low-power features: auto clock-gating per dram type,
 * lpckdis, and self-refresh / power-down entry depending on the
 * configured sr_idle / pd_idle values.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* low 3 bits select the dram-type-specific low-power enable */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}

/*
 * Publish boot information (serial console, DDR memory layout, SoC info)
 * to the next boot stage via Rockchip ATAGs.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;
	u32 split, split_size;
	u64 reduce_cap = 0;	/* capacity lost to the msch split, if active */

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode =
			 SERIAL_M_MODE_M0;
	t_serial.id = 2;

	atags_destroy();
	atags_set_tag(ATAG_SERIAL, &t_serial);

	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	if (cap_info->row_3_4) {
		/* 3/4-row die: only three quarters of the cs cap is usable */
		cs_cap[0] = cs_cap[0] * 3 / 4;
		cs_cap[1] = cs_cap[1] * 3 / 4;
	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			 6 + 2;
	}

	/* two banks only when cs1 exists and its base is above 128 MiB */
	if (cs_cap[1] && cs_pst > 27) {
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
	}

	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);

	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0x1;
	t_socinfo.name = 0x1126;
	t_socinfo.flags = SOC_FLAGS_TDBT;
	atags_set_tag(ATAG_SOC_INFO, &t_socinfo);
}

/*
 * Print the detected DDR configuration, including the effective msch
 * split size (0 when the split is bypassed).
 */
static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
{
	u32 split;

	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
		split = 0;
	else
		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
			SPLIT_SIZE_MASK;

	sdram_print_ddr_info(&sdram_params->ch.cap_info,
			     &sdram_params->base, split);
}

/*
 * Rebuild the DDR3/DDR4 byte map from the per-byte read-gate training
 * result (rg_result: bit i set = byte i failed), update cap_info->bw and
 * the stored byte map, and re-derive the pctl params when the map changed.
 * Returns 1 when the map was modified, 0 when unchanged, -1 on error.
 */
static int modify_ddr34_bw_byte_map(u8 rg_result, struct rv1126_sdram_params *sdram_params)
{
	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info = (struct dq_map_info *)
				       ((void *)common_info + index->dq_map_index.offset *
4); 2443 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2444 u32 dramtype = sdram_params->base.dramtype; 2445 u32 byte_map = 0; 2446 u32 byte = 0; 2447 u32 byte_map_shift; 2448 int i; 2449 2450 if (dramtype == DDR3) 2451 byte_map_shift = 24; 2452 else if (dramtype == DDR4) 2453 byte_map_shift = 0; 2454 else 2455 return -1; 2456 2457 for (i = 0; i < 4; i++) { 2458 if ((rg_result & BIT(i)) == 0) { 2459 byte_map |= byte << (i * 2); 2460 byte++; 2461 } 2462 } 2463 if (byte != 1 && byte != 2 && byte != 4) { 2464 printascii("DTT result is abnormal: "); 2465 printdec(byte); 2466 printascii("byte\n"); 2467 return -1; 2468 } 2469 cap_info->bw = byte / 2; 2470 for (i = 0; i < 4; i++) { 2471 if ((rg_result & BIT(i)) != 0) { 2472 byte_map |= byte << (i * 2); 2473 byte++; 2474 } 2475 } 2476 2477 if ((u8)byte_map != (u8)(map_info->byte_map[0] >> byte_map_shift)) { 2478 clrsetbits_le32(&map_info->byte_map[0], 2479 0xff << byte_map_shift, byte_map << byte_map_shift); 2480 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, dramtype); 2481 return 1; 2482 } 2483 2484 return 0; 2485 } 2486 2487 int sdram_init_(struct dram_info *dram, struct rv1126_sdram_params *sdram_params, u32 post_init) 2488 { 2489 void __iomem *pctl_base = dram->pctl; 2490 void __iomem *phy_base = dram->phy; 2491 u32 ddr4_vref; 2492 u32 mr_tmp, tmp; 2493 2494 rkclk_configure_ddr(dram, sdram_params); 2495 2496 rkclk_ddr_reset(dram, 1, 1, 1, 1); 2497 udelay(10); 2498 2499 rkclk_ddr_reset(dram, 1, 1, 1, 0); 2500 phy_cfg(dram, sdram_params); 2501 2502 rkclk_ddr_reset(dram, 1, 1, 0, 0); 2503 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1); 2504 2505 rkclk_ddr_reset(dram, 1, 0, 0, 0); 2506 pctl_cfg(dram->pctl, &sdram_params->pctl_regs, 2507 dram->sr_idle, dram->pd_idle); 2508 2509 if (sdram_params->ch.cap_info.bw == 2) { 2510 /* 32bit interface use pageclose */ 2511 setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2); 2512 /* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */ 
2513 clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0); 2514 } else { 2515 clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2); 2516 } 2517 2518 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT 2519 u32 trefi; 2520 2521 tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG); 2522 trefi = (tmp >> 16) & 0xfff; 2523 writel((tmp & 0xf000ffff) | (trefi / 2) << 16, 2524 pctl_base + DDR_PCTL2_RFSHTMG); 2525 #endif 2526 2527 /* set frequency_mode */ 2528 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29); 2529 /* set target_frequency to Frequency 0 */ 2530 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0); 2531 2532 set_ds_odt(dram, sdram_params, 0); 2533 sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params); 2534 set_ctl_address_map(dram, sdram_params); 2535 2536 setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4)); 2537 2538 rkclk_ddr_reset(dram, 0, 0, 0, 0); 2539 2540 while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) 2541 continue; 2542 2543 if (sdram_params->base.dramtype == LPDDR3) { 2544 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3); 2545 } else if (sdram_params->base.dramtype == LPDDR4 || 2546 sdram_params->base.dramtype == LPDDR4X) { 2547 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6); 2548 /* MR11 */ 2549 pctl_write_mr(dram->pctl, 3, 11, 2550 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 2551 LPDDR4); 2552 /* MR12 */ 2553 pctl_write_mr(dram->pctl, 3, 12, 2554 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, 2555 LPDDR4); 2556 2557 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7); 2558 /* MR22 */ 2559 pctl_write_mr(dram->pctl, 3, 22, 2560 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 2561 LPDDR4); 2562 } 2563 2564 if (sdram_params->base.dramtype == DDR3 && post_init == 0) 2565 setbits_le32(PHY_REG(phy_base, 0xf), 0xf); 2566 tmp = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) & 0xf; 2567 2568 if (tmp != 0) { 2569 if (post_init != 0) { 2570 printascii("DTT cs0 error\n"); 2571 return -1; 2572 } 2573 if 
(sdram_params->base.dramtype != DDR3 || tmp == 0xf) 2574 return -1; 2575 } 2576 2577 if (sdram_params->base.dramtype == DDR3 && post_init == 0) { 2578 if (modify_ddr34_bw_byte_map((u8)tmp, sdram_params) != 0) 2579 return -1; 2580 } 2581 2582 if (sdram_params->base.dramtype == LPDDR4) { 2583 mr_tmp = read_mr(dram, 1, 14, LPDDR4); 2584 2585 if (mr_tmp != 0x4d) 2586 return -1; 2587 } 2588 2589 if (sdram_params->base.dramtype == LPDDR4 || 2590 sdram_params->base.dramtype == LPDDR4X) { 2591 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7); 2592 /* MR14 */ 2593 pctl_write_mr(dram->pctl, 3, 14, 2594 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 2595 LPDDR4); 2596 } 2597 if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) { 2598 if (data_training(dram, 1, sdram_params, 0, 2599 READ_GATE_TRAINING) != 0) { 2600 printascii("DTT cs1 error\n"); 2601 return -1; 2602 } 2603 } 2604 2605 if (sdram_params->base.dramtype == DDR4) { 2606 ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39; 2607 pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref, 2608 sdram_params->base.dramtype); 2609 } 2610 2611 dram_all_config(dram, sdram_params); 2612 enable_low_power(dram, sdram_params); 2613 2614 return 0; 2615 } 2616 2617 static u64 dram_detect_cap(struct dram_info *dram, 2618 struct rv1126_sdram_params *sdram_params, 2619 unsigned char channel) 2620 { 2621 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2622 void __iomem *pctl_base = dram->pctl; 2623 void __iomem *phy_base = dram->phy; 2624 u32 mr8; 2625 2626 u32 bktmp; 2627 u32 coltmp; 2628 u32 rowtmp; 2629 u32 cs; 2630 u32 dram_type = sdram_params->base.dramtype; 2631 u32 pwrctl; 2632 u32 i, dq_map; 2633 u32 byte1 = 0, byte0 = 0; 2634 2635 if (dram_type != LPDDR4 && dram_type != LPDDR4X) { 2636 if (dram_type != DDR4) { 2637 if (dram_type == DDR3) 2638 coltmp = 11; 2639 else 2640 coltmp = 12; 2641 bktmp = 3; 2642 if (dram_type == LPDDR2) 2643 rowtmp = 15; 2644 else 2645 rowtmp = 16; 2646 2647 if (sdram_detect_col(cap_info, coltmp) 
!= 0) 2648 goto cap_err; 2649 2650 sdram_detect_bank(cap_info, coltmp, bktmp); 2651 if (dram_type != LPDDR3) 2652 sdram_detect_dbw(cap_info, dram_type); 2653 } else { 2654 coltmp = 10; 2655 bktmp = 4; 2656 rowtmp = 17; 2657 2658 cap_info->col = 10; 2659 cap_info->bk = 2; 2660 sdram_detect_bg(cap_info, coltmp); 2661 } 2662 2663 if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0) 2664 goto cap_err; 2665 2666 sdram_detect_row_3_4(cap_info, coltmp, bktmp); 2667 } else { 2668 cap_info->col = 10; 2669 cap_info->bk = 3; 2670 mr8 = read_mr(dram, 1, 8, dram_type); 2671 cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0; 2672 mr8 = (mr8 >> 2) & 0xf; 2673 if (mr8 >= 0 && mr8 <= 6) { 2674 cap_info->cs0_row = 14 + (mr8 + 1) / 2; 2675 } else if (mr8 == 0xc) { 2676 cap_info->cs0_row = 13; 2677 } else { 2678 printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n"); 2679 goto cap_err; 2680 } 2681 if (cap_info->dbw == 0) 2682 cap_info->cs0_row++; 2683 cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0; 2684 if (cap_info->cs0_row >= 17) { 2685 printascii("Cap ERR: "); 2686 printascii("RV1126 LPDDR4/X cannot support row >= 17\n"); 2687 goto cap_err; 2688 // cap_info->cs0_row = 16; 2689 // cap_info->row_3_4 = 0; 2690 } 2691 } 2692 2693 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL); 2694 writel(0, pctl_base + DDR_PCTL2_PWRCTL); 2695 2696 if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0) 2697 cs = 1; 2698 else 2699 cs = 0; 2700 cap_info->rank = cs + 1; 2701 2702 setbits_le32(PHY_REG(phy_base, 0xf), 0xf); 2703 2704 if (dram_type != DDR3) { 2705 if ((data_training_rg(dram, 0, dram_type) & 0xf) == 0) { 2706 cap_info->bw = 2; 2707 } else { 2708 dq_map = readl(PHY_REG(phy_base, 0x4f)); 2709 for (i = 0; i < 4; i++) { 2710 if (((dq_map >> (i * 2)) & 0x3) == 0) 2711 byte0 = i; 2712 if (((dq_map >> (i * 2)) & 0x3) == 1) 2713 byte1 = i; 2714 } 2715 clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK, 2716 BIT(byte0) | BIT(byte1)); 2717 if (data_training(dram, 0, 
sdram_params, 0, READ_GATE_TRAINING) == 0) 2718 cap_info->bw = 1; 2719 else 2720 cap_info->bw = 0; 2721 } 2722 } 2723 if (cap_info->bw > 0) 2724 cap_info->dbw = 1; 2725 2726 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL); 2727 2728 cap_info->cs0_high16bit_row = cap_info->cs0_row; 2729 if (cs) { 2730 cap_info->cs1_row = cap_info->cs0_row; 2731 cap_info->cs1_high16bit_row = cap_info->cs0_row; 2732 } else { 2733 cap_info->cs1_row = 0; 2734 cap_info->cs1_high16bit_row = 0; 2735 } 2736 2737 if (dram_type == LPDDR3) 2738 sdram_detect_dbw(cap_info, dram_type); 2739 2740 return 0; 2741 cap_err: 2742 return -1; 2743 } 2744 2745 static int dram_detect_cs1_row(struct dram_info *dram, 2746 struct rv1126_sdram_params *sdram_params, 2747 unsigned char channel) 2748 { 2749 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2750 void __iomem *pctl_base = dram->pctl; 2751 u32 ret = 0; 2752 void __iomem *test_addr; 2753 u32 row, bktmp, coltmp, bw; 2754 u64 cs0_cap; 2755 u32 byte_mask; 2756 u32 cs_pst; 2757 u32 cs_add = 0; 2758 u32 max_row; 2759 2760 if (cap_info->rank == 2) { 2761 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) + 2762 6 + 2; 2763 if (cs_pst < 28) 2764 cs_add = 1; 2765 2766 cs0_cap = 1 << cs_pst; 2767 2768 if (sdram_params->base.dramtype == DDR4) { 2769 if (cap_info->dbw == 0) 2770 bktmp = cap_info->bk + 2; 2771 else 2772 bktmp = cap_info->bk + 1; 2773 } else { 2774 bktmp = cap_info->bk; 2775 } 2776 bw = cap_info->bw; 2777 coltmp = cap_info->col; 2778 2779 if (bw == 2) 2780 byte_mask = 0xFFFF; 2781 else 2782 byte_mask = 0xFF; 2783 2784 max_row = (cs_pst == 31) ? 30 : 31; 2785 2786 max_row = max_row - bktmp - coltmp - bw - cs_add + 1; 2787 2788 row = (cap_info->cs0_row > max_row) ? 
max_row : 2789 cap_info->cs0_row; 2790 2791 for (; row > 12; row--) { 2792 test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE + 2793 (u32)cs0_cap + 2794 (1ul << (row + bktmp + coltmp + 2795 cs_add + bw - 1ul))); 2796 2797 writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap); 2798 writel(PATTERN, test_addr); 2799 2800 if (((readl(test_addr) & byte_mask) == 2801 (PATTERN & byte_mask)) && 2802 ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) & 2803 byte_mask) == 0)) { 2804 ret = row; 2805 break; 2806 } 2807 } 2808 } 2809 2810 return ret; 2811 } 2812 2813 /* return: 0 = success, other = fail */ 2814 static int sdram_init_detect(struct dram_info *dram, 2815 struct rv1126_sdram_params *sdram_params) 2816 { 2817 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info; 2818 u32 ret; 2819 u32 sys_reg = 0; 2820 u32 sys_reg3 = 0; 2821 2822 if (sdram_init_(dram, sdram_params, 0)) { 2823 if (sdram_params->base.dramtype == DDR3) { 2824 if (sdram_init_(dram, sdram_params, 0)) 2825 return -1; 2826 } else { 2827 return -1; 2828 } 2829 } 2830 2831 if (sdram_params->base.dramtype == DDR3) { 2832 writel(PATTERN, CONFIG_SYS_SDRAM_BASE); 2833 if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN) 2834 return -1; 2835 } 2836 2837 split_bypass(dram); 2838 if (dram_detect_cap(dram, sdram_params, 0) != 0) 2839 return -1; 2840 2841 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, 2842 sdram_params->base.dramtype); 2843 ret = sdram_init_(dram, sdram_params, 1); 2844 if (ret != 0) 2845 goto out; 2846 2847 cap_info->cs1_row = 2848 dram_detect_cs1_row(dram, sdram_params, 0); 2849 if (cap_info->cs1_row) { 2850 sys_reg = readl(&dram->pmugrf->os_reg[2]); 2851 sys_reg3 = readl(&dram->pmugrf->os_reg[3]); 2852 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row, 2853 sys_reg, sys_reg3, 0); 2854 writel(sys_reg, &dram->pmugrf->os_reg[2]); 2855 writel(sys_reg3, &dram->pmugrf->os_reg[3]); 2856 } 2857 2858 sdram_detect_high_row(cap_info, sdram_params->base.dramtype); 2859 split_setup(dram, sdram_params); 2860 out: 2861 
	return ret;
}

/*
 * Pick the sdram_configs[] entry with the highest frequency not exceeding
 * freq_mhz. When freq_mhz is 0, fall back to the F0 frequency from the
 * drive/ODT info of the first config's dram type.
 */
struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
{
	u32 i;
	u32 offset = 0;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	if (!freq_mhz) {
		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
		if (ddr_info)
			freq_mhz =
				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
				DDR_FREQ_MASK;
		else
			freq_mhz = 0;
	}

	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
		if (sdram_configs[i].base.ddr_freq == 0 ||
		    freq_mhz < sdram_configs[i].base.ddr_freq)
			break;
	}
	/* i is the first config above freq_mhz; take the one just below it */
	offset = i == 0 ? 0 : i - 1;

	return &sdram_configs[offset];
}

/* pctl registers that must be re-written for each target frequency set */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};

/* phy registers (cl/cwl/al) that must follow a frequency change */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};

/*
 * Prepare pctl/phy timing registers and (for LPDDR4/4X) mode registers
 * for a frequency switch to the given fsp slot before the switch happens.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
writel(sdram_params->pctl_regs.pctl[j][1], 2939 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2940 pctl_need_update_reg[i]); 2941 find = j; 2942 break; 2943 } 2944 } 2945 } 2946 2947 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT 2948 u32 tmp, trefi; 2949 2950 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG); 2951 trefi = (tmp >> 16) & 0xfff; 2952 writel((tmp & 0xf000ffff) | (trefi / 2) << 16, 2953 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG); 2954 #endif 2955 2956 sw_set_ack(dram); 2957 2958 /* phy timing update */ 2959 if (dst_fsp == 0) 2960 phy_offset = 0; 2961 else 2962 phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3); 2963 /* cl cwl al update */ 2964 for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) { 2965 for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF; 2966 j++) { 2967 if (sdram_params->phy_regs.phy[j][0] == 2968 phy_need_update_reg[i]) { 2969 writel(sdram_params->phy_regs.phy[j][1], 2970 phy_base + phy_offset + 2971 phy_need_update_reg[i]); 2972 find = j; 2973 break; 2974 } 2975 } 2976 } 2977 2978 set_ds_odt(dram, sdram_params, dst_fsp); 2979 if (dramtype == LPDDR4 || dramtype == LPDDR4X) { 2980 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 2981 DDR_PCTL2_INIT4); 2982 /* MR13 */ 2983 pctl_write_mr(dram->pctl, 3, 13, 2984 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 2985 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | 2986 ((0x2 << 6) >> dst_fsp_lp4), dramtype); 2987 writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT & 2988 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) | 2989 ((0x2 << 6) >> dst_fsp_lp4), 2990 PHY_REG(phy_base, 0x1b)); 2991 /* MR3 */ 2992 pctl_write_mr(dram->pctl, 3, 3, 2993 mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & 2994 PCTL2_MR_MASK, 2995 dramtype); 2996 writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK, 2997 PHY_REG(phy_base, 0x19)); 2998 2999 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 3000 DDR_PCTL2_INIT3); 3001 /* MR1 */ 3002 pctl_write_mr(dram->pctl, 3, 1, 3003 mr_tmp >> 
PCTL2_LPDDR234_MR1_SHIFT & 3004 PCTL2_MR_MASK, 3005 dramtype); 3006 writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK, 3007 PHY_REG(phy_base, 0x17)); 3008 /* MR2 */ 3009 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK, 3010 dramtype); 3011 writel(mr_tmp & PCTL2_MR_MASK, 3012 PHY_REG(phy_base, 0x18)); 3013 3014 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 3015 DDR_PCTL2_INIT6); 3016 /* MR11 */ 3017 pctl_write_mr(dram->pctl, 3, 11, 3018 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 3019 dramtype); 3020 writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK, 3021 PHY_REG(phy_base, 0x1a)); 3022 /* MR12 */ 3023 pctl_write_mr(dram->pctl, 3, 12, 3024 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK, 3025 dramtype); 3026 3027 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + 3028 DDR_PCTL2_INIT7); 3029 /* MR22 */ 3030 pctl_write_mr(dram->pctl, 3, 22, 3031 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 3032 dramtype); 3033 writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK, 3034 PHY_REG(phy_base, 0x1d)); 3035 /* MR14 */ 3036 pctl_write_mr(dram->pctl, 3, 14, 3037 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 3038 dramtype); 3039 writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK, 3040 PHY_REG(phy_base, 0x1c)); 3041 } 3042 3043 update_noc_timing(dram, sdram_params); 3044 } 3045 3046 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp, 3047 struct rv1126_sdram_params *sdram_params) 3048 { 3049 void __iomem *pctl_base = dram->pctl; 3050 void __iomem *phy_base = dram->phy; 3051 struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp]; 3052 u32 temp, temp1; 3053 struct ddr2_3_4_lp2_3_info *ddr_info; 3054 3055 ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype); 3056 3057 p_fsp_param->freq_mhz = sdram_params->base.ddr_freq; 3058 3059 if (sdram_params->base.dramtype == LPDDR4 || 3060 sdram_params->base.dramtype == LPDDR4X) { 3061 p_fsp_param->rd_odt_up_en = 0; 3062 p_fsp_param->rd_odt_down_en = 1; 3063 
	} else {
		/* Per-direction read-ODT enables encoded in odt_info */
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/*
	 * Snapshot the PHY read-ODT strength register matching whichever
	 * termination direction is enabled (pull-up checked first).
	 */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	/* Current PHY drive-strength and vref settings for this FSP */
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/*
	 * Extract DRAM-side drive strength and ODT from the mode-register
	 * image the controller holds for the destination FSP (INIT3/INIT4/
	 * INIT6 of the per-frequency register copies).
	 */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		/* LPDDR3 ODT comes from the driver-global, not the MR image */
		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) &
		       PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * CA vref per channel: midpoint of (max of the two "upper"
		 * PHY results, min of the two "lower" ones).
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		/*
		 * Carry over bit 6 of PHY reg 0x1e into both vref values
		 * (presumably a vref range-select flag — confirm against
		 * the PHY datasheet).
		 */
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		/* Bit 3 of PHY reg 0x114: LPDDR4 drive pull-down enable */
		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* Record the NoC/memory-scheduler timings chosen for this FSP */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* Mark this FSP slot as populated */
	p_fsp_param->flag = FSP_FLAG;
}

#ifndef CONFIG_SPL_KERNEL_BOOT
/*
 * Publish the accumulated per-FSP parameters to a fixed DRAM location
 * (FSP_PARAM_STORE_ADDR) so later boot stages can pick them up.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
#endif

/*
 * Patch refresh/self-refresh-exit timing fields inside the pctl register
 * table for the given DRAM type, die capacity and target frequency.
 *
 * @pctl_regs: table of {reg_offset, value} pairs, terminated by 0xffffffff
 * @cap_info:  detected capacity info; die size selects the tRFC row
 * @dram_type: DDR3/DDR4/LPDDR3/LPDDR4/LPDDR4X (others: table untouched)
 * @freq:      target frequency in MHz
 *
 * tRFC values are in ns per die density; the +10 ns margin and the
 * "(x * freq + 999) / 1000" ceiling-divide convert ns to clock cycles.
 */
static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
			     struct sdram_cap_info *cap_info, u32 dram_type,
			     u32 freq)
{
	u64 cs0_cap;
	u32 die_cap;
	u32 trfc_ns, trfc4_ns;
	u32 trfc, txsnr;
	u32 txs_abort_fast = 0;
	u32 tmp;

	/* Die capacity in Mbit: CS0 bytes scaled by bus-width/die-width */
	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));

	switch (dram_type) {
	case DDR3:
		if (die_cap <= DIE_CAP_512MBIT)
			trfc_ns = 90;
		else if (die_cap <= DIE_CAP_1GBIT)
			trfc_ns = 110;
		else if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 160;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 260;
		else
			trfc_ns = 350;
		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	case DDR4:
		/* DDR4 also needs the faster tRFC4-based abort/fast exit */
		if (die_cap <= DIE_CAP_2GBIT) {
			trfc_ns = 160;
			trfc4_ns = 90;
		} else if (die_cap <= DIE_CAP_4GBIT) {
			trfc_ns = 260;
			trfc4_ns = 110;
		} else if (die_cap <= DIE_CAP_8GBIT) {
			trfc_ns = 350;
			trfc4_ns = 160;
		} else {
			trfc_ns = 550;
			trfc4_ns = 260;
		}
		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
		break;

	case LPDDR3:
		if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 130;
		else
			trfc_ns = 210;
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	case LPDDR4:
	case LPDDR4X:
		if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 130;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 180;
		else if (die_cap <= DIE_CAP_8GBIT)
			trfc_ns = 280;
		else
			trfc_ns = 380;
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	default:
		/* Unknown type: leave the register table unmodified */
		return;
	}
	trfc = (trfc_ns * freq + 999) / 1000;

	/* Walk the 0xffffffff-terminated table, patching known registers */
	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
		switch (pctl_regs->pctl[i][0]) {
		case DDR_PCTL2_RFSHTMG:
			tmp = pctl_regs->pctl[i][1];
			/* t_rfc_min (in 2-cycle units, hence /2) */
			tmp &= ~((u32)0x3ff);
			tmp |= ((trfc + 1) / 2) & 0x3ff;
			pctl_regs->pctl[i][1] = tmp;
			break;

		case DDR_PCTL2_DRAMTMG8:
			if (dram_type == DDR3 || dram_type == DDR4) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xs_x32 (32-cycle units, +1 margin) */
				tmp &= ~((u32)0x7f);
				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;

				if (dram_type == DDR4) {
					/* t_xs_abort_x32 */
					tmp &= ~((u32)(0x7f << 16));
					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
					/* t_xs_fast_x32 */
					tmp &= ~((u32)(0x7f << 24));
					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
				}

				pctl_regs->pctl[i][1] = tmp;
			}
			break;

		case DDR_PCTL2_DRAMTMG14:
			if (dram_type == LPDDR3 ||
			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xsr (2-cycle units) */
				tmp &= ~((u32)0xfff);
				tmp |= ((txsnr + 1) / 2) & 0xfff;
				pctl_regs->pctl[i][1] = tmp;
			}
			break;

		default:
			break;
		}
	}
}

/*
 * Switch the DRAM to a new clock rate via controller FSP slot @dst_fsp.
 *
 * @dram:         driver state (pctl/phy/cru/pmugrf register bases)
 * @sdram_params: active configuration; only rank and bus width are reused
 * @freq:         target frequency in MHz
 * @cur_freq:     NOTE(review): not referenced in this function body — the
 *                current FSP is read back from MSTR2 instead; confirm the
 *                parameter is intentionally unused.
 * @dst_fsp:      destination frequency-set-point slot (0..3)
 * @dst_fsp_lp4:  LPDDR4/LPDDR4X FSP-OP value written into MR13 bit 7
 * @training_en:  NOTE(review): not referenced in this function body —
 *                high_freq_training() runs unconditionally; confirm
 *                whether this flag should gate it.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* Disable low-power states during the switch; restored at the end */
	lp_stat = low_power_update(dram, 0);
	/* Start from the canned config for @freq, keep detected rank/bw */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	/* Fix up tRFC-related timings for the detected die capacity */
	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* Wait until the controller has left self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
		PCTL2_OPERATING_MODE_MASK) ==
	       PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * Decide whether the destination frequency runs with the DLL off,
	 * from MR1 bit 0 of the destination FSP image (DDR3: 1 = DLL
	 * disabled; DDR4: 0 = DLL off).
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	/* Same check for the currently-active FSP */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* If the DLL is currently on, turn it off via MR1 before switching */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	/* Block auto-refresh while reprogramming */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* Put the DRAM into self-refresh for the clock change */
	enter_sr(dram, 1);

	/* Enable the PHY bufferen override while the DFI is quiesced */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	/* quasi-dynamic write: drop dfi_init_complete_en under sw handshake */
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	/* Program DLL-off mode and disable SRX ZQCL for both FSPs */
	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* Gate the memory-scheduler clock before touching the PLLs */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);

	/* Gate the controller clocks via the secure GRF */
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* Hold the PHY in reset while re-locking DPLL and PHY PLL */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* Release bufferen override and ungate msch/controller clocks */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
	       &dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
	       (0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
	       (0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* Wait for the DFI init handshake to complete at the new clock */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
		PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* Select the destination FSP in the controller and the PHY */
	sw_set_req(dram);
	/* MSTR bit 29: kept as-is — meaning per uMCTL2 databook, confirm */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	/* Leave self-refresh */
	enter_sr(dram, 0);

	/* Pulse bit 5 of PHY reg 0x71 (PHY-internal strobe) */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/*
	 * Re-issue the mode registers for the new frequency from the
	 * destination FSP's INIT3/INIT4 (and INIT6/INIT7 for DDR4) images.
	 */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 &
			      PCTL2_MR_MASK,
			      dramtype);
		/* DLL staying on: pulse DLL reset via MR0 before clearing it */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				       PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				      PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* Replace MR13 bit 7 with dst_fsp_lp4 (FSP-OP select) */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	/* Re-enable auto-refresh now the switch is complete */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	/* Restore the low-power state saved at entry */
	low_power_update(dram, lp_stat);

	/* Record the resulting settings for this FSP slot */
	save_fsp_param(dram, dst_fsp, sdram_params_new);
}

/*
 * Cycle the DRAM through each configured frequency set point (f1..f3,
 * then the final f0), training at each, so all FSP slots hold valid
 * parameters.  With CONFIG_SPL_KERNEL_BOOT only the final f0 switch is
 * performed.  Frequencies come packed in ddr_freq0_1/ddr_freq2_3.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Clear both the in-memory and the DDR-resident FSP tables */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}

/*
 * Return the UART configuration word stored in the global_info section
 * of the common_info blob (offsets are in 4-byte units).
 */
int get_uart_config(void)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);

	return gbl_info->uart_info;
}

/* return: 0 = success, other = fail */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* Fixed peripheral bases for TPL (no DM/DT available yet) */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Sanity-check the common_info blob: header must be v2 and every
	 * per-type section must have the expected size and a non-zero
	 * offset before anything is read from it.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
	     sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0 ||
	    index->lp4x_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
	/* Type 8 reuses the LPDDR4 tables but runs the parts as LPDDR4X */
	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
		sdram_configs[j].base.dramtype = LPDDR4X;
#endif
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		/* Bit 10 of the first pctl entry: 2T timing enable */
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	/* Train all frequency set points, then land on the final rate */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
#endif /* CONFIG_TPL_BUILD */