/*
 * sunxi DRAM controller initialization
 * (C) Copyright 2012 Henrik Nordstrom <henrik@henriknordstrom.net>
 * (C) Copyright 2013 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
 *
 * Based on sun4i Linux kernel sources mach-sunxi/pm/standby/dram*.c
 * and earlier U-Boot Allwinner A10 SPL work
 *
 * (C) Copyright 2007-2012
 * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
 * Berg Xing <bergxing@allwinnertech.com>
 * Tom Cubie <tangliang@allwinnertech.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

/*
 * Unfortunately the only documentation we have on the sun7i DRAM
 * controller is Allwinner boot0 + boot1 code, and that code uses
 * magic numbers & shifts with no explanations. Hence this code is
 * rather undocumented and full of magic.
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/dram.h>
#include <asm/arch/timer.h>
#include <asm/arch/sys_proto.h>

#define CPU_CFG_CHIP_VER(n)	((n) << 6)
#define CPU_CFG_CHIP_VER_MASK	CPU_CFG_CHIP_VER(0x3)
#define CPU_CFG_CHIP_REV_A	0x0
#define CPU_CFG_CHIP_REV_C1	0x1
#define CPU_CFG_CHIP_REV_C2	0x2
#define CPU_CFG_CHIP_REV_B	0x3

/*
 * Wait up to 1s for the mask to be clear in the given reg.
 */
static inline void await_bits_clear(u32 *reg, u32 mask)
{
	mctl_await_completion(reg, mask, 0);
}

/*
 * Wait up to 1s for the mask to be set in the given reg.
 */
static inline void await_bits_set(u32 *reg, u32 mask)
{
	mctl_await_completion(reg, mask, mask);
}

/*
 * This performs the external DRAM reset by driving the RESET pin low and
 * then high again. According to the DDR3 spec, the RESET pin needs to be
 * kept low for at least 200 us.
 */
static void mctl_ddr3_reset(void)
{
	struct sunxi_dram_reg *dram =
			(struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

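	/*
	 * The branches below suggest that the polarity of the DRAM_MCR_RESET
	 * bit is inverted on A10 (sun4i) revisions other than rev A; this is
	 * inferred from the boot0-derived code, not from any documentation.
	 */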
#ifdef CONFIG_MACH_SUN4I
	struct sunxi_timer_reg *timer =
			(struct sunxi_timer_reg *)SUNXI_TIMER_BASE;
	u32 reg_val;

	writel(0, &timer->cpu_cfg);
	reg_val = readl(&timer->cpu_cfg);

	if ((reg_val & CPU_CFG_CHIP_VER_MASK) !=
	    CPU_CFG_CHIP_VER(CPU_CFG_CHIP_REV_A)) {
		setbits_le32(&dram->mcr, DRAM_MCR_RESET);
		udelay(200);
		clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
	} else
#endif
	{
		clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
		udelay(200);
		setbits_le32(&dram->mcr, DRAM_MCR_RESET);
	}
	/* After the RESET pin is de-asserted, the DDR3 spec requires waiting
	 * for an additional 500 us before driving the CKE pin (Clock Enable)
	 * high. The duration of this delay can be configured in the SDR_IDCR
	 * (Initialization Delay Configuration Register) and applied
	 * automatically by the DRAM controller during the DDR3 initialization
	 * step. But SDR_IDCR has a limited range on sun4i/sun5i hardware and
	 * can't provide a sufficient delay at DRAM clock frequencies higher
	 * than 524 MHz (while the Allwinner A13 supports DRAM clock
	 * frequencies up to 533 MHz according to the datasheet). Additionally,
	 * there is no official documentation for the SDR_IDCR register
	 * anywhere, and there is always a chance that we are interpreting it
	 * wrong. Better be safe than sorry, so add an explicit delay here. */
	udelay(500);
}

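/*
 * Set the DRAM pad drive strength / operating mode bits. The 0xffc value and
 * the extra (0x3 << 28) bits used on sun7i are undocumented magic carried
 * over from the Allwinner boot0 code.
 */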
static void mctl_set_drive(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN7I
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3) | (0x3 << 28),
#else
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3),
#endif
			DRAM_MCR_MODE_EN(0x3) |
			0xffc);
}

static void mctl_itm_disable(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrsetbits_le32(&dram->ccr, DRAM_CCR_INIT, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_enable(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrbits_le32(&dram->ccr, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_reset(void)
{
	mctl_itm_disable();
	udelay(1); /* ITM reset needs a bit of delay */
	mctl_itm_enable();
	udelay(1);
}

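/*
 * Enable and reset the command/clock DLL (DLL0). The 'phase' argument is
 * tpr3 from dram_para; judging by the shift below, bits [21:16] appear to
 * hold the DLL0 phase adjustment.
 */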
static void mctl_enable_dll0(u32 phase)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrsetbits_le32(&dram->dllcr[0], 0x3f << 6,
			((phase >> 16) & 0x3f) << 6);
	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET, DRAM_DLLCR_DISABLE);
	udelay(2);

	clrbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET | DRAM_DLLCR_DISABLE);
	udelay(22);

	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_DISABLE, DRAM_DLLCR_NRESET);
	udelay(22);
}

/* Get the number of DDR byte lanes */
static u32 mctl_get_number_of_lanes(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	if ((readl(&dram->dcr) & DRAM_DCR_BUS_WIDTH_MASK) ==
	    DRAM_DCR_BUS_WIDTH(DRAM_DCR_BUS_WIDTH_32BIT))
		return 4;
	else
		return 2;
}

/*
 * Note: This differs from pm/standby in that it checks the bus width
 */
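/*
 * The low nibbles of 'phase' (tpr3) appear to carry per-lane DQS DLL phase
 * adjustments, one 4-bit field per byte lane, consumed from bit 0 upwards.
 */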
static void mctl_enable_dllx(u32 phase)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 i, number_of_lanes;

	number_of_lanes = mctl_get_number_of_lanes();

	for (i = 1; i <= number_of_lanes; i++) {
		clrsetbits_le32(&dram->dllcr[i], 0xf << 14,
				(phase & 0xf) << 14);
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET,
				DRAM_DLLCR_DISABLE);
		phase >>= 4;
	}
	udelay(2);

	for (i = 1; i <= number_of_lanes; i++)
		clrbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET |
			     DRAM_DLLCR_DISABLE);
	udelay(22);

	for (i = 1; i <= number_of_lanes; i++)
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_DISABLE,
				DRAM_DLLCR_NRESET);
	udelay(22);
}

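/*
 * Host port configuration values (one per host port register), taken from
 * the Allwinner boot0 tables. The individual bit fields are not publicly
 * documented.
 */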
static u32 hpcr_value[32] = {
#ifdef CONFIG_MACH_SUN5I
	0, 0, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0,
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0
#endif
#ifdef CONFIG_MACH_SUN4I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x5031,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x1031, 0x0301, 0x0301, 0x0731
#endif
#ifdef CONFIG_MACH_SUN7I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0x0301,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x0001, 0x1031, 0, 0x1031
	/* last row differs from boot0 source table
	 * 0x1031, 0x0301, 0x0301, 0x0731
	 * but boot0 code skips #28 and #30, and sets #29 and #31 to the
	 * value from #28 entry (0x1031)
	 */
#endif
};

static void mctl_configure_hostport(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 i;

	for (i = 0; i < 32; i++)
		writel(hpcr_value[i], &dram->hpcr[i]);
}

static void mctl_setup_dram_clock(u32 clk, u32 mbus_clk)
{
	u32 reg_val;
	struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	u32 pll5p_clk, pll6x_clk;
	u32 pll5p_div, pll6x_div;
	u32 pll5p_rate, pll6x_rate;

	/* setup DRAM PLL */
	reg_val = readl(&ccm->pll5_cfg);
	reg_val &= ~CCM_PLL5_CTRL_M_MASK;	/* set M to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_K_MASK;	/* set K to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_N_MASK;	/* set N to 0 (x0) */
	reg_val &= ~CCM_PLL5_CTRL_P_MASK;	/* set P to 0 (x1) */
#ifdef CONFIG_OLD_SUNXI_KERNEL_COMPAT
	/* Old kernels are hardcoded to P=1 (divide by 2) */
	reg_val |= CCM_PLL5_CTRL_P(1);
#endif
	if (clk >= 540 && clk < 552) {
		/* dram = 540MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(15));
	} else if (clk >= 512 && clk < 528) {
		/* dram = 512MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(4));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(16));
	} else if (clk >= 496 && clk < 504) {
		/* dram = 496MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(31));
	} else if (clk >= 468 && clk < 480) {
		/* dram = 468MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(13));
	} else if (clk >= 396 && clk < 408) {
		/* dram = 396MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(11));
	} else {
		/* any other frequency that is a multiple of 24 */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(clk / 24));
	}
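	/*
	 * The resulting PLL5 rate should be 24 MHz * N * K / M; e.g. the
	 * 540 MHz setting above uses N=15, K=3, M=2: 24 * 15 * 3 / 2 = 540.
	 */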
	reg_val &= ~CCM_PLL5_CTRL_VCO_GAIN;	/* PLL VCO Gain off */
	reg_val |= CCM_PLL5_CTRL_EN;		/* PLL On */
	writel(reg_val, &ccm->pll5_cfg);
	udelay(5500);

	setbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_DDR_CLK);

#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN7I)
	/* reset GPS */
	clrbits_le32(&ccm->gps_clk_cfg, CCM_GPS_CTRL_RESET | CCM_GPS_CTRL_GATE);
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
	udelay(1);
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
#endif

	/* setup MBUS clock */
	if (!mbus_clk)
		mbus_clk = 300;

	/* PLL5P and PLL6 are the potential clock sources for MBUS */
	pll6x_clk = clock_get_pll6() / 1000000;
#ifdef CONFIG_MACH_SUN7I
	pll6x_clk *= 2; /* sun7i uses PLL6*2, sun5i uses just PLL6 */
#endif
	pll5p_clk = clock_get_pll5p() / 1000000;
	pll6x_div = DIV_ROUND_UP(pll6x_clk, mbus_clk);
	pll5p_div = DIV_ROUND_UP(pll5p_clk, mbus_clk);
	pll6x_rate = pll6x_clk / pll6x_div;
	pll5p_rate = pll5p_clk / pll5p_div;

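	/*
	 * For example, assuming pll6x_clk = 600 and pll5p_clk = 480 with the
	 * default mbus_clk = 300: pll6x_div = 2 (300 MHz) and pll5p_div = 2
	 * (240 MHz), so PLL6 gets picked as the MBUS clock source below.
	 */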
	if (pll6x_div <= 16 && pll6x_rate > pll5p_rate) {
		/* use PLL6 as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL6) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll6x_div));
	} else if (pll5p_div <= 16) {
		/* use PLL5P as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL5) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll5p_div));
	} else {
		panic("Bad mbus_clk\n");
	}
	writel(reg_val, &ccm->mbus_clk_cfg);

	/*
	 * open DRAMC AHB & DLL register clock
	 * close it first
	 */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
	udelay(22);

	/* then open it */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
	udelay(22);
}

/*
 * The data from rslrX and rdgrX registers (X=rank) is stored
 * in a single 32-bit value using the following format:
 *   bits [31:26] - DQS gating system latency for byte lane 3
 *   bits [25:24] - DQS gating phase select for byte lane 3
 *   bits [23:18] - DQS gating system latency for byte lane 2
 *   bits [17:16] - DQS gating phase select for byte lane 2
 *   bits [15:10] - DQS gating system latency for byte lane 1
 *   bits [ 9:8 ] - DQS gating phase select for byte lane 1
 *   bits [ 7:2 ] - DQS gating system latency for byte lane 0
 *   bits [ 1:0 ] - DQS gating phase select for byte lane 0
 */
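/*
 * The dqs_gating_delay override parameter packs one byte per lane in the same
 * order: bits [7:2] system latency, bits [1:0] phase select. For example,
 * 0x06060606 would program system latency 1 and phase select 2 for all four
 * byte lanes.
 */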
static void mctl_set_dqs_gating_delay(int rank, u32 dqs_gating_delay)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 lane, number_of_lanes = mctl_get_number_of_lanes();
	/* DQS gating system latency (3 bits per lane, in cycles) */
	u32 slr = readl(rank == 0 ? &dram->rslr0 : &dram->rslr1);
	/* DQS gating phase select (2 bits per lane: 90, 180, 270, 360) */
	u32 dgr = readl(rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
	for (lane = 0; lane < number_of_lanes; lane++) {
		u32 tmp = dqs_gating_delay >> (lane * 8);
		slr &= ~(7 << (lane * 3));
		slr |= ((tmp >> 2) & 7) << (lane * 3);
		dgr &= ~(3 << (lane * 2));
		dgr |= (tmp & 3) << (lane * 2);
	}
	writel(slr, rank == 0 ? &dram->rslr0 : &dram->rslr1);
	writel(dgr, rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
}

static int dramc_scan_readpipe(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;

	/* data training trigger */
	clrbits_le32(&dram->csr, DRAM_CSR_FAILED);
	setbits_le32(&dram->ccr, DRAM_CCR_DATA_TRAINING);

	/* check whether data training process has completed */
	await_bits_clear(&dram->ccr, DRAM_CCR_DATA_TRAINING);

	/* check data training result */
	reg_val = readl(&dram->csr);
	if (reg_val & DRAM_CSR_FAILED)
		return -1;

	return 0;
}

static void dramc_clock_output_en(u32 on)
{
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	if (on)
		setbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
	else
		clrbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
#endif
#ifdef CONFIG_MACH_SUN4I
	struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	if (on)
		setbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
	else
		clrbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
#endif
}

/* tRFC in nanoseconds for different densities (from the DDR3 spec) */
static const u16 tRFC_DDR3_table[6] = {
	/* 256Mb  512Mb  1Gb  2Gb  4Gb  8Gb */
	   90,    90,    110, 160, 300, 350
};

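/*
 * tRFC is converted from nanoseconds to DRAM clock cycles by rounding up, and
 * tREFI is set to roughly 7.8 us worth of cycles. For example, at 408 MHz
 * with 2Gb chips (tRFC = 160 ns) this works out to tRFC = 66 and
 * tREFI = 3182 cycles.
 */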
static void dramc_set_autorefresh_cycle(u32 clk, u32 density)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 tRFC, tREFI;

	tRFC = (tRFC_DDR3_table[density] * clk + 999) / 1000;
	tREFI = (7987 * clk) >> 10;	/* <= 7.8us */

	writel(DRAM_DRR_TREFI(tREFI) | DRAM_DRR_TRFC(tRFC), &dram->drr);
}

/* Calculate the value for A11, A10, A9 bits in MR0 (write recovery) */
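/*
 * For example, at 408 MHz: twr_ck = (15 * 408 + 999) / 1000 = 7, so 3 is
 * returned, which should select a write recovery of 7 clock cycles in MR0.
 */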
static u32 ddr3_write_recovery(u32 clk)
{
	u32 twr_ns = 15; /* DDR3 spec says that it is 15ns for all speed bins */
	u32 twr_ck = (twr_ns * clk + 999) / 1000;
	if (twr_ck < 5)
		return 1;
	else if (twr_ck <= 8)
		return twr_ck - 4;
	else if (twr_ck <= 10)
		return 5;
	else
		return 6;
}

/*
 * If the dram->ppwrsctl (SDR_DPCR) register has the lowest bit set to 1, this
 * means that DRAM is currently in self-refresh mode and retaining the old
 * data. Since we have no idea what to do in this situation yet, just set this
 * register to 0 and initialize DRAM in the same way as on any normal reboot
 * (discarding whatever was stored there).
 *
 * Note: on sun7i hardware, the highest 16 bits need to be set to the 0x1651
 * magic value for this write operation to have any effect. On sun5i hardware
 * this magic value is not necessary. And on sun4i hardware the writes to this
 * register seem to have no effect at all.
 */
static void mctl_disable_power_save(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	writel(0x16510000, &dram->ppwrsctl);
}

/*
 * After the DRAM is powered up or reset, the DDR3 spec requires waiting at
 * least 500 us before driving the CKE pin (Clock Enable) high. The dram->idcr
 * (SDR_IDCR) register appears to configure this delay, which gets applied
 * right at the time when the DRAM initialization is activated in the
 * 'mctl_ddr3_initialize' function.
 */
static void mctl_set_cke_delay(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/* The CKE delay is represented in DRAM clock cycles, multiplied by N
	 * (where N=2 for sun4i/sun5i and N=3 for sun7i). Here it is set to
	 * the maximum possible value 0x1ffff, just like in the Allwinner
	 * boot0 bootloader. The resulting delay value is somewhere between
	 * ~0.4 ms (sun5i with 648 MHz DRAM clock speed) and ~1.1 ms (sun7i
	 * with 360 MHz DRAM clock speed). */
	setbits_le32(&dram->idcr, 0x1ffff);
}

/*
 * This triggers the DRAM initialization. Among other things, it sends the
 * mode register values to the DRAM. Very likely the ZQCL command also gets
 * executed (to do the initial impedance calibration on the DRAM side of the
 * wire). The memory controller and the PHY must already be configured before
 * calling this function.
 */
static void mctl_ddr3_initialize(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	setbits_le32(&dram->ccr, DRAM_CCR_INIT);
	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);
}

/*
 * Perform impedance calibration on the DRAM controller side of the wire.
 */
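/*
 * The low byte of 'zq' selects the impedance divider used for calibration
 * against the external resistor; if bits [27:8] are non-zero, they appear to
 * be used directly as a precomputed ZDATA value instead of running the
 * calibration.
 */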
static void mctl_set_impedance(u32 zq, bool odt_en)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;
	u32 zprog = zq & 0xFF, zdata = (zq >> 8) & 0xFFFFF;

#ifndef CONFIG_MACH_SUN7I
	/* It appears that some kind of automatically initiated default
	 * ZQ calibration is already in progress at this point on sun4i/sun5i
	 * hardware, but not on sun7i. So it is reasonable to wait for its
	 * completion before doing anything else. */
	await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
#endif

	/* ZQ calibration is not really useful unless ODT is enabled */
	if (!odt_en)
		return;

#ifdef CONFIG_MACH_SUN7I
	/* Enabling ODT in SDR_IOCR on sun7i hardware results in a deadlock
	 * unless bit 24 is set in SDR_ZQCR1. Not much is known about the
	 * SDR_ZQCR1 register, but there are hints indicating that it might
	 * be related to periodic impedance re-calibration. This particular
	 * magic value is borrowed from the Allwinner boot0 bootloader, and
	 * using it helps to avoid trouble. */
	writel((1 << 24) | (1 << 1), &dram->zqcr1);
#endif

	/* Needed at least for sun5i, because it does not self-clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	if (zdata) {
		/* Set the user supplied impedance data */
		reg_val = DRAM_ZQCR0_ZDEN | zdata;
		writel(reg_val, &dram->zqcr0);
		/* no need to wait, this takes effect immediately */
	} else {
		/* Do the calibration using the external resistor */
		reg_val = DRAM_ZQCR0_ZCAL | DRAM_ZQCR0_IMP_DIV(zprog);
		writel(reg_val, &dram->zqcr0);
		/* Wait for the new impedance configuration to settle */
		await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
	}

	/* Needed at least for sun5i, because it does not self-clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	/* Set the I/O configure register */
	writel(DRAM_IOCR_ODT_EN, &dram->iocr);
}

static unsigned long dramc_init_helper(struct dram_para *para)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;
	u32 density;
	int ret_val;

	/*
	 * only single rank DDR3 is supported by this code, even though the
	 * hardware can theoretically support DDR2 and up to two ranks
	 */
	if (para->type != DRAM_MEMORY_TYPE_DDR3 || para->rank_num != 1)
		return 0;

	/* setup the DRAM related clocks */
	mctl_setup_dram_clock(para->clock, para->mbus_clock);

	/* Disable any pad power save control */
	mctl_disable_power_save();

	mctl_set_drive();

	/* dram clock off */
	dramc_clock_output_en(0);

#ifdef CONFIG_MACH_SUN4I
	/* select dram controller 1 */
	writel(DRAM_CSEL_MAGIC, &dram->csel);
#endif

	mctl_itm_disable();
	mctl_enable_dll0(para->tpr3);

	/* configure external DRAM */
	reg_val = DRAM_DCR_TYPE_DDR3;
	reg_val |= DRAM_DCR_IO_WIDTH(para->io_width >> 3);

	if (para->density == 256)
		density = DRAM_DCR_CHIP_DENSITY_256M;
	else if (para->density == 512)
		density = DRAM_DCR_CHIP_DENSITY_512M;
	else if (para->density == 1024)
		density = DRAM_DCR_CHIP_DENSITY_1024M;
	else if (para->density == 2048)
		density = DRAM_DCR_CHIP_DENSITY_2048M;
	else if (para->density == 4096)
		density = DRAM_DCR_CHIP_DENSITY_4096M;
	else if (para->density == 8192)
		density = DRAM_DCR_CHIP_DENSITY_8192M;
	else
		density = DRAM_DCR_CHIP_DENSITY_256M;

	reg_val |= DRAM_DCR_CHIP_DENSITY(density);
	reg_val |= DRAM_DCR_BUS_WIDTH((para->bus_width >> 3) - 1);
	reg_val |= DRAM_DCR_RANK_SEL(para->rank_num - 1);
	reg_val |= DRAM_DCR_CMD_RANK_ALL;
	reg_val |= DRAM_DCR_MODE(DRAM_DCR_MODE_INTERLEAVE);
	writel(reg_val, &dram->dcr);

	dramc_clock_output_en(1);

	mctl_set_impedance(para->zq, para->odt_en);

	mctl_set_cke_delay();

	mctl_ddr3_reset();

	udelay(1);

	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);

	mctl_enable_dllx(para->tpr3);

	/* set the refresh period */
	dramc_set_autorefresh_cycle(para->clock, density);

	/* set timing parameters */
	writel(para->tpr0, &dram->tpr0);
	writel(para->tpr1, &dram->tpr1);
	writel(para->tpr2, &dram->tpr2);

	reg_val = DRAM_MR_BURST_LENGTH(0x0);
#if (defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I))
	reg_val |= DRAM_MR_POWER_DOWN;
#endif
	reg_val |= DRAM_MR_CAS_LAT(para->cas - 4);
	reg_val |= DRAM_MR_WRITE_RECOVERY(ddr3_write_recovery(para->clock));
	writel(reg_val, &dram->mr);

	writel(para->emr1, &dram->emr);
	writel(para->emr2, &dram->emr2);
	writel(para->emr3, &dram->emr3);

	/* disable drift compensation and set passive DQS window mode */
	clrsetbits_le32(&dram->ccr, DRAM_CCR_DQS_DRIFT_COMP, DRAM_CCR_DQS_GATE);

#ifdef CONFIG_MACH_SUN7I
	/* Command rate timing mode 2T & 1T */
	if (para->tpr4 & 0x1)
		setbits_le32(&dram->ccr, DRAM_CCR_COMMAND_RATE_1T);
#endif
	/* initialize external DRAM */
	mctl_ddr3_initialize();

	/* scan the read pipe value */
	mctl_itm_enable();

	/* Hardware DQS gate training */
	ret_val = dramc_scan_readpipe();

	if (ret_val < 0)
		return 0;

	/* allow overriding the DQS training results with a custom delay */
	if (para->dqs_gating_delay)
		mctl_set_dqs_gating_delay(0, para->dqs_gating_delay);

	/* set the DQS gating window type */
	if (para->active_windowing)
		clrbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);
	else
		setbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);

	mctl_itm_reset();

	/* configure all host ports */
	mctl_configure_hostport();

	return get_ram_size((long *)PHYS_SDRAM_0, PHYS_SDRAM_0_SIZE);
}

unsigned long dramc_init(struct dram_para *para)
{
	unsigned long dram_size, actual_density;

	/* bail out if no dram configuration is provided */
	if (!para)
		return 0;

	/* if everything is known, then autodetection is not necessary */
	if (para->io_width && para->bus_width && para->density)
		return dramc_init_helper(para);

	/* try to autodetect the DRAM bus width and density */
	para->io_width = 16;
	para->bus_width = 32;
#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN5I)
	/* only A0-A14 address lines on A10/A13, limiting the max density to 4096 */
	para->density = 4096;
#else
	/* all A0-A15 address lines on A20, which allows a density of 8192 */
	para->density = 8192;
#endif

	dram_size = dramc_init_helper(para);
	if (!dram_size) {
		/* if 32-bit bus width failed, try 16-bit bus width instead */
		para->bus_width = 16;
		dram_size = dramc_init_helper(para);
		if (!dram_size) {
			/* if 16-bit bus width also failed, then bail out */
			return dram_size;
		}
	}

	/* check if we need to adjust the density */
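	/*
	 * dram_size is in bytes; shifting right by 17 converts it to Mbit and
	 * scaling by io_width / bus_width gives the per-chip density. E.g. a
	 * detected 1 GiB on a 32-bit bus built from x16 chips works out to
	 * (0x40000000 >> 17) * 16 / 32 = 4096 Mbit.
	 */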
	actual_density = (dram_size >> 17) * para->io_width / para->bus_width;

	if (actual_density != para->density) {
		/* update the density and re-initialize DRAM again */
		para->density = actual_density;
		dram_size = dramc_init_helper(para);
	}

	return dram_size;
}