xref: /OK3568_Linux_fs/u-boot/arch/powerpc/cpu/mpc83xx/spd_sdram.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * (C) Copyright 2006-2007 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2006
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
 * (C) Copyright 2003 Motorola Inc.
 * Xianghua Xiao (X.Xiao@motorola.com)
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <asm/processor.h>
#include <asm/io.h>
#include <i2c.h>
#include <spd.h>
#include <asm/mmu.h>
#include <spd_sdram.h>

DECLARE_GLOBAL_DATA_PTR;

void board_add_ram_info(int use_default)
{
	volatile immap_t *immap = (immap_t *) CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	char buf[32];

	printf(" (DDR%d", ((ddr->sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK)
			   >> SDRAM_CFG_SDRAM_TYPE_SHIFT) - 1);

#if defined(CONFIG_MPC8308) || defined(CONFIG_MPC831x)
	if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_16)
		puts(", 16-bit");
	else if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_32)
		puts(", 32-bit");
	else
		puts(", unknown width");
#else
	if (ddr->sdram_cfg & SDRAM_CFG_32_BE)
		puts(", 32-bit");
	else
		puts(", 64-bit");
#endif

	if (ddr->sdram_cfg & SDRAM_CFG_ECC_EN)
		puts(", ECC on");
	else
		puts(", ECC off");

	printf(", %s MHz)", strmhz(buf, gd->mem_clk));

#if defined(CONFIG_SYS_LB_SDRAM) && defined(CONFIG_SYS_LBC_SDRAM_SIZE)
	puts("\nSDRAM: ");
	print_size(CONFIG_SYS_LBC_SDRAM_SIZE * 1024 * 1024, " (local bus)");
#endif
}

#ifdef CONFIG_SPD_EEPROM
#ifndef	CONFIG_SYS_READ_SPD
#define CONFIG_SYS_READ_SPD	i2c_read
#endif
#ifndef SPD_EEPROM_OFFSET
#define SPD_EEPROM_OFFSET	0
#endif
#ifndef SPD_EEPROM_ADDR_LEN
#define SPD_EEPROM_ADDR_LEN	1
#endif

/*
 * Convert picoseconds into clock cycles (rounding up if needed).
 */
int
picos_to_clk(int picos)
{
	unsigned int mem_bus_clk;
	int clks;

	mem_bus_clk = gd->mem_clk >> 1;
	clks = picos / (1000000000 / (mem_bus_clk / 1000));
	if (picos % (1000000000 / (mem_bus_clk / 1000)) != 0)
		clks++;

	return clks;
}
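
/*
 * Worked example (illustrative numbers, not tied to any particular board):
 * with gd->mem_clk = 400 MHz the memory bus runs at 200 MHz, i.e. a
 * 5000 ps period, so picos_to_clk(12500) yields 12500/5000 = 2.5,
 * rounded up to 3 clocks.
 */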

unsigned int banksize(unsigned char row_dens)
{
	return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
}
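
/*
 * Note: row_dens is the SPD module-bank-density byte, in which each bit
 * stands for a size and the two low bits wrap around to the largest
 * sizes; rotating it right by two turns the byte into a count of 16 MiB
 * units (assumed interpretation, cross-check with the spd.h definitions).
 * Example: row_dens = 0x40 rotates to 0x10, and 0x10 << 24 = 0x10000000,
 * i.e. a 256 MiB bank.
 */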

int read_spd(uint addr)
{
	return ((int) addr);
}

#undef SPD_DEBUG
#ifdef SPD_DEBUG
static void spd_debug(spd_eeprom_t *spd)
{
	printf ("\nDIMM type:       %-18.18s\n", spd->mpart);
	printf ("SPD size:        %d\n", spd->info_size);
	printf ("EEPROM size:     %d\n", 1 << spd->chip_size);
	printf ("Memory type:     %d\n", spd->mem_type);
	printf ("Row addr:        %d\n", spd->nrow_addr);
	printf ("Column addr:     %d\n", spd->ncol_addr);
	printf ("# of rows:       %d\n", spd->nrows);
	printf ("Row density:     %d\n", spd->row_dens);
	printf ("# of banks:      %d\n", spd->nbanks);
	printf ("Data width:      %d\n",
			256 * spd->dataw_msb + spd->dataw_lsb);
	printf ("Chip width:      %d\n", spd->primw);
	printf ("Refresh rate:    %02X\n", spd->refresh);
	printf ("CAS latencies:   %02X\n", spd->cas_lat);
	printf ("Write latencies: %02X\n", spd->write_lat);
	printf ("tRP:             %d\n", spd->trp);
	printf ("tRCD:            %d\n", spd->trcd);
	printf ("\n");
}
#endif /* SPD_DEBUG */

long int spd_sdram()
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
	spd_eeprom_t spd;
	unsigned int n_ranks;
	unsigned int odt_rd_cfg, odt_wr_cfg;
	unsigned char twr_clk, twtr_clk;
	unsigned int sdram_type;
	unsigned int memsize;
	unsigned int law_size;
	unsigned char caslat, caslat_ctrl;
	unsigned int trfc, trfc_clk, trfc_low;
	unsigned int trcd_clk, trtp_clk;
	unsigned char cke_min_clk;
	unsigned char add_lat, wr_lat;
	unsigned char wr_data_delay;
	unsigned char four_act;
	unsigned char cpo;
	unsigned char burstlen;
	unsigned char odt_cfg, mode_odt_enable;
	unsigned int max_bus_clk;
	unsigned int max_data_rate, effective_data_rate;
	unsigned int ddrc_clk;
	unsigned int refresh_clk;
	unsigned int sdram_cfg;
	unsigned int ddrc_ecc_enable;
	unsigned int pvr = get_pvr();

	/*
	 * First disable the memory controller (could be enabled
	 * by the debugger)
	 */
	clrsetbits_be32(&ddr->sdram_cfg, SDRAM_CFG_MEM_EN, 0);
	sync();
	isync();

	/* Read SPD parameters with I2C */
	CONFIG_SYS_READ_SPD(SPD_EEPROM_ADDRESS, SPD_EEPROM_OFFSET,
		SPD_EEPROM_ADDR_LEN, (uchar *) &spd, sizeof(spd));
#ifdef SPD_DEBUG
	spd_debug(&spd);
#endif
	/* Check the memory type */
	if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
		debug("DDR: Module mem type is %02X\n", spd.mem_type);
		return 0;
	}

	/* Check the number of physical banks (ranks) */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		n_ranks = spd.nrows;
	} else {
		n_ranks = (spd.nrows & 0x7) + 1;
	}

	if (n_ranks > 2) {
		printf("DDR: The number of physical banks is %02X\n", n_ranks);
		return 0;
	}

	/* Check that the module's row address count is in the DDRC's range */
	if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
		printf("DDR: Row number is out of range of DDRC, row=%02X\n",
							 spd.nrow_addr);
		return 0;
	}

	/* Check that the module's column address count is in the DDRC's range */
	if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
		printf("DDR: Col number is out of range of DDRC, col=%02X\n",
							 spd.ncol_addr);
		return 0;
	}

#ifdef CONFIG_SYS_DDRCDR_VALUE
	/*
	 * Adjust DDR II IO voltage biasing.  It just makes it work.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		immap->sysconf.ddrcdr = CONFIG_SYS_DDRCDR_VALUE;
	}
	udelay(50000);
#endif

	/*
	 * ODT configuration recommendation from DDR Controller Chapter.
	 */
	odt_rd_cfg = 0;			/* Never assert ODT */
	odt_wr_cfg = 0;			/* Never assert ODT */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		odt_wr_cfg = 1;		/* Assert ODT on writes to CSn */
	}

	/* Setup DDR chip select register */
#ifdef CONFIG_SYS_83XX_DDR_USES_CS0
	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[0] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs0_bnds = 0x%08x\n", ddr->csbnds[0].csbnds);
	debug("cs0_config = 0x%08x\n", ddr->cs_config[0]);

	if (n_ranks == 2) {
		ddr->csbnds[1].csbnds = ( (banksize(spd.row_dens) >> 8)
				  | ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[1] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8) );
		debug("cs1_bnds = 0x%08x\n", ddr->csbnds[1].csbnds);
		debug("cs1_config = 0x%08x\n", ddr->cs_config[1]);
	}

#else
	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[2] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs2_bnds = 0x%08x\n", ddr->csbnds[2].csbnds);
	debug("cs2_config = 0x%08x\n", ddr->cs_config[2]);

	if (n_ranks == 2) {
		ddr->csbnds[3].csbnds = ( (banksize(spd.row_dens) >> 8)
				  | ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[3] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8) );
		debug("cs3_bnds = 0x%08x\n", ddr->csbnds[3].csbnds);
		debug("cs3_config = 0x%08x\n", ddr->cs_config[3]);
	}
#endif
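
	/*
	 * CSn_BNDS encoding as used above (a reading of the code, worth
	 * cross-checking against the MPC83xx reference manual): the low
	 * byte holds the ending address and bits 23:16 the starting
	 * address, both expressed as address bits 31:24, i.e. in 16 MiB
	 * units.  For two 256 MiB ranks this gives 0x0000000f for the
	 * first chip select and 0x0010001f for the second.
	 */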

	/*
	 * Figure out memory size in Megabytes.
	 */
	memsize = n_ranks * banksize(spd.row_dens) / 0x100000;

	/*
	 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
	 */
	law_size = 19 + __ilog2(memsize);

	/*
	 * Set up LAWBAR for all of DDR.
	 */
	ecm->bar = CONFIG_SYS_DDR_SDRAM_BASE & 0xfffff000;
	ecm->ar  = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
	debug("DDR:bar=0x%08x\n", ecm->bar);
	debug("DDR:ar=0x%08x\n", ecm->ar);
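
	/*
	 * The "19 +" above follows from the LAWAR SIZE encoding, where a
	 * field value of n selects a 2^(n+1) byte window: a size of
	 * memsize MiB is 2^(20 + log2(memsize)) bytes, so the field is
	 * 19 + log2(memsize).  For example, 256 MiB gives 19 + 8 = 27
	 * (2^28 bytes) and 16 MiB gives the minimum of 23.
	 */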

	/*
	 * Find the largest CAS by locating the highest 1 bit
	 * in the spd.cas_lat field.  Translate it to a DDR
	 * controller field value:
	 *
	 *	CAS Lat	DDR I	DDR II	Ctrl
	 *	Clocks	SPD Bit	SPD Bit	Value
	 *	-------	-------	-------	-----
	 *	1.0	0		0001
	 *	1.5	1		0010
	 *	2.0	2	2	0011
	 *	2.5	3		0100
	 *	3.0	4	3	0101
	 *	3.5	5		0110
	 *	4.0	6	4	0111
	 *	4.5			1000
	 *	5.0		5	1001
	 */
	caslat = __ilog2(spd.cas_lat);
	if ((spd.mem_type == SPD_MEMTYPE_DDR)
	    && (caslat > 6)) {
		printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
		return 0;
	} else if (spd.mem_type == SPD_MEMTYPE_DDR2
		   && (caslat < 2 || caslat > 5)) {
		printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
		       spd.cas_lat);
		return 0;
	}
	debug("DDR: caslat SPD bit is %d\n", caslat);
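
	/*
	 * Example of the lookup above (hypothetical SPD contents): a DDR2
	 * module advertising CL3 and CL4 has cas_lat = 0x18, so
	 * __ilog2(0x18) = 4 and the highest supported CAS latency is 4
	 * clocks; one of the slower supported latencies may still be
	 * picked below if the controller clock is low enough.
	 */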

	max_bus_clk = 1000 * 10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
			+ (spd.clk_cycle & 0x0f));
	max_data_rate = max_bus_clk * 2;

	debug("DDR:Module maximum data rate is: %d MHz\n", max_data_rate);

	ddrc_clk = gd->mem_clk / 1000000;
	effective_data_rate = 0;

	if (max_data_rate >= 460) { /* it is DDR2-800, 667, 533 */
		if (spd.cas_lat & 0x08)
			caslat = 3;
		else
			caslat = 4;
		if (ddrc_clk <= 460 && ddrc_clk > 350)
			effective_data_rate = 400;
		else if (ddrc_clk <= 350 && ddrc_clk > 280)
			effective_data_rate = 333;
		else if (ddrc_clk <= 280 && ddrc_clk > 230)
			effective_data_rate = 266;
		else
			effective_data_rate = 200;
	} else if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
		if (ddrc_clk <= 460 && ddrc_clk > 350) {
			/* DDR controller clk at 350~460 */
			effective_data_rate = 400; /* 5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			if (spd.clk_cycle2 == 0x60)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle3 == 0x75)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 323) { /* it is DDR 333 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 256) { /* it is DDR 266 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 190) { /* it is DDR 200 */
		if (ddrc_clk <= 350 && ddrc_clk > 230) {
			/* DDR controller clk at 230~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			caslat = caslat;
		}
	}

	debug("DDR:Effective data rate is: %dMHz\n", effective_data_rate);
	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);

	/*
	 * Errata DDR6 workaround: enable the input 2 cycles earlier.
	 * Applies to early silicon, including MPC834x Rev 1.0/1.1 and
	 * MPC8360 Rev 1.1/1.2.
	 */
	if (PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR) {
		if (caslat == 2)
			ddr->debug_reg = 0x201c0000; /* CL=2 */
		else if (caslat == 3)
			ddr->debug_reg = 0x202c0000; /* CL=2.5 */
		else if (caslat == 4)
			ddr->debug_reg = 0x202c0000; /* CL=3.0 */

		__asm__ __volatile__ ("sync");

		debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
	}

	/*
	 * Convert caslat clocks to DDR controller value.
	 * Force caslat_ctrl to be DDR Controller field-sized.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		caslat_ctrl = (caslat + 1) & 0x07;
	} else {
		caslat_ctrl = (2 * caslat - 1) & 0x0f;
	}
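
	/*
	 * This matches the translation table above: e.g. DDR2 with
	 * caslat = 4 clocks gives caslat_ctrl = 2 * 4 - 1 = 7 (0b0111),
	 * and DDR1 with caslat at SPD bit 3 (CL 2.5) gives 3 + 1 = 4
	 * (0b0100).
	 */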

	debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
	debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
	      caslat, caslat_ctrl);

	/*
	 * Timing Config 0.
	 * Avoid writing for DDR I.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		unsigned char taxpd_clk = 8;		/* By the book. */
		unsigned char tmrd_clk = 2;		/* By the book. */
		unsigned char act_pd_exit = 2;		/* Empirical? */
		unsigned char pre_pd_exit = 6;		/* Empirical? */

		ddr->timing_cfg_0 = (0
			| ((act_pd_exit & 0x7) << 20)	/* ACT_PD_EXIT */
			| ((pre_pd_exit & 0x7) << 16)	/* PRE_PD_EXIT */
			| ((taxpd_clk & 0xf) << 8)	/* ODT_PD_EXIT */
			| ((tmrd_clk & 0xf) << 0)	/* MRS_CYC */
			);
		debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
	}

	/*
	 * For DDR I, WRREC(Twr) and WRTORD(Twtr) are not in SPD,
	 * use conservative value.
	 * For DDR II, they are bytes 36 and 37, in quarter nanos.
	 */

	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		twr_clk = 3;	/* Clocks */
		twtr_clk = 1;	/* Clocks */
	} else {
		twr_clk = picos_to_clk(spd.twr * 250);
		twtr_clk = picos_to_clk(spd.twtr * 250);
		if (twtr_clk < 2)
			twtr_clk = 2;
	}
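
	/*
	 * Example with typical DDR2 SPD values: tWR = 15 ns is stored as
	 * 60 quarter-ns, so spd.twr * 250 = 15000 ps, which at a 200 MHz
	 * memory bus (5000 ps period) is picos_to_clk(15000) = 3 clocks.
	 */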

	/*
	 * Calculate Trfc, in picos.
	 * DDR I:  Byte 42 straight up in ns.
	 * DDR II: Byte 40 and 42 swizzled some, in ns.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		trfc = spd.trfc * 1000;		/* up to ps */
	} else {
		unsigned int byte40_table_ps[8] = {
			0,
			250,
			330,
			500,
			660,
			750,
			0,
			0
		};

		trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
			+ byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
	}
	trfc_clk = picos_to_clk(trfc);
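
	/*
	 * For instance (illustrative SPD contents): spd.trfc = 105 and
	 * spd.trctrfc_ext = 0x02 (extension bit clear, fraction code 1 =
	 * 250 ps) give trfc = 105000 + 250 = 105250 ps; the extension bit
	 * adds 256 ns for modules whose tRFC exceeds the 255 ns that the
	 * 8-bit byte 42 can express.
	 */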

	/*
	 * Trcd, Byte 29, from quarter nanos to ps and clocks.
	 */
	trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;

	/*
	 * Convert trfc_clk to DDR controller fields.  DDR I should
	 * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
	 * 83xx controller has an extended REFREC field of three bits.
	 * The controller automatically adds 8 clocks to this value,
	 * so preadjust it down 8 first before splitting it up.
	 */
	trfc_low = (trfc_clk - 8) & 0xf;

	ddr->timing_cfg_1 =
	    (((picos_to_clk(spd.trp * 250) & 0x07) << 28 ) |	/* PRETOACT */
	     ((picos_to_clk(spd.tras * 1000) & 0x0f ) << 24 ) | /* ACTTOPRE */
	     (trcd_clk << 20 ) |				/* ACTTORW */
	     (caslat_ctrl << 16 ) |				/* CASLAT */
	     (trfc_low << 12 ) |				/* REFREC */
	     ((twr_clk & 0x07) << 8) |				/* WRREC */
	     ((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |	/* ACTTOACT */
	     ((twtr_clk & 0x07) << 0)				/* WRTORD */
	    );

	/*
	 * Additive Latency
	 * For DDR I, 0.
	 * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
	 * which comes from Trcd, and also note that:
	 *	add_lat + caslat must be >= 4
	 */
	add_lat = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2
	    && (odt_wr_cfg || odt_rd_cfg)
	    && (caslat < 4)) {
		add_lat = 4 - caslat;
		if ((add_lat + caslat) < 4) {
			add_lat = 0;
		}
	}
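
	/*
	 * E.g. a DDR2 module running at CL = 3 with ODT enabled gets
	 * add_lat = 1, so the posted-CAS read latency AL + CL still
	 * reaches the minimum of 4 clocks noted above.
	 */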

	/*
	 * Write Data Delay
	 * Historically 0x2 == 4/8 clock delay.
	 * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
	 */
	wr_data_delay = 2;
#ifdef CONFIG_SYS_DDR_WRITE_DATA_DELAY
	wr_data_delay = CONFIG_SYS_DDR_WRITE_DATA_DELAY;
#endif

	/*
	 * Write Latency
	 * Read to Precharge
	 * Minimum CKE Pulse Width.
	 * Four Activate Window
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		/*
		 * This is a lie.  It should really be 1, but if it is
		 * set to 1, bits overlap into the old controller's
		 * otherwise unused ACSM field.  If we leave it 0, then
		 * the HW will magically treat it as 1 for DDR 1.  Oh Yea.
		 */
		wr_lat = 0;

		trtp_clk = 2;		/* By the book. */
		cke_min_clk = 1;	/* By the book. */
		four_act = 1;		/* By the book. */

	} else {
		wr_lat = caslat - 1;

		/* Convert SPD value from quarter nanos to picos. */
		trtp_clk = picos_to_clk(spd.trtp * 250);
		if (trtp_clk < 2)
			trtp_clk = 2;
		trtp_clk += add_lat;

		cke_min_clk = 3;	/* By the book. */
		four_act = picos_to_clk(37500);	/* By the book. 1k pages? */
	}

	/*
	 * Empirically set ~MCAS-to-preamble override for DDR 2.
	 * Your mileage will vary.
	 */
	cpo = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
#ifdef CONFIG_SYS_DDR_CPO
		cpo = CONFIG_SYS_DDR_CPO;
#else
		if (effective_data_rate == 266) {
			cpo = 0x4;		/* READ_LAT + 1/2 */
		} else if (effective_data_rate == 333) {
			cpo = 0x6;		/* READ_LAT + 1 */
		} else if (effective_data_rate == 400) {
			cpo = 0x7;		/* READ_LAT + 5/4 */
		} else {
			/* Automatic calibration */
			cpo = 0x1f;
		}
#endif
	}

	ddr->timing_cfg_2 = (0
		| ((add_lat & 0x7) << 28)		/* ADD_LAT */
		| ((cpo & 0x1f) << 23)			/* CPO */
		| ((wr_lat & 0x7) << 19)		/* WR_LAT */
		| ((trtp_clk & 0x7) << 13)		/* RD_TO_PRE */
		| ((wr_data_delay & 0x7) << 10)		/* WR_DATA_DELAY */
		| ((cke_min_clk & 0x7) << 6)		/* CKE_PLS */
		| ((four_act & 0x1f) << 0)		/* FOUR_ACT */
		);

	debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
	debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);

	/* Check DIMM data bus width */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			burstlen = 0x03; /* 32 bit data bus, burst len is 8 */
		else
			burstlen = 0x02; /* 32 bit data bus, burst len is 4 */
		debug("\n   DDR DIMM: data bus width is 32 bit");
	} else {
		burstlen = 0x02; /* Others act as 64 bit bus, burst len is 4 */
		debug("\n   DDR DIMM: data bus width is 64 bit");
	}

	/* Is this an ECC DDR chip? */
	if (spd.config == 0x02)
		debug(" with ECC\n");
	else
		debug(" without ECC\n");

	/*
	 * Burst length is always 4 for a 64-bit data bus and 8 for a
	 * 32-bit data bus; burst type is sequential.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		switch (caslat) {
		case 1:
			ddr->sdram_mode = 0x50 | burstlen; /* CL=1.5 */
			break;
		case 2:
			ddr->sdram_mode = 0x20 | burstlen; /* CL=2.0 */
			break;
		case 3:
			ddr->sdram_mode = 0x60 | burstlen; /* CL=2.5 */
			break;
		case 4:
			ddr->sdram_mode = 0x30 | burstlen; /* CL=3.0 */
			break;
		default:
			printf("DDR:only CL 1.5, 2.0, 2.5, 3.0 is supported\n");
			return 0;
		}
	} else {
		mode_odt_enable = 0x0;			/* Default disabled */
		if (odt_wr_cfg || odt_rd_cfg) {
			/*
			 * Bits 6 and 2 in Extended MRS(1)
			 * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
			 * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
			 */
			mode_odt_enable = 0x40;		/* 150 Ohm */
		}

		ddr->sdram_mode =
			(0
			 | (1 << (16 + 10))		/* DQS Differential disable */
#ifdef CONFIG_SYS_DDR_MODE_WEAK
			 | (1 << (16 + 1))		/* weak driver (~60%) */
#endif
			 | (add_lat << (16 + 3))	/* Additive Latency in EMRS1 */
			 | (mode_odt_enable << 16)	/* ODT Enable in EMRS1 */
			 | ((twr_clk - 1) << 9)		/* Write Recovery Autopre */
			 | (caslat << 4)		/* caslat */
			 | (burstlen << 0)		/* Burst length */
			);
	}
	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);

	/*
	 * Clear EMRS2 and EMRS3.
	 */
	ddr->sdram_mode2 = 0;
	debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);

	switch (spd.refresh) {
		case 0x00:
		case 0x80:
			refresh_clk = picos_to_clk(15625000);
			break;
		case 0x01:
		case 0x81:
			refresh_clk = picos_to_clk(3900000);
			break;
		case 0x02:
		case 0x82:
			refresh_clk = picos_to_clk(7800000);
			break;
		case 0x03:
		case 0x83:
			refresh_clk = picos_to_clk(31300000);
			break;
		case 0x04:
		case 0x84:
			refresh_clk = picos_to_clk(62500000);
			break;
		case 0x05:
		case 0x85:
			refresh_clk = picos_to_clk(125000000);
			break;
		default:
			refresh_clk = 0x512;
			break;
	}
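
	/*
	 * The SPD refresh byte encodes the refresh period; bit 7 only
	 * flags self-refresh support, which is why 0x0n and 0x8n are
	 * treated alike above.  E.g. code 0x82 means the standard 7.8 us
	 * interval, which at a 200 MHz memory bus works out to
	 * picos_to_clk(7800000) = 1560 clocks between refreshes.
	 */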

	/*
	 * Set BSTOPRE to 0x100 for page mode
	 * If auto-precharge is used, set BSTOPRE = 0
	 */
	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);

	/*
	 * SDRAM Cfg 2
	 */
	odt_cfg = 0;
#ifndef CONFIG_NEVER_ASSERT_ODT_TO_CPU
	if (odt_rd_cfg | odt_wr_cfg) {
		odt_cfg = 0x2;		/* ODT to IOs during reads */
	}
#endif
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		ddr->sdram_cfg2 = (0
			    | (0 << 26)		/* True DQS */
			    | (odt_cfg << 21)	/* ODT only read */
			    | (1 << 12)		/* 1 refresh at a time */
			    );

		debug("DDR: sdram_cfg2  = 0x%08x\n", ddr->sdram_cfg2);
	}

#ifdef CONFIG_SYS_DDR_SDRAM_CLK_CNTL	/* Optional platform specific value */
	ddr->sdram_clk_cntl = CONFIG_SYS_DDR_SDRAM_CLK_CNTL;
#endif
	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);

	asm("sync;isync");

	udelay(600);

	/*
	 * Figure out the settings for the sdram_cfg register. Build up
	 * the value in 'sdram_cfg' before writing since the write into
	 * the register will actually enable the memory controller, and all
	 * settings must be done before enabling.
	 *
	 * sdram_cfg[0]   = 1 (ddr sdram logic enable)
	 * sdram_cfg[1]   = 1 (self-refresh-enable)
	 * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
	 *			010 DDR 1 SDRAM
	 *			011 DDR 2 SDRAM
	 * sdram_cfg[12] = 0 (32_BE = 0, 64-bit bus mode)
	 * sdram_cfg[13] = 0 (8_BE = 0, 4-beat bursts)
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR)
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR1;
	else
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR2;

	sdram_cfg = (0
		     | SDRAM_CFG_MEM_EN		/* DDR enable */
		     | SDRAM_CFG_SREN		/* Self refresh */
		     | sdram_type		/* SDRAM type */
		     );

	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
	if (spd.mod_attr & 0x02)
		sdram_cfg |= SDRAM_CFG_RD_EN;

	/* The DIMM is 32 bits wide */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			sdram_cfg |= SDRAM_CFG_32_BE | SDRAM_CFG_8_BE;
		if (spd.mem_type == SPD_MEMTYPE_DDR2)
			sdram_cfg |= SDRAM_CFG_32_BE;
	}

	ddrc_ecc_enable = 0;

#if defined(CONFIG_DDR_ECC)
	/* Enable ECC with sdram_cfg[2] */
	if (spd.config == 0x02) {
		sdram_cfg |= 0x20000000;
		ddrc_ecc_enable = 1;
		/* disable error detection */
		ddr->err_disable = ~ECC_ERROR_ENABLE;
		/*
		 * Set the single-bit error threshold to the maximum value
		 * and reset the error counter to zero.
		 */
		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
				(0 << ECC_ERROR_MAN_SBEC_SHIFT);
	}

	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
#endif
	debug("   DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON":"OFF");

#if defined(CONFIG_DDR_2T_TIMING)
	/*
	 * Enable 2T timing by setting sdram_cfg[16].
	 */
	sdram_cfg |= SDRAM_CFG_2T_EN;
#endif
	/* Enable controller, and GO! */
	ddr->sdram_cfg = sdram_cfg;
	asm("sync;isync");
	udelay(500);

	debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
	return memsize; /* in MBytes */
}
#endif /* CONFIG_SPD_EEPROM */

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
/*
 * Use timebase counter, get_timer() is not available
 * at this point of initialization yet.
 */
static __inline__ unsigned long get_tbms (void)
{
	unsigned long tbl;
	unsigned long tbu1, tbu2;
	unsigned long ms;
	unsigned long long tmp;

	ulong tbclk = get_tbclk();

	/* get the timebase ticks */
	do {
		asm volatile ("mftbu %0":"=r" (tbu1):);
		asm volatile ("mftb %0":"=r" (tbl):);
		asm volatile ("mftbu %0":"=r" (tbu2):);
	} while (tbu1 != tbu2);

	/* convert ticks to ms */
	tmp = (unsigned long long)(tbu1);
	tmp = (tmp << 32);
	tmp += (unsigned long long)(tbl);
	ms = tmp / (tbclk / 1000);

	return ms;
}

/*
 * Initialize all of memory for ECC, then enable errors.
 */
void ddr_enable_ecc(unsigned int dram_size)
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	unsigned long t_start, t_end;
	register u64 *p;
	register uint size;
	unsigned int pattern[2];

	icache_enable();
	t_start = get_tbms();
	pattern[0] = 0xdeadbeef;
	pattern[1] = 0xdeadbeef;

#if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
	dma_meminit(pattern[0], dram_size);
#else
	debug("ddr init: CPU FP write method\n");
	size = dram_size;
	for (p = 0; p < (u64*)(size); p++) {
		ppcDWstore((u32*)p, pattern);
	}
	__asm__ __volatile__ ("sync");
#endif

	t_end = get_tbms();
	icache_disable();

	debug("\nREADY!!\n");
	debug("ddr init duration: %ld ms\n", t_end - t_start);

	/* Clear All ECC Errors */
	if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME)
		ddr->err_detect |= ECC_ERROR_DETECT_MME;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE)
		ddr->err_detect |= ECC_ERROR_DETECT_MBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE)
		ddr->err_detect |= ECC_ERROR_DETECT_SBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE)
		ddr->err_detect |= ECC_ERROR_DETECT_MSE;

	/* Disable ECC-Interrupts */
	ddr->err_int_en &= ECC_ERR_INT_DISABLE;

	/* Enable errors for ECC */
	ddr->err_disable &= ECC_ERROR_ENABLE;

	__asm__ __volatile__ ("sync");
	__asm__ __volatile__ ("isync");
}
#endif	/* CONFIG_DDR_ECC */