xref: /OK3568_Linux_fs/u-boot/arch/x86/cpu/quark/smc.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * Copyright (C) 2013, Intel Corporation
 * Copyright (C) 2015, Bin Meng <bmeng.cn@gmail.com>
 *
 * Ported from Intel released Quark UEFI BIOS
 * QuarkSocPkg/QuarkNorthCluster/MemoryInit/Pei
 *
 * SPDX-License-Identifier:	Intel
 */

#include <common.h>
#include <pci.h>
#include <asm/arch/device.h>
#include <asm/arch/mrc.h>
#include <asm/arch/msg_port.h>
#include "mrc_util.h"
#include "hte.h"
#include "smc.h"

/* t_rfc values (in picoseconds) per density */
static const uint32_t t_rfc[5] = {
	90000,	/* 512Mb */
	110000,	/* 1Gb */
	160000,	/* 2Gb */
	300000,	/* 4Gb */
	350000,	/* 8Gb */
};

/* t_ck clock period in picoseconds per speed index 800, 1066, 1333 */
static const uint32_t t_ck[3] = {
	2500,
	1875,
	1500
};
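/*
 * Note: the speed index corresponds to the DDR3 data rate: 2500 ps is a
 * 400 MHz clock (DDR3-800), 1875 ps is ~533 MHz (DDR3-1066) and 1500 ps is
 * ~667 MHz (DDR3-1333).
 */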

/* Static delay values for each supported platform, indexed by PLATFORM_ID */
static const uint16_t ddr_wclk[] = {193, 158};
static const uint16_t ddr_wctl[] = {1, 217};
static const uint16_t ddr_wcmd[] = {1, 220};

#ifdef BACKUP_RCVN
static const uint16_t ddr_rcvn[] = {129, 498};
#endif

#ifdef BACKUP_WDQS
static const uint16_t ddr_wdqs[] = {65, 289};
#endif

#ifdef BACKUP_RDQS
static const uint8_t ddr_rdqs[] = {32, 24};
#endif

#ifdef BACKUP_WDQ
static const uint16_t ddr_wdq[] = {32, 257};
#endif

/* Stop self refresh driven by MCU */
void clear_self_refresh(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* clear the PMSTS Channel Self Refresh bits */
	mrc_write_mask(MEM_CTLR, PMSTS, PMSTS_DISR, PMSTS_DISR);

	LEAVEFN();
}

/* Initialize the timing registers in the MCU (DTR0..DTR4) */
void prog_ddr_timing_control(struct mrc_params *mrc_params)
{
	uint8_t tcl, wl;
	uint8_t trp, trcd, tras, twr, twtr, trrd, trtp, tfaw;
	uint32_t tck;
	u32 dtr0, dtr1, dtr2, dtr3, dtr4;
	u32 tmp1, tmp2;

	ENTERFN();

	/* mcu_init starts */
	mrc_post_code(0x02, 0x00);

	dtr0 = msg_port_read(MEM_CTLR, DTR0);
	dtr1 = msg_port_read(MEM_CTLR, DTR1);
	dtr2 = msg_port_read(MEM_CTLR, DTR2);
	dtr3 = msg_port_read(MEM_CTLR, DTR3);
	dtr4 = msg_port_read(MEM_CTLR, DTR4);

	tck = t_ck[mrc_params->ddr_speed];	/* Clock in picoseconds */
	tcl = mrc_params->params.cl;		/* CAS latency in clocks */
	trp = tcl;	/* Per CAT MRC */
	trcd = tcl;	/* Per CAT MRC */
	tras = MCEIL(mrc_params->params.ras, tck);

	/* Per JEDEC: tWR=15000ps DDR2/3 from 800-1600 */
	twr = MCEIL(15000, tck);
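	/*
	 * Assuming MCEIL() rounds the picosecond value up to a whole number
	 * of clocks, this gives e.g. ceil(15000 / 1875) = 8 clocks at
	 * DDR3-1066 (tck = 1875 ps).
	 */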

	twtr = MCEIL(mrc_params->params.wtr, tck);
	trrd = MCEIL(mrc_params->params.rrd, tck);
	trtp = 4;	/* Valid for 800 and 1066, use 5 for 1333 */
	tfaw = MCEIL(mrc_params->params.faw, tck);

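	/*
	 * DDR3 CAS write latency is 5, 6 or 7 clocks for 800, 1066 and 1333
	 * respectively, i.e. 5 plus the speed index used below.
	 */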
	wl = 5 + mrc_params->ddr_speed;

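	/*
	 * The DTR fields hold clock counts with a register-specific offset
	 * subtracted (e.g. tCL - 5, tRAS - 14), which is why the raw values
	 * below are biased before being shifted into place.
	 */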
	dtr0 &= ~DTR0_DFREQ_MASK;
	dtr0 |= mrc_params->ddr_speed;
	dtr0 &= ~DTR0_TCL_MASK;
	tmp1 = tcl - 5;
	dtr0 |= ((tcl - 5) << 12);
	dtr0 &= ~DTR0_TRP_MASK;
	dtr0 |= ((trp - 5) << 4);	/* 5 bit DRAM Clock */
	dtr0 &= ~DTR0_TRCD_MASK;
	dtr0 |= ((trcd - 5) << 8);	/* 5 bit DRAM Clock */

	dtr1 &= ~DTR1_TWCL_MASK;
	tmp2 = wl - 3;
	dtr1 |= (wl - 3);
	dtr1 &= ~DTR1_TWTP_MASK;
	dtr1 |= ((wl + 4 + twr - 14) << 8);	/* Change to tWTP */
	dtr1 &= ~DTR1_TRTP_MASK;
	dtr1 |= ((MMAX(trtp, 4) - 3) << 28);	/* 4 bit DRAM Clock */
	dtr1 &= ~DTR1_TRRD_MASK;
	dtr1 |= ((trrd - 4) << 24);		/* 4 bit DRAM Clock */
	dtr1 &= ~DTR1_TCMD_MASK;
	dtr1 |= (1 << 4);
	dtr1 &= ~DTR1_TRAS_MASK;
	dtr1 |= ((tras - 14) << 20);		/* 6 bit DRAM Clock */
	dtr1 &= ~DTR1_TFAW_MASK;
	dtr1 |= ((((tfaw + 1) >> 1) - 5) << 16);/* 4 bit DRAM Clock */
	/* Set 4 Clock CAS to CAS delay (multi-burst) */
	dtr1 &= ~DTR1_TCCD_MASK;

	dtr2 &= ~DTR2_TRRDR_MASK;
	dtr2 |= 1;
	dtr2 &= ~DTR2_TWWDR_MASK;
	dtr2 |= (2 << 8);
	dtr2 &= ~DTR2_TRWDR_MASK;
	dtr2 |= (2 << 16);

	dtr3 &= ~DTR3_TWRDR_MASK;
	dtr3 |= 2;
	dtr3 &= ~DTR3_TXXXX_MASK;
	dtr3 |= (2 << 4);

	dtr3 &= ~DTR3_TRWSR_MASK;
	if (mrc_params->ddr_speed == DDRFREQ_800) {
		/* Extended RW delay (+1) */
		dtr3 |= ((tcl - 5 + 1) << 8);
	} else if (mrc_params->ddr_speed == DDRFREQ_1066) {
		/* Extended RW delay (+1) */
		dtr3 |= ((tcl - 5 + 1) << 8);
	}

	dtr3 &= ~DTR3_TWRSR_MASK;
	dtr3 |= ((4 + wl + twtr - 11) << 13);

	dtr3 &= ~DTR3_TXP_MASK;
	if (mrc_params->ddr_speed == DDRFREQ_800)
		dtr3 |= ((MMAX(0, 1 - 1)) << 22);
	else
		dtr3 |= ((MMAX(0, 2 - 1)) << 22);

	dtr4 &= ~DTR4_WRODTSTRT_MASK;
	dtr4 |= 1;
	dtr4 &= ~DTR4_WRODTSTOP_MASK;
	dtr4 |= (1 << 4);
	dtr4 &= ~DTR4_XXXX1_MASK;
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 8);
	dtr4 &= ~DTR4_XXXX2_MASK;
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 12);
	dtr4 &= ~(DTR4_ODTDIS | DTR4_TRGSTRDIS);

	msg_port_write(MEM_CTLR, DTR0, dtr0);
	msg_port_write(MEM_CTLR, DTR1, dtr1);
	msg_port_write(MEM_CTLR, DTR2, dtr2);
	msg_port_write(MEM_CTLR, DTR3, dtr3);
	msg_port_write(MEM_CTLR, DTR4, dtr4);

	LEAVEFN();
}

/* Configure MCU before the JEDEC init sequence */
void prog_decode_before_jedec(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 drfc;
	u32 dcal;
	u32 dsch;
	u32 dpmc0;

	ENTERFN();

	/* Disable power saving features */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 |= (DPMC0_CLKGTDIS | DPMC0_DISPWRDN);
	dpmc0 &= ~DPMC0_PCLSTO_MASK;
	dpmc0 &= ~DPMC0_DYNSREN;
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	/* Disable out of order transactions */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch |= (DSCH_OOODIS | DSCH_NEWBYPDIS);
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Disable issuing the REF command */
	drfc = msg_port_read(MEM_CTLR, DRFC);
	drfc &= ~DRFC_TREFI_MASK;
	msg_port_write(MEM_CTLR, DRFC, drfc);

	/* Disable ZQ calibration short */
	dcal = msg_port_read(MEM_CTLR, DCAL);
	dcal &= ~DCAL_ZQCINT_MASK;
	dcal &= ~DCAL_SRXZQCL_MASK;
	msg_port_write(MEM_CTLR, DCAL, dcal);

	/*
	 * Training is performed in address mode 0, so rank population has
	 * limited impact; however, the simulator complains if a non-existent
	 * rank is enabled.
	 */
	drp = 0;
	if (mrc_params->rank_enables & 1)
		drp |= DRP_RKEN0;
	if (mrc_params->rank_enables & 2)
		drp |= DRP_RKEN1;
	msg_port_write(MEM_CTLR, DRP, drp);

	LEAVEFN();
}

/*
 * After Cold Reset, BIOS should set COLDWAKE bit to 1 before
 * sending the WAKE message to the Dunit.
 *
 * For Standby Exit, or any other mode in which the DRAM is in
 * SR, this bit must be set to 0.
 */
void perform_ddr_reset(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* Set COLDWAKE bit before sending the WAKE message */
	mrc_write_mask(MEM_CTLR, DRMC, DRMC_COLDWAKE, DRMC_COLDWAKE);

	/* Send wake command to DUNIT (MUST be done before JEDEC) */
	dram_wake_command();

	/* Set default value */
	msg_port_write(MEM_CTLR, DRMC,
		       mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0);

	LEAVEFN();
}

/*
 * This function performs some initialization on the DDRIO unit.
 * This function is dependent on BOARD_ID, DDR_SPEED, and CHANNEL_ENABLES.
 */
void ddrphy_init(struct mrc_params *mrc_params)
{
	uint32_t temp;
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl_grp;	/* byte lane group counter (2 BLs per module) */
	uint8_t bl_divisor = 1;	/* byte lane divisor */
	/* For DDR3 --> 0 == 800, 1 == 1066, 2 == 1333 */
	uint8_t speed = mrc_params->ddr_speed & 3;
	uint8_t cas;
	uint8_t cwl;

	ENTERFN();

	cas = mrc_params->params.cl;
	cwl = 5 + mrc_params->ddr_speed;

	/* ddrphy_init starts */
	mrc_post_code(0x03, 0x00);

	/*
	 * HSD#231531
	 * Make sure IOBUFACT is deasserted before initializing the DDR PHY
	 *
	 * HSD#234845
	 * Make sure WRPTRENABLE is deasserted before initializing the DDR PHY
	 */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* Deassert DDRPHY Initialization Complete */
			mrc_alt_write_mask(DDRPHY,
				CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
				~(1 << 20), 1 << 20);	/* SPID_INIT_COMPLETE=0 */
			/* Deassert IOBUFACT */
			mrc_alt_write_mask(DDRPHY,
				CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
				~(1 << 2), 1 << 2);	/* IOBUFACTRST_N=0 */
			/* Disable WRPTR */
			mrc_alt_write_mask(DDRPHY,
				CMDPTRREG + ch * DDRIOCCC_CH_OFFSET,
				~(1 << 0), 1 << 0);	/* WRPTRENABLE=0 */
		}
	}

	/* Put PHY in reset */
	mrc_alt_write_mask(DDRPHY, MASTERRSTN, 0, 1);

	/* Initialize DQ01, DQ23, CMD, CLK-CTL, COMP modules */

	/* STEP0 */
	mrc_post_code(0x03, 0x10);
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* DQ01-DQ23 */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
				/* Analog MUX select - IO2xCLKSEL */
				mrc_alt_write_mask(DDRPHY,
					DQOBSCKEBBCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					bl_grp ? 0 : (1 << 22), 1 << 22);

				/* ODT Strength */
				switch (mrc_params->rd_odt_value) {
				case 1:
					temp = 0x3;
					break;	/* 60 ohm */
				case 2:
					temp = 0x3;
					break;	/* 120 ohm */
				case 3:
					temp = 0x3;
					break;	/* 180 ohm */
				default:
					temp = 0x3;
					break;	/* 120 ohm */
				}

				/* ODT strength */
				mrc_alt_write_mask(DDRPHY,
					B0RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp << 5, 0x60);
				/* ODT strength */
				mrc_alt_write_mask(DDRPHY,
					B1RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp << 5, 0x60);

				/* Dynamic ODT/DIFFAMP */
				temp = (cas << 24) | (cas << 16) |
					(cas << 8) | (cas << 0);
				switch (speed) {
				case 0:
					temp -= 0x01010101;
					break;	/* 800 */
				case 1:
					temp -= 0x02020202;
					break;	/* 1066 */
				case 2:
					temp -= 0x03030303;
					break;	/* 1333 */
				case 3:
					temp -= 0x04040404;
					break;	/* 1600 */
				}
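				/*
				 * Each byte of the launch-time value above is
				 * the CAS latency minus (speed index + 1)
				 * clocks, replicated across the four
				 * ODT/DIFFAMP launch fields programmed below.
				 */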

				/* Launch Time: ODT, DIFFAMP, ODT, DIFFAMP */
				mrc_alt_write_mask(DDRPHY,
					B01LATCTL1 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x1f1f1f1f);
				switch (speed) {
				/* HSD#234715 */
				case 0:
					temp = (0x06 << 16) | (0x07 << 8);
					break;	/* 800 */
				case 1:
					temp = (0x07 << 16) | (0x08 << 8);
					break;	/* 1066 */
				case 2:
					temp = (0x09 << 16) | (0x0a << 8);
					break;	/* 1333 */
				case 3:
					temp = (0x0a << 16) | (0x0b << 8);
					break;	/* 1600 */
				}

				/* On Duration: ODT, DIFFAMP */
				mrc_alt_write_mask(DDRPHY,
					B0ONDURCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003f3f00);
				/* On Duration: ODT, DIFFAMP */
				mrc_alt_write_mask(DDRPHY,
					B1ONDURCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003f3f00);

				switch (mrc_params->rd_odt_value) {
				case 0:
					/* override DIFFAMP=on, ODT=off */
					temp = (0x3f << 16) | (0x3f << 10);
					break;
				default:
					/* override DIFFAMP=on, ODT=on */
					temp = (0x3f << 16) | (0x2a << 10);
					break;
				}

				/* Override: DIFFAMP, ODT */
				mrc_alt_write_mask(DDRPHY,
					B0OVRCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003ffc00);
				/* Override: DIFFAMP, ODT */
				mrc_alt_write_mask(DDRPHY,
					B1OVRCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003ffc00);

				/* DLL Setup */

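				/*
				 * The 1xCLK latencies below are derived from
				 * CAS/CWL: tEDP = CL + 7, RCVEN = CL - 4 and
				 * WDQS = CWL - 2 clocks, in the field order
				 * given in the comment that follows.
				 */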
				/* 1xCLK Domain Timings: tEDP,RCVEN,WDQS (PO) */
				mrc_alt_write_mask(DDRPHY,
					B0LATCTL0 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					((cas + 7) << 16) | ((cas - 4) << 8) |
					((cwl - 2) << 0), 0x003f1f1f);
				mrc_alt_write_mask(DDRPHY,
					B1LATCTL0 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					((cas + 7) << 16) | ((cas - 4) << 8) |
					((cwl - 2) << 0), 0x003f1f1f);

				/* RCVEN Bypass (PO) */
				mrc_alt_write_mask(DDRPHY,
					B0RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x81);
				mrc_alt_write_mask(DDRPHY,
					B1RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x81);

				/* TX */
				mrc_alt_write_mask(DDRPHY,
					DQCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 16, 1 << 16);
				mrc_alt_write_mask(DDRPHY,
					B01PTRCTL1 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 8, 1 << 8);

				/* RX (PO) */
				/* Internal Vref Code, Enable#, Ext_or_Int (1=Ext) */
				mrc_alt_write_mask(DDRPHY,
					B0VREFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					(0x03 << 2) | (0x0 << 1) | (0x0 << 0),
					0xff);
				/* Internal Vref Code, Enable#, Ext_or_Int (1=Ext) */
				mrc_alt_write_mask(DDRPHY,
					B1VREFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					(0x03 << 2) | (0x0 << 1) | (0x0 << 0),
					0xff);
				/* Per-Bit De-Skew Enable */
				mrc_alt_write_mask(DDRPHY,
					B0RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x10);
				/* Per-Bit De-Skew Enable */
				mrc_alt_write_mask(DDRPHY,
					B1RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x10);
			}

			/* CLKEBB */
			mrc_alt_write_mask(DDRPHY,
				CMDOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET,
				0, 1 << 23);

			/* Enable tristate control of cmd/address bus */
			mrc_alt_write_mask(DDRPHY,
				CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
				0, 0x03);

			/* ODT RCOMP */
			mrc_alt_write_mask(DDRPHY,
				CMDRCOMPODT + ch * DDRIOCCC_CH_OFFSET,
				(0x03 << 5) | (0x03 << 0), 0x3ff);

			/* CMDPM* registers must be programmed in this order */

			/* Turn On Delays: SFR (regulator), MPLL */
			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG4 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);
			/*
			 * Delays: ASSERT_IOBUFACT_to_ALLON0_for_PM_MSG_3,
			 * VREG (MDLL) Turn On, ALLON0_to_DEASSERT_IOBUFACT
			 * for_PM_MSG_gt0, MDLL Turn On
			 */
			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG3 + ch * DDRIOCCC_CH_OFFSET,
				0xfffff616, 0xffffffff);
			/* MPLL Divider Reset Delays */
			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG2 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);
			/* Turn Off Delays: VREG, Staggered MDLL, MDLL, PI */
			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG1 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);
			/* Turn On Delays: MPLL, Staggered MDLL, PI, IOBUFACT */
			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG0 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);
			/* Allow PUnit signals */
			mrc_alt_write_mask(DDRPHY,
				CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
				(0x6 << 8) | (0x1 << 6) | (0x4 << 0),
				0xffe00f4f);
			/* DLL_VREG Bias Trim, VREF Tuning for DLL_VREG */
			mrc_alt_write_mask(DDRPHY,
				CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				(0x3 << 4) | (0x7 << 0), 0x7f);

			/* CLK-CTL */
			mrc_alt_write_mask(DDRPHY,
				CCOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET,
				0, 1 << 24);	/* CLKEBB */
			/* Buffer Enable: CS,CKE,ODT,CLK */
			mrc_alt_write_mask(DDRPHY,
				CCCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
				0x1f, 0x000ffff1);
			/* ODT RCOMP */
			mrc_alt_write_mask(DDRPHY,
				CCRCOMPODT + ch * DDRIOCCC_CH_OFFSET,
				(0x03 << 8) | (0x03 << 0), 0x00001f1f);
			/* DLL_VREG Bias Trim, VREF Tuning for DLL_VREG */
			mrc_alt_write_mask(DDRPHY,
				CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				(0x3 << 4) | (0x7 << 0), 0x7f);

			/*
			 * COMP (RON channel specific)
			 * - DQ/DQS/DM RON: 32 Ohm
			 * - CTRL/CMD RON: 27 Ohm
			 * - CLK RON: 26 Ohm
			 */
			/* RCOMP Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				DQVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x08 << 24) | (0x03 << 16), 0x3f3f0000);
			/* RCOMP Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				CMDVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0C << 24) | (0x03 << 16), 0x3f3f0000);
			/* RCOMP Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0F << 24) | (0x03 << 16), 0x3f3f0000);
			/* RCOMP Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x08 << 24) | (0x03 << 16), 0x3f3f0000);
			/* RCOMP Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				CTLVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0C << 24) | (0x03 << 16), 0x3f3f0000);

			/* DQS Swapped Input Enable */
			mrc_alt_write_mask(DDRPHY,
				COMPEN1CH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 19) | (1 << 17), 0xc00ac000);

			/* ODT VREF = 1.5 x 274/(360+274) = 0.65V (code of ~50) */
			/* ODT Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				DQVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x32 << 8) | (0x03 << 0), 0x00003f3f);
			/* ODT Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x32 << 8) | (0x03 << 0), 0x00003f3f);
			/* ODT Vref PU/PD */
			mrc_alt_write_mask(DDRPHY,
				CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0E << 8) | (0x05 << 0), 0x00003f3f);

			/*
			 * Slew rate settings are frequency specific,
			 * numbers below are for 800 MHz (speed == 0)
			 * - DQ/DQS/DM/CLK SR: 4V/ns,
			 * - CTRL/CMD SR: 1.5V/ns
			 */
			temp = (0x0e << 16) | (0x0e << 12) | (0x08 << 8) |
				(0x0b << 4) | (0x0b << 0);
			/* DCOMP Delay Select: CTL,CMD,CLK,DQS,DQ */
			mrc_alt_write_mask(DDRPHY,
				DLYSELCH0 + ch * DDRCOMP_CH_OFFSET,
				temp, 0x000fffff);
			/* TCO Vref CLK,DQS,DQ */
			mrc_alt_write_mask(DDRPHY,
				TCOVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x05 << 16) | (0x05 << 8) | (0x05 << 0),
				0x003f3f3f);
			/* ODTCOMP CMD/CTL PU/PD */
			mrc_alt_write_mask(DDRPHY,
				CCBUFODTCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x03 << 8) | (0x03 << 0),
				0x00001f1f);
			/* COMP */
			mrc_alt_write_mask(DDRPHY,
				COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
				0, 0xc0000100);

#ifdef BACKUP_COMPS
			/* DQ COMP Overrides */
			/* RCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
			/* RCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
			/* DCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);
			/* DCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);
			/* ODTCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);
			/* ODTCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);
			/* TCOCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);
			/* TCOCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			/* DQS COMP Overrides */
			/* RCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQSDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
			/* RCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQSDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
			/* DCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQSDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);
			/* DCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQSDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);
			/* ODTCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQSODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);
			/* ODTCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQSODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);
			/* TCOCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);
			/* TCOCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			/* CLK COMP Overrides */
			/* RCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CLKDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0c << 16),
				0x801f0000);
			/* RCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CLKDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0c << 16),
				0x801f0000);
			/* DCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CLKDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x07 << 16),
				0x801f0000);
			/* DCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CLKDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x07 << 16),
				0x801f0000);
			/* ODTCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CLKODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);
			/* ODTCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CLKODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);
			/* TCOCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);
			/* TCOCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			/* CMD COMP Overrides */
			/* RCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CMDDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);
			/* RCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CMDDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);
			/* DCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CMDDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
			/* DCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CMDDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			/* CTL COMP Overrides */
			/* RCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CTLDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);
			/* RCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CTLDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);
			/* DCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CTLDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
			/* DCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CTLDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
#else
			/* DQ TCOCOMP Overrides */
			/* TCOCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);
			/* TCOCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);

			/* DQS TCOCOMP Overrides */
			/* TCOCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);
			/* TCOCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);

			/* CLK TCOCOMP Overrides */
			/* TCOCOMP PU */
			mrc_alt_write_mask(DDRPHY,
				CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);
			/* TCOCOMP PD */
			mrc_alt_write_mask(DDRPHY,
				CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);
#endif

			/* program STATIC delays */
#ifdef BACKUP_WCMD
			set_wcmd(ch, ddr_wcmd[PLATFORM_ID]);
#else
			set_wcmd(ch, ddr_wclk[PLATFORM_ID] + HALF_CLK);
#endif

			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					set_wclk(ch, rk, ddr_wclk[PLATFORM_ID]);
#ifdef BACKUP_WCTL
					set_wctl(ch, rk, ddr_wctl[PLATFORM_ID]);
#else
					set_wctl(ch, rk, ddr_wclk[PLATFORM_ID] + HALF_CLK);
#endif
				}
			}
		}
	}

	/* COMP (non channel specific) */
	/* RCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQANADRVPUCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQANADRVPDCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CMDANADRVPUCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CMDANADRVPDCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CLKANADRVPUCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CLKANADRVPDCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQSANADRVPUCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQSANADRVPDCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CTLANADRVPUCTL, 1 << 30, 1 << 30);
	/* RCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CTLANADRVPDCTL, 1 << 30, 1 << 30);
	/* ODT: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQANAODTPUCTL, 1 << 30, 1 << 30);
	/* ODT: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQANAODTPDCTL, 1 << 30, 1 << 30);
	/* ODT: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CLKANAODTPUCTL, 1 << 30, 1 << 30);
	/* ODT: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CLKANAODTPDCTL, 1 << 30, 1 << 30);
	/* ODT: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQSANAODTPUCTL, 1 << 30, 1 << 30);
	/* ODT: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQSANAODTPDCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQANADLYPUCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQANADLYPDCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CMDANADLYPUCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CMDANADLYPDCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CLKANADLYPUCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CLKANADLYPDCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQSANADLYPUCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQSANADLYPDCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CTLANADLYPUCTL, 1 << 30, 1 << 30);
	/* DCOMP: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CTLANADLYPDCTL, 1 << 30, 1 << 30);
	/* TCO: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQANATCOPUCTL, 1 << 30, 1 << 30);
	/* TCO: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQANATCOPDCTL, 1 << 30, 1 << 30);
	/* TCO: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, CLKANATCOPUCTL, 1 << 30, 1 << 30);
	/* TCO: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, CLKANATCOPDCTL, 1 << 30, 1 << 30);
	/* TCO: Dither PU Enable */
	mrc_alt_write_mask(DDRPHY, DQSANATCOPUCTL, 1 << 30, 1 << 30);
	/* TCO: Dither PD Enable */
	mrc_alt_write_mask(DDRPHY, DQSANATCOPDCTL, 1 << 30, 1 << 30);
	/* TCOCOMP: Pulse Count */
	mrc_alt_write_mask(DDRPHY, TCOCNTCTRL, 1, 3);
	/* ODT: CMD/CTL PD/PU */
	mrc_alt_write_mask(DDRPHY, CHNLBUFSTATIC,
		(0x03 << 24) | (0x03 << 16), 0x1f1f0000);
	/* Set 1us counter */
	mrc_alt_write_mask(DDRPHY, MSCNTR, 0x64, 0xff);
	mrc_alt_write_mask(DDRPHY, LATCH1CTL, 0x1 << 28, 0x70000000);

	/* Release PHY from reset */
	mrc_alt_write_mask(DDRPHY, MASTERRSTN, 1, 1);

	/* STEP1 */
	mrc_post_code(0x03, 0x11);

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* DQ01-DQ23 */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
				mrc_alt_write_mask(DDRPHY,
					DQMDLLCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 13,
					1 << 13);	/* Enable VREG */
				delay_n(3);
			}

			/* ECC */
			mrc_alt_write_mask(DDRPHY, ECCMDLLCTL,
				1 << 13, 1 << 13);	/* Enable VREG */
			delay_n(3);
			/* CMD */
			mrc_alt_write_mask(DDRPHY,
				CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 13, 1 << 13);	/* Enable VREG */
			delay_n(3);
			/* CLK-CTL */
			mrc_alt_write_mask(DDRPHY,
				CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 13, 1 << 13);	/* Enable VREG */
			delay_n(3);
		}
	}

	/* STEP2 */
	mrc_post_code(0x03, 0x12);
	delay_n(200);
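	/*
	 * STEP1 enabled the per-module VREGs; after this settling delay the
	 * master DLLs (MCDLL) are enabled below, and STEP3 then turns on the
	 * TX/RX DLLs. delay_n() is assumed to be a nanosecond-scale delay,
	 * given the separate delay_u() helper used for microseconds.
	 */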

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* DQ01-DQ23 */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
				mrc_alt_write_mask(DDRPHY,
					DQMDLLCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 17,
					1 << 17);	/* Enable MCDLL */
				delay_n(50);
			}

			/* ECC */
			mrc_alt_write_mask(DDRPHY, ECCMDLLCTL,
				1 << 17, 1 << 17);	/* Enable MCDLL */
			delay_n(50);
			/* CMD */
			mrc_alt_write_mask(DDRPHY,
				CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 18, 1 << 18);	/* Enable MCDLL */
			delay_n(50);
			/* CLK-CTL */
			mrc_alt_write_mask(DDRPHY,
				CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 18, 1 << 18);	/* Enable MCDLL */
			delay_n(50);
		}
	}

	/* STEP3 */
	mrc_post_code(0x03, 0x13);
	delay_n(100);

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* DQ01-DQ23 */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
#ifdef FORCE_16BIT_DDRIO
				temp = (bl_grp &&
					(mrc_params->channel_width == X16)) ?
					0x11ff : 0xffff;
#else
				temp = 0xffff;
#endif
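				/*
				 * With FORCE_16BIT_DDRIO and a x16 channel,
				 * the second byte-lane group presumably
				 * carries no data, so most of its TXDLL
				 * enable bits are left clear (0x11ff instead
				 * of 0xffff).
				 */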
1025*4882a593Smuzhiyun 				/* Enable TXDLL */
1026*4882a593Smuzhiyun 				mrc_alt_write_mask(DDRPHY,
1027*4882a593Smuzhiyun 					DQDLLTXCTL +
1028*4882a593Smuzhiyun 					bl_grp * DDRIODQ_BL_OFFSET +
1029*4882a593Smuzhiyun 					ch * DDRIODQ_CH_OFFSET,
1030*4882a593Smuzhiyun 					temp, 0xffff);
1031*4882a593Smuzhiyun 				delay_n(3);
1032*4882a593Smuzhiyun 				/* Enable RXDLL */
1033*4882a593Smuzhiyun 				mrc_alt_write_mask(DDRPHY,
1034*4882a593Smuzhiyun 					DQDLLRXCTL +
1035*4882a593Smuzhiyun 					bl_grp * DDRIODQ_BL_OFFSET +
1036*4882a593Smuzhiyun 					ch * DDRIODQ_CH_OFFSET,
1037*4882a593Smuzhiyun 					0xf, 0xf);
1038*4882a593Smuzhiyun 				delay_n(3);
1039*4882a593Smuzhiyun 				/* Enable RXDLL Overrides BL0 */
1040*4882a593Smuzhiyun 				mrc_alt_write_mask(DDRPHY,
1041*4882a593Smuzhiyun 					B0OVRCTL +
1042*4882a593Smuzhiyun 					bl_grp * DDRIODQ_BL_OFFSET +
1043*4882a593Smuzhiyun 					ch * DDRIODQ_CH_OFFSET,
1044*4882a593Smuzhiyun 					0xf, 0xf);
1045*4882a593Smuzhiyun 			}
1046*4882a593Smuzhiyun 
1047*4882a593Smuzhiyun 			/* ECC */
1048*4882a593Smuzhiyun 			temp = 0xffff;
1049*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY, ECCDLLTXCTL,
1050*4882a593Smuzhiyun 				temp, 0xffff);
1051*4882a593Smuzhiyun 			delay_n(3);
1052*4882a593Smuzhiyun 
1053*4882a593Smuzhiyun 			/* CMD (PO) */
1054*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1055*4882a593Smuzhiyun 				CMDDLLTXCTL + ch * DDRIOCCC_CH_OFFSET,
1056*4882a593Smuzhiyun 				temp, 0xffff);
1057*4882a593Smuzhiyun 			delay_n(3);
1058*4882a593Smuzhiyun 		}
1059*4882a593Smuzhiyun 	}
1060*4882a593Smuzhiyun 
1061*4882a593Smuzhiyun 	/* STEP4 */
1062*4882a593Smuzhiyun 	mrc_post_code(0x03, 0x14);
1063*4882a593Smuzhiyun 
1064*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1065*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
1066*4882a593Smuzhiyun 			/* Host To Memory Clock Alignment (HMC) for 800/1066 */
1067*4882a593Smuzhiyun 			for (bl_grp = 0;
1068*4882a593Smuzhiyun 			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
1069*4882a593Smuzhiyun 			     bl_grp++) {
1070*4882a593Smuzhiyun 				/* CLK_ALIGN_MOD_ID */
1071*4882a593Smuzhiyun 				mrc_alt_write_mask(DDRPHY,
1072*4882a593Smuzhiyun 					DQCLKALIGNREG2 +
1073*4882a593Smuzhiyun 					bl_grp * DDRIODQ_BL_OFFSET +
1074*4882a593Smuzhiyun 					ch * DDRIODQ_CH_OFFSET,
1075*4882a593Smuzhiyun 					bl_grp ? 3 : 1,
1076*4882a593Smuzhiyun 					0xf);
1077*4882a593Smuzhiyun 			}
1078*4882a593Smuzhiyun 
1079*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1080*4882a593Smuzhiyun 				ECCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
1081*4882a593Smuzhiyun 				0x2, 0xf);
1082*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1083*4882a593Smuzhiyun 				CMDCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
1084*4882a593Smuzhiyun 				0x0, 0xf);
1085*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1086*4882a593Smuzhiyun 				CCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
1087*4882a593Smuzhiyun 				0x2, 0xf);
1088*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1089*4882a593Smuzhiyun 				CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET,
1090*4882a593Smuzhiyun 				0x20, 0x30);
1091*4882a593Smuzhiyun 			/*
1092*4882a593Smuzhiyun 			 * NUM_SAMPLES, MAX_SAMPLES,
1093*4882a593Smuzhiyun 			 * MACRO_PI_STEP, MICRO_PI_STEP
1094*4882a593Smuzhiyun 			 */
1095*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1096*4882a593Smuzhiyun 				CMDCLKALIGNREG1 + ch * DDRIOCCC_CH_OFFSET,
1097*4882a593Smuzhiyun 				(0x18 << 16) | (0x10 << 8) |
1098*4882a593Smuzhiyun 				(0x8 << 2) | (0x1 << 0),
1099*4882a593Smuzhiyun 				0x007f7fff);
1100*4882a593Smuzhiyun 			/* TOTAL_NUM_MODULES, FIRST_U_PARTITION */
1101*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1102*4882a593Smuzhiyun 				CMDCLKALIGNREG2 + ch * DDRIOCCC_CH_OFFSET,
1103*4882a593Smuzhiyun 				(0x10 << 16) | (0x4 << 8) | (0x2 << 4),
1104*4882a593Smuzhiyun 				0x001f0ff0);
1105*4882a593Smuzhiyun #ifdef HMC_TEST
1106*4882a593Smuzhiyun 			/* START_CLK_ALIGN=1 */
1107*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1108*4882a593Smuzhiyun 				CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET,
1109*4882a593Smuzhiyun 				1 << 24, 1 << 24);
1110*4882a593Smuzhiyun 			while (msg_port_alt_read(DDRPHY,
1111*4882a593Smuzhiyun 				CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET) &
1112*4882a593Smuzhiyun 				(1 << 24))
1113*4882a593Smuzhiyun 				;	/* wait for START_CLK_ALIGN=0 */
1114*4882a593Smuzhiyun #endif
1115*4882a593Smuzhiyun 
1116*4882a593Smuzhiyun 			/* Set RD/WR Pointer Seperation & COUNTEN & FIFOPTREN */
1117*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1118*4882a593Smuzhiyun 				CMDPTRREG + ch * DDRIOCCC_CH_OFFSET,
1119*4882a593Smuzhiyun 				1, 1);	/* WRPTRENABLE=1 */
1120*4882a593Smuzhiyun 
1121*4882a593Smuzhiyun 			/* COMP initial */
1122*4882a593Smuzhiyun 			/* enable bypass for CLK buffer (PO) */
1123*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1124*4882a593Smuzhiyun 				COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
1125*4882a593Smuzhiyun 				1 << 5, 1 << 5);
1126*4882a593Smuzhiyun 			/* Initial COMP Enable */
1127*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY, CMPCTRL, 1, 1);
1128*4882a593Smuzhiyun 			/* wait for Initial COMP Enable = 0 */
1129*4882a593Smuzhiyun 			while (msg_port_alt_read(DDRPHY, CMPCTRL) & 1)
1130*4882a593Smuzhiyun 				;
1131*4882a593Smuzhiyun 			/* disable bypass for CLK buffer (PO) */
1132*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1133*4882a593Smuzhiyun 				COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
1134*4882a593Smuzhiyun 				~(1 << 5), 1 << 5);
1135*4882a593Smuzhiyun 
1136*4882a593Smuzhiyun 			/* IOBUFACT */
1137*4882a593Smuzhiyun 
1138*4882a593Smuzhiyun 			/* STEP4a */
1139*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1140*4882a593Smuzhiyun 				CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
1141*4882a593Smuzhiyun 				1 << 2, 1 << 2);	/* IOBUFACTRST_N=1 */
1142*4882a593Smuzhiyun 
1143*4882a593Smuzhiyun 			/* DDRPHY initialization complete */
1144*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY,
1145*4882a593Smuzhiyun 				CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
1146*4882a593Smuzhiyun 				1 << 20, 1 << 20);	/* SPID_INIT_COMPLETE=1 */
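			/*
			 * At this point IOBUFACTRST_N (CMDCFGREG0 bit 2) has
			 * released the I/O buffers and SPID_INIT_COMPLETE
			 * (CMDPMCONFIG0 bit 20) flags, presumably to the
			 * power-management logic, that DDRPHY initialization
			 * for this channel is done.
			 */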
1147*4882a593Smuzhiyun 		}
1148*4882a593Smuzhiyun 	}
1149*4882a593Smuzhiyun 
1150*4882a593Smuzhiyun 	LEAVEFN();
1151*4882a593Smuzhiyun }
1152*4882a593Smuzhiyun 
1153*4882a593Smuzhiyun /* This function performs JEDEC initialization on all enabled channels */
1154*4882a593Smuzhiyun void perform_jedec_init(struct mrc_params *mrc_params)
1155*4882a593Smuzhiyun {
1156*4882a593Smuzhiyun 	uint8_t twr, wl, rank;
1157*4882a593Smuzhiyun 	uint32_t tck;
1158*4882a593Smuzhiyun 	u32 dtr0;
1159*4882a593Smuzhiyun 	u32 drp;
1160*4882a593Smuzhiyun 	u32 drmc;
1161*4882a593Smuzhiyun 	u32 mrs0_cmd = 0;
1162*4882a593Smuzhiyun 	u32 emrs1_cmd = 0;
1163*4882a593Smuzhiyun 	u32 emrs2_cmd = 0;
1164*4882a593Smuzhiyun 	u32 emrs3_cmd = 0;
1165*4882a593Smuzhiyun 
1166*4882a593Smuzhiyun 	ENTERFN();
1167*4882a593Smuzhiyun 
1168*4882a593Smuzhiyun 	/* jedec_init starts */
1169*4882a593Smuzhiyun 	mrc_post_code(0x04, 0x00);
1170*4882a593Smuzhiyun 
1171*4882a593Smuzhiyun 	/* DDR3_RESET_SET=0, DDR3_RESET_RESET=1 */
1172*4882a593Smuzhiyun 	mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 2, 0x102);
1173*4882a593Smuzhiyun 
1174*4882a593Smuzhiyun 	/* Assert RESET# for 200us */
1175*4882a593Smuzhiyun 	delay_u(200);
1176*4882a593Smuzhiyun 
1177*4882a593Smuzhiyun 	/* DDR3_RESET_SET=1, DDR3_RESET_RESET=0 */
1178*4882a593Smuzhiyun 	mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 0x100, 0x102);
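	/*
	 * Note: JEDEC DDR3 power-up initialization requires RESET# to be held
	 * low for at least 200 us, which is what the delay_u(200) between the
	 * two CCDDR3RESETCTL writes above provides.
	 */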
1179*4882a593Smuzhiyun 
1180*4882a593Smuzhiyun 	dtr0 = msg_port_read(MEM_CTLR, DTR0);
1181*4882a593Smuzhiyun 
1182*4882a593Smuzhiyun 	/*
1183*4882a593Smuzhiyun 	 * Set CKEVAL for populated ranks
1184*4882a593Smuzhiyun 	 * then send NOP to each rank (#4550197)
1185*4882a593Smuzhiyun 	 */
1186*4882a593Smuzhiyun 
1187*4882a593Smuzhiyun 	drp = msg_port_read(MEM_CTLR, DRP);
1188*4882a593Smuzhiyun 	drp &= 0x3;
1189*4882a593Smuzhiyun 
1190*4882a593Smuzhiyun 	drmc = msg_port_read(MEM_CTLR, DRMC);
1191*4882a593Smuzhiyun 	drmc &= 0xfffffffc;
1192*4882a593Smuzhiyun 	drmc |= (DRMC_CKEMODE | drp);
1193*4882a593Smuzhiyun 
1194*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DRMC, drmc);
1195*4882a593Smuzhiyun 
1196*4882a593Smuzhiyun 	for (rank = 0; rank < NUM_RANKS; rank++) {
1197*4882a593Smuzhiyun 		/* Skip to next populated rank */
1198*4882a593Smuzhiyun 		if ((mrc_params->rank_enables & (1 << rank)) == 0)
1199*4882a593Smuzhiyun 			continue;
1200*4882a593Smuzhiyun 
1201*4882a593Smuzhiyun 		dram_init_command(DCMD_NOP(rank));
1202*4882a593Smuzhiyun 	}
1203*4882a593Smuzhiyun 
1204*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DRMC,
1205*4882a593Smuzhiyun 		(mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0));
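	/*
	 * Summary of the block above: DRP bits [1:0] (presumably the
	 * rank-population bits) are copied into DRMC together with
	 * DRMC_CKEMODE so CKE can be driven for the populated ranks, a NOP is
	 * issued to each populated rank to wake it, and DRMC is then rewritten
	 * to its normal value with DRMC_ODTMODE set only when
	 * rd_odt_value is 0.
	 */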
1206*4882a593Smuzhiyun 
1207*4882a593Smuzhiyun 	/*
1208*4882a593Smuzhiyun 	 * setup for emrs 2
1209*4882a593Smuzhiyun 	 * BIT[15:11] --> Always "0"
1210*4882a593Smuzhiyun 	 * BIT[10:09] --> Rtt_WR: want "Dynamic ODT Off" (0)
1211*4882a593Smuzhiyun 	 * BIT[08]    --> Always "0"
1212*4882a593Smuzhiyun 	 * BIT[07]    --> SRT: use sr_temp_range
1213*4882a593Smuzhiyun 	 * BIT[06]    --> ASR: want "Manual SR Reference" (0)
1214*4882a593Smuzhiyun 	 * BIT[05:03] --> CWL: use oem_tCWL
1215*4882a593Smuzhiyun 	 * BIT[02:00] --> PASR: want "Full Array" (0)
1216*4882a593Smuzhiyun 	 */
1217*4882a593Smuzhiyun 	emrs2_cmd |= (2 << 3);
1218*4882a593Smuzhiyun 	wl = 5 + mrc_params->ddr_speed;
1219*4882a593Smuzhiyun 	emrs2_cmd |= ((wl - 5) << 9);
1220*4882a593Smuzhiyun 	emrs2_cmd |= (mrc_params->sr_temp_range << 13);
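	/*
	 * Illustrative decoding (inferred from the shifts used here): the
	 * dram_init_command() payload appears to place the MR-select number
	 * in bits [5:3], the rank in bits [23:22] and the raw MRS address
	 * bits at an offset of 6, so CWL (MR2 bits [5:3]) lands at bits
	 * [11:9] and SRT (MR2 bit 7) at bit 13. For DDR3-800
	 * (ddr_speed == 0) wl = 5 and the CWL field is 0, i.e. CWL of
	 * 5 clocks; 1066 gives 1, 1333 gives 2.
	 */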
1221*4882a593Smuzhiyun 
1222*4882a593Smuzhiyun 	/*
1223*4882a593Smuzhiyun 	 * setup for emrs 3
1224*4882a593Smuzhiyun 	 * BIT[15:03] --> Always "0"
1225*4882a593Smuzhiyun 	 * BIT[02]    --> MPR: want "Normal Operation" (0)
1226*4882a593Smuzhiyun 	 * BIT[01:00] --> MPR_Loc: want "Predefined Pattern" (0)
1227*4882a593Smuzhiyun 	 */
1228*4882a593Smuzhiyun 	emrs3_cmd |= (3 << 3);
1229*4882a593Smuzhiyun 
1230*4882a593Smuzhiyun 	/*
1231*4882a593Smuzhiyun 	 * setup for emrs 1
1232*4882a593Smuzhiyun 	 * BIT[15:13]     --> Always "0"
1233*4882a593Smuzhiyun 	 * BIT[12:12]     --> Qoff: want "Output Buffer Enabled" (0)
1234*4882a593Smuzhiyun 	 * BIT[11:11]     --> TDQS: want "Disabled" (0)
1235*4882a593Smuzhiyun 	 * BIT[10:10]     --> Always "0"
1236*4882a593Smuzhiyun 	 * BIT[09,06,02]  --> Rtt_nom: use rtt_nom_value
1237*4882a593Smuzhiyun 	 * BIT[08]        --> Always "0"
1238*4882a593Smuzhiyun 	 * BIT[07]        --> WR_LVL: want "Disabled" (0)
1239*4882a593Smuzhiyun 	 * BIT[05,01]     --> DIC: use ron_value
1240*4882a593Smuzhiyun 	 * BIT[04:03]     --> AL: additive latency want "0" (0)
1241*4882a593Smuzhiyun 	 * BIT[00]        --> DLL: want "Enable" (0)
1242*4882a593Smuzhiyun 	 *
1243*4882a593Smuzhiyun 	 * (BIT5|BIT1) set Ron value
1244*4882a593Smuzhiyun 	 * 00 --> RZQ/6 (40ohm)
1245*4882a593Smuzhiyun 	 * 01 --> RZQ/7 (34ohm)
1246*4882a593Smuzhiyun 	 * 1* --> RESERVED
1247*4882a593Smuzhiyun 	 *
1248*4882a593Smuzhiyun 	 * (BIT9|BIT6|BIT2) set Rtt_nom value
1249*4882a593Smuzhiyun 	 * 000 --> Disabled
1250*4882a593Smuzhiyun 	 * 001 --> RZQ/4 ( 60ohm)
1251*4882a593Smuzhiyun 	 * 010 --> RZQ/2 (120ohm)
1252*4882a593Smuzhiyun 	 * 011 --> RZQ/6 ( 40ohm)
1253*4882a593Smuzhiyun 	 * 1** --> RESERVED
1254*4882a593Smuzhiyun 	 */
1255*4882a593Smuzhiyun 	emrs1_cmd |= (1 << 3);
1256*4882a593Smuzhiyun 	emrs1_cmd &= ~(1 << 6);
1257*4882a593Smuzhiyun 
1258*4882a593Smuzhiyun 	if (mrc_params->ron_value == 0)
1259*4882a593Smuzhiyun 		emrs1_cmd |= (1 << 7);
1260*4882a593Smuzhiyun 	else
1261*4882a593Smuzhiyun 		emrs1_cmd &= ~(1 << 7);
1262*4882a593Smuzhiyun 
1263*4882a593Smuzhiyun 	if (mrc_params->rtt_nom_value == 0)
1264*4882a593Smuzhiyun 		emrs1_cmd |= (DDR3_EMRS1_RTTNOM_40 << 6);
1265*4882a593Smuzhiyun 	else if (mrc_params->rtt_nom_value == 1)
1266*4882a593Smuzhiyun 		emrs1_cmd |= (DDR3_EMRS1_RTTNOM_60 << 6);
1267*4882a593Smuzhiyun 	else if (mrc_params->rtt_nom_value == 2)
1268*4882a593Smuzhiyun 		emrs1_cmd |= (DDR3_EMRS1_RTTNOM_120 << 6);
1269*4882a593Smuzhiyun 
1270*4882a593Smuzhiyun 	/* save MRS1 value (excluding control fields) */
1271*4882a593Smuzhiyun 	mrc_params->mrs1 = emrs1_cmd >> 6;
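	/*
	 * The >> 6 strips the command/MR-select bits, leaving the raw MR1
	 * address bits; wr_level() re-issues this value via DCMD_MRS1() to
	 * take the DRAM back out of write-levelling mode.
	 */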
1272*4882a593Smuzhiyun 
1273*4882a593Smuzhiyun 	/*
1274*4882a593Smuzhiyun 	 * setup for mrs 0
1275*4882a593Smuzhiyun 	 * BIT[15:13]     --> Always "0"
1276*4882a593Smuzhiyun 	 * BIT[12]        --> PPD: for Quark (1)
1277*4882a593Smuzhiyun 	 * BIT[11:09]     --> WR: use oem_tWR
1278*4882a593Smuzhiyun 	 * BIT[08]        --> DLL: want "Reset" (1, self clearing)
1279*4882a593Smuzhiyun 	 * BIT[07]        --> MODE: want "Normal" (0)
1280*4882a593Smuzhiyun 	 * BIT[06:04,02]  --> CL: use oem_tCAS
1281*4882a593Smuzhiyun 	 * BIT[03]        --> RD_BURST_TYPE: want "Interleave" (1)
1282*4882a593Smuzhiyun 	 * BIT[01:00]     --> BL: want "8 Fixed" (0)
1283*4882a593Smuzhiyun 	 * WR:
1284*4882a593Smuzhiyun 	 * 0 --> 16
1285*4882a593Smuzhiyun 	 * 1 --> 5
1286*4882a593Smuzhiyun 	 * 2 --> 6
1287*4882a593Smuzhiyun 	 * 3 --> 7
1288*4882a593Smuzhiyun 	 * 4 --> 8
1289*4882a593Smuzhiyun 	 * 5 --> 10
1290*4882a593Smuzhiyun 	 * 6 --> 12
1291*4882a593Smuzhiyun 	 * 7 --> 14
1292*4882a593Smuzhiyun 	 * CL:
1293*4882a593Smuzhiyun 	 * BIT[02:02] "0" if oem_tCAS <= 11 (1866?)
1294*4882a593Smuzhiyun 	 * BIT[06:04] use oem_tCAS-4
1295*4882a593Smuzhiyun 	 */
1296*4882a593Smuzhiyun 	mrs0_cmd |= (1 << 14);
1297*4882a593Smuzhiyun 	mrs0_cmd |= (1 << 18);
1298*4882a593Smuzhiyun 	mrs0_cmd |= ((((dtr0 >> 12) & 7) + 1) << 10);
1299*4882a593Smuzhiyun 
1300*4882a593Smuzhiyun 	tck = t_ck[mrc_params->ddr_speed];
1301*4882a593Smuzhiyun 	/* Per JEDEC: tWR=15000ps DDR2/3 from 800-1600 */
1302*4882a593Smuzhiyun 	twr = MCEIL(15000, tck);
1303*4882a593Smuzhiyun 	mrs0_cmd |= ((twr - 4) << 15);
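	/*
	 * Worked example (assuming MCEIL() is a ceiling divide): for
	 * DDR3-800, tck = 2500 ps, so twr = MCEIL(15000, 2500) = 6 clocks and
	 * the WR field is 6 - 4 = 2, matching the "2 --> 6" row in the table
	 * above; for 1066 (1875 ps) twr = 8 and the field is 4. The CL field
	 * above is DTR0 bits [14:12] plus one; since MR0 expects tCL - 4,
	 * DTR0[14:12] presumably holds tCL - 5 as programmed by
	 * prog_ddr_timing_control().
	 */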
1304*4882a593Smuzhiyun 
1305*4882a593Smuzhiyun 	for (rank = 0; rank < NUM_RANKS; rank++) {
1306*4882a593Smuzhiyun 		/* Skip to next populated rank */
1307*4882a593Smuzhiyun 		if ((mrc_params->rank_enables & (1 << rank)) == 0)
1308*4882a593Smuzhiyun 			continue;
1309*4882a593Smuzhiyun 
1310*4882a593Smuzhiyun 		emrs2_cmd |= (rank << 22);
1311*4882a593Smuzhiyun 		dram_init_command(emrs2_cmd);
1312*4882a593Smuzhiyun 
1313*4882a593Smuzhiyun 		emrs3_cmd |= (rank << 22);
1314*4882a593Smuzhiyun 		dram_init_command(emrs3_cmd);
1315*4882a593Smuzhiyun 
1316*4882a593Smuzhiyun 		emrs1_cmd |= (rank << 22);
1317*4882a593Smuzhiyun 		dram_init_command(emrs1_cmd);
1318*4882a593Smuzhiyun 
1319*4882a593Smuzhiyun 		mrs0_cmd |= (rank << 22);
1320*4882a593Smuzhiyun 		dram_init_command(mrs0_cmd);
1321*4882a593Smuzhiyun 
1322*4882a593Smuzhiyun 		dram_init_command(DCMD_ZQCL(rank));
1323*4882a593Smuzhiyun 	}
1324*4882a593Smuzhiyun 
1325*4882a593Smuzhiyun 	LEAVEFN();
1326*4882a593Smuzhiyun }
1327*4882a593Smuzhiyun 
1328*4882a593Smuzhiyun /*
1329*4882a593Smuzhiyun  * Dunit Initialization Complete
1330*4882a593Smuzhiyun  *
1331*4882a593Smuzhiyun  * Indicates that initialization of the Dunit has completed.
1332*4882a593Smuzhiyun  *
1333*4882a593Smuzhiyun  * Memory accesses are permitted and maintenance operations begin.
1334*4882a593Smuzhiyun  * Until this bit is set to a 1, the memory controller will not accept
1335*4882a593Smuzhiyun  * DRAM requests from the MEMORY_MANAGER or HTE.
1336*4882a593Smuzhiyun  */
1337*4882a593Smuzhiyun void set_ddr_init_complete(struct mrc_params *mrc_params)
1338*4882a593Smuzhiyun {
1339*4882a593Smuzhiyun 	u32 dco;
1340*4882a593Smuzhiyun 
1341*4882a593Smuzhiyun 	ENTERFN();
1342*4882a593Smuzhiyun 
1343*4882a593Smuzhiyun 	dco = msg_port_read(MEM_CTLR, DCO);
1344*4882a593Smuzhiyun 	dco &= ~DCO_PMICTL;
1345*4882a593Smuzhiyun 	dco |= DCO_IC;
1346*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DCO, dco);
1347*4882a593Smuzhiyun 
1348*4882a593Smuzhiyun 	LEAVEFN();
1349*4882a593Smuzhiyun }
1350*4882a593Smuzhiyun 
1351*4882a593Smuzhiyun /*
1352*4882a593Smuzhiyun  * This function will restore the timing data saved by store_timings()
1353*4882a593Smuzhiyun  *
1354*4882a593Smuzhiyun  * Reusing the saved values on subsequent boots speeds up boot times
1355*4882a593Smuzhiyun  * and is required for Suspend To RAM capabilities.
1356*4882a593Smuzhiyun  */
1357*4882a593Smuzhiyun void restore_timings(struct mrc_params *mrc_params)
1358*4882a593Smuzhiyun {
1359*4882a593Smuzhiyun 	uint8_t ch, rk, bl;
1360*4882a593Smuzhiyun 	const struct mrc_timings *mt = &mrc_params->timings;
1361*4882a593Smuzhiyun 
1362*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1363*4882a593Smuzhiyun 		for (rk = 0; rk < NUM_RANKS; rk++) {
1364*4882a593Smuzhiyun 			for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
1365*4882a593Smuzhiyun 				set_rcvn(ch, rk, bl, mt->rcvn[ch][rk][bl]);
1366*4882a593Smuzhiyun 				set_rdqs(ch, rk, bl, mt->rdqs[ch][rk][bl]);
1367*4882a593Smuzhiyun 				set_wdqs(ch, rk, bl, mt->wdqs[ch][rk][bl]);
1368*4882a593Smuzhiyun 				set_wdq(ch, rk, bl, mt->wdq[ch][rk][bl]);
1369*4882a593Smuzhiyun 				if (rk == 0) {
1370*4882a593Smuzhiyun 					/* VREF (RANK0 only) */
1371*4882a593Smuzhiyun 					set_vref(ch, bl, mt->vref[ch][bl]);
1372*4882a593Smuzhiyun 				}
1373*4882a593Smuzhiyun 			}
1374*4882a593Smuzhiyun 			set_wctl(ch, rk, mt->wctl[ch][rk]);
1375*4882a593Smuzhiyun 		}
1376*4882a593Smuzhiyun 		set_wcmd(ch, mt->wcmd[ch]);
1377*4882a593Smuzhiyun 	}
1378*4882a593Smuzhiyun }
1379*4882a593Smuzhiyun 
1380*4882a593Smuzhiyun /*
1381*4882a593Smuzhiyun  * Configure default settings normally set as part of read training
1382*4882a593Smuzhiyun  *
1383*4882a593Smuzhiyun  * Some defaults have to be set earlier as they may affect earlier
1384*4882a593Smuzhiyun  * training steps.
1385*4882a593Smuzhiyun  */
1386*4882a593Smuzhiyun void default_timings(struct mrc_params *mrc_params)
1387*4882a593Smuzhiyun {
1388*4882a593Smuzhiyun 	uint8_t ch, rk, bl;
1389*4882a593Smuzhiyun 
1390*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1391*4882a593Smuzhiyun 		for (rk = 0; rk < NUM_RANKS; rk++) {
1392*4882a593Smuzhiyun 			for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
1393*4882a593Smuzhiyun 				set_rdqs(ch, rk, bl, 24);
1394*4882a593Smuzhiyun 				if (rk == 0) {
1395*4882a593Smuzhiyun 					/* VREF (RANK0 only) */
1396*4882a593Smuzhiyun 					set_vref(ch, bl, 32);
1397*4882a593Smuzhiyun 				}
1398*4882a593Smuzhiyun 			}
1399*4882a593Smuzhiyun 		}
1400*4882a593Smuzhiyun 	}
1401*4882a593Smuzhiyun }
1402*4882a593Smuzhiyun 
1403*4882a593Smuzhiyun /*
1404*4882a593Smuzhiyun  * This function will perform our RCVEN Calibration Algorithm.
1405*4882a593Smuzhiyun  * We will only use the 2xCLK domain timings to perform RCVEN Calibration.
1406*4882a593Smuzhiyun  * All byte lanes will be calibrated "simultaneously" per channel per rank.
1407*4882a593Smuzhiyun  */
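/*
 * In outline (with FULL_CLK = 128 PI and QRTR_CLK = 32 PI, as used below):
 * each byte lane starts at (4 + 1) * FULL_CLK = 640 PI, find_rising_edge()
 * locates the DQS rising edge, the delay is bumped by QRTR_CLK into the
 * middle of the high pulse, pulled back in FULL_CLK steps until sample_dqs()
 * reads 0, and finally bumped by QRTR_CLK again to sit in the center of the
 * preamble.
 */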
1408*4882a593Smuzhiyun void rcvn_cal(struct mrc_params *mrc_params)
1409*4882a593Smuzhiyun {
1410*4882a593Smuzhiyun 	uint8_t ch;	/* channel counter */
1411*4882a593Smuzhiyun 	uint8_t rk;	/* rank counter */
1412*4882a593Smuzhiyun 	uint8_t bl;	/* byte lane counter */
1413*4882a593Smuzhiyun 	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
1414*4882a593Smuzhiyun 
1415*4882a593Smuzhiyun #ifdef R2R_SHARING
1416*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
1417*4882a593Smuzhiyun 	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
1418*4882a593Smuzhiyun #ifndef BACKUP_RCVN
1419*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
1420*4882a593Smuzhiyun 	uint32_t num_ranks_enabled = 0;
1421*4882a593Smuzhiyun #endif
1422*4882a593Smuzhiyun #endif
1423*4882a593Smuzhiyun 
1424*4882a593Smuzhiyun #ifdef BACKUP_RCVN
1425*4882a593Smuzhiyun #else
1426*4882a593Smuzhiyun 	uint32_t temp;
1427*4882a593Smuzhiyun 	/* absolute PI value to be programmed on the byte lane */
1428*4882a593Smuzhiyun 	uint32_t delay[NUM_BYTE_LANES];
1429*4882a593Smuzhiyun 	u32 dtr1, dtr1_save;
1430*4882a593Smuzhiyun #endif
1431*4882a593Smuzhiyun 
1432*4882a593Smuzhiyun 	ENTERFN();
1433*4882a593Smuzhiyun 
1434*4882a593Smuzhiyun 	/* rcvn_cal starts */
1435*4882a593Smuzhiyun 	mrc_post_code(0x05, 0x00);
1436*4882a593Smuzhiyun 
1437*4882a593Smuzhiyun #ifndef BACKUP_RCVN
1438*4882a593Smuzhiyun 	/* need separate burst to sample DQS preamble */
1439*4882a593Smuzhiyun 	dtr1 = msg_port_read(MEM_CTLR, DTR1);
1440*4882a593Smuzhiyun 	dtr1_save = dtr1;
1441*4882a593Smuzhiyun 	dtr1 |= DTR1_TCCD_12CLK;
1442*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DTR1, dtr1);
1443*4882a593Smuzhiyun #endif
1444*4882a593Smuzhiyun 
1445*4882a593Smuzhiyun #ifdef R2R_SHARING
1446*4882a593Smuzhiyun 	/* need to set "final_delay[][]" elements to "0" */
1447*4882a593Smuzhiyun 	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
1448*4882a593Smuzhiyun #endif
1449*4882a593Smuzhiyun 
1450*4882a593Smuzhiyun 	/* loop through each enabled channel */
1451*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1452*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
1453*4882a593Smuzhiyun 			/* perform RCVEN Calibration on a per rank basis */
1454*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
1455*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
1456*4882a593Smuzhiyun 					/*
1457*4882a593Smuzhiyun 					 * POST_CODE here indicates the current
1458*4882a593Smuzhiyun 					 * channel and rank being calibrated
1459*4882a593Smuzhiyun 					 */
1460*4882a593Smuzhiyun 					mrc_post_code(0x05, 0x10 + ((ch << 4) | rk));
1461*4882a593Smuzhiyun 
1462*4882a593Smuzhiyun #ifdef BACKUP_RCVN
1463*4882a593Smuzhiyun 					/* set hard-coded timing values */
1464*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++)
1465*4882a593Smuzhiyun 						set_rcvn(ch, rk, bl, ddr_rcvn[PLATFORM_ID]);
1466*4882a593Smuzhiyun #else
1467*4882a593Smuzhiyun 					/* enable FIFORST */
1468*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
1469*4882a593Smuzhiyun 						mrc_alt_write_mask(DDRPHY,
1470*4882a593Smuzhiyun 							B01PTRCTL1 +
1471*4882a593Smuzhiyun 							(bl >> 1) * DDRIODQ_BL_OFFSET +
1472*4882a593Smuzhiyun 							ch * DDRIODQ_CH_OFFSET,
1473*4882a593Smuzhiyun 							0, 1 << 8);
1474*4882a593Smuzhiyun 					}
1475*4882a593Smuzhiyun 					/* initialize the starting delay to (4 + 1) x 128 PI (cas + 1 CLK) */
1476*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1477*4882a593Smuzhiyun 						/* 1x CLK domain timing is cas-4 */
1478*4882a593Smuzhiyun 						delay[bl] = (4 + 1) * FULL_CLK;
1479*4882a593Smuzhiyun 
1480*4882a593Smuzhiyun 						set_rcvn(ch, rk, bl, delay[bl]);
1481*4882a593Smuzhiyun 					}
1482*4882a593Smuzhiyun 
1483*4882a593Smuzhiyun 					/* now find the rising edge */
1484*4882a593Smuzhiyun 					find_rising_edge(mrc_params, delay, ch, rk, true);
1485*4882a593Smuzhiyun 
1486*4882a593Smuzhiyun 					/* Now increase delay by 32 PI (1/4 CLK) to place in center of high pulse */
1487*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1488*4882a593Smuzhiyun 						delay[bl] += QRTR_CLK;
1489*4882a593Smuzhiyun 						set_rcvn(ch, rk, bl, delay[bl]);
1490*4882a593Smuzhiyun 					}
1491*4882a593Smuzhiyun 					/* Now decrement delay by 128 PI (1 CLK) until we sample a "0" */
1492*4882a593Smuzhiyun 					do {
1493*4882a593Smuzhiyun 						temp = sample_dqs(mrc_params, ch, rk, true);
1494*4882a593Smuzhiyun 						for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1495*4882a593Smuzhiyun 							if (temp & (1 << bl)) {
1496*4882a593Smuzhiyun 								if (delay[bl] >= FULL_CLK) {
1497*4882a593Smuzhiyun 									delay[bl] -= FULL_CLK;
1498*4882a593Smuzhiyun 									set_rcvn(ch, rk, bl, delay[bl]);
1499*4882a593Smuzhiyun 								} else {
1500*4882a593Smuzhiyun 									/* not enough delay */
1501*4882a593Smuzhiyun 									training_message(ch, rk, bl);
1502*4882a593Smuzhiyun 									mrc_post_code(0xee, 0x50);
1503*4882a593Smuzhiyun 								}
1504*4882a593Smuzhiyun 							}
1505*4882a593Smuzhiyun 						}
1506*4882a593Smuzhiyun 					} while (temp & 0xff);
1507*4882a593Smuzhiyun 
1508*4882a593Smuzhiyun #ifdef R2R_SHARING
1509*4882a593Smuzhiyun 					/* increment "num_ranks_enabled" */
1510*4882a593Smuzhiyun 					num_ranks_enabled++;
1511*4882a593Smuzhiyun 					/* Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble */
1512*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1513*4882a593Smuzhiyun 						delay[bl] += QRTR_CLK;
1514*4882a593Smuzhiyun 						/* add "delay[]" values to "final_delay[][]" for rolling average */
1515*4882a593Smuzhiyun 						final_delay[ch][bl] += delay[bl];
1516*4882a593Smuzhiyun 						/* set timing based on rolling average values */
1517*4882a593Smuzhiyun 						set_rcvn(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
1518*4882a593Smuzhiyun 					}
1519*4882a593Smuzhiyun #else
1520*4882a593Smuzhiyun 					/* Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble */
1521*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1522*4882a593Smuzhiyun 						delay[bl] += QRTR_CLK;
1523*4882a593Smuzhiyun 						set_rcvn(ch, rk, bl, delay[bl]);
1524*4882a593Smuzhiyun 					}
1525*4882a593Smuzhiyun #endif
1526*4882a593Smuzhiyun 
1527*4882a593Smuzhiyun 					/* disable FIFORST */
1528*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
1529*4882a593Smuzhiyun 						mrc_alt_write_mask(DDRPHY,
1530*4882a593Smuzhiyun 							B01PTRCTL1 +
1531*4882a593Smuzhiyun 							(bl >> 1) * DDRIODQ_BL_OFFSET +
1532*4882a593Smuzhiyun 							ch * DDRIODQ_CH_OFFSET,
1533*4882a593Smuzhiyun 							1 << 8, 1 << 8);
1534*4882a593Smuzhiyun 					}
1535*4882a593Smuzhiyun #endif
1536*4882a593Smuzhiyun 				}
1537*4882a593Smuzhiyun 			}
1538*4882a593Smuzhiyun 		}
1539*4882a593Smuzhiyun 	}
1540*4882a593Smuzhiyun 
1541*4882a593Smuzhiyun #ifndef BACKUP_RCVN
1542*4882a593Smuzhiyun 	/* restore original */
1543*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DTR1, dtr1_save);
1544*4882a593Smuzhiyun #endif
1545*4882a593Smuzhiyun 
1546*4882a593Smuzhiyun 	LEAVEFN();
1547*4882a593Smuzhiyun }
1548*4882a593Smuzhiyun 
1549*4882a593Smuzhiyun /*
1550*4882a593Smuzhiyun  * This function will perform the Write Levelling algorithm
1551*4882a593Smuzhiyun  * (align WCLK and WDQS).
1552*4882a593Smuzhiyun  *
1553*4882a593Smuzhiyun  * This algorithm will act on each rank in each channel separately.
1554*4882a593Smuzhiyun  */
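/*
 * In outline (as implemented below): each rank is precharged, EMRS1 is
 * issued with the write-levelling enable bit set and ODT full-time
 * termination is disabled via DTR4, WDQS starts at the WCLK value for the
 * rank and find_rising_edge() aligns it; after normal mode is restored, a
 * coarse HTE read/write check starts from WDQS + 128 PI and pulls failing
 * byte lanes back a full clock at a time (keeping WDQ = WDQS - 32 PI) until
 * every lane passes.
 */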
1555*4882a593Smuzhiyun void wr_level(struct mrc_params *mrc_params)
1556*4882a593Smuzhiyun {
1557*4882a593Smuzhiyun 	uint8_t ch;	/* channel counter */
1558*4882a593Smuzhiyun 	uint8_t rk;	/* rank counter */
1559*4882a593Smuzhiyun 	uint8_t bl;	/* byte lane counter */
1560*4882a593Smuzhiyun 	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
1561*4882a593Smuzhiyun 
1562*4882a593Smuzhiyun #ifdef R2R_SHARING
1563*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
1564*4882a593Smuzhiyun 	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
1565*4882a593Smuzhiyun #ifndef BACKUP_WDQS
1566*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
1567*4882a593Smuzhiyun 	uint32_t num_ranks_enabled = 0;
1568*4882a593Smuzhiyun #endif
1569*4882a593Smuzhiyun #endif
1570*4882a593Smuzhiyun 
1571*4882a593Smuzhiyun #ifdef BACKUP_WDQS
1572*4882a593Smuzhiyun #else
1573*4882a593Smuzhiyun 	/* determines stop condition for CRS_WR_LVL */
1574*4882a593Smuzhiyun 	bool all_edges_found;
1575*4882a593Smuzhiyun 	/* absolute PI value to be programmed on the byte lane */
1576*4882a593Smuzhiyun 	uint32_t delay[NUM_BYTE_LANES];
1577*4882a593Smuzhiyun 	/*
1578*4882a593Smuzhiyun 	 * A static qualifier would place this data in the image (loaded once
1579*4882a593Smuzhiyun 	 * by shadow()), whereas these non-static locals are created on the
1580*4882a593Smuzhiyun 	 * stack every time this function is called
1581*4882a593Smuzhiyun 	 */
1582*4882a593Smuzhiyun 	uint32_t address;	/* address to be checked during COARSE_WR_LVL */
1583*4882a593Smuzhiyun 	u32 dtr4, dtr4_save;
1584*4882a593Smuzhiyun #endif
1585*4882a593Smuzhiyun 
1586*4882a593Smuzhiyun 	ENTERFN();
1587*4882a593Smuzhiyun 
1588*4882a593Smuzhiyun 	/* wr_level starts */
1589*4882a593Smuzhiyun 	mrc_post_code(0x06, 0x00);
1590*4882a593Smuzhiyun 
1591*4882a593Smuzhiyun #ifdef R2R_SHARING
1592*4882a593Smuzhiyun 	/* need to set "final_delay[][]" elements to "0" */
1593*4882a593Smuzhiyun 	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
1594*4882a593Smuzhiyun #endif
1595*4882a593Smuzhiyun 
1596*4882a593Smuzhiyun 	/* loop through each enabled channel */
1597*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1598*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
1599*4882a593Smuzhiyun 			/* perform WRITE LEVELING algorithm on a per rank basis */
1600*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
1601*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
1602*4882a593Smuzhiyun 					/*
1603*4882a593Smuzhiyun 					 * POST_CODE here indicates the current
1604*4882a593Smuzhiyun 					 * rank and channel being calibrated
1605*4882a593Smuzhiyun 					 */
1606*4882a593Smuzhiyun 					mrc_post_code(0x06, 0x10 + ((ch << 4) | rk));
1607*4882a593Smuzhiyun 
1608*4882a593Smuzhiyun #ifdef BACKUP_WDQS
1609*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1610*4882a593Smuzhiyun 						set_wdqs(ch, rk, bl, ddr_wdqs[PLATFORM_ID]);
1611*4882a593Smuzhiyun 						set_wdq(ch, rk, bl, ddr_wdqs[PLATFORM_ID] - QRTR_CLK);
1612*4882a593Smuzhiyun 					}
1613*4882a593Smuzhiyun #else
1614*4882a593Smuzhiyun 					/*
1615*4882a593Smuzhiyun 					 * perform a single PRECHARGE_ALL command to
1616*4882a593Smuzhiyun 					 * make DRAM state machine go to IDLE state
1617*4882a593Smuzhiyun 					 */
1618*4882a593Smuzhiyun 					dram_init_command(DCMD_PREA(rk));
1619*4882a593Smuzhiyun 
1620*4882a593Smuzhiyun 					/*
1621*4882a593Smuzhiyun 					 * enable Write Levelling Mode
1622*4882a593Smuzhiyun 					 * (EMRS1 w/ Write Levelling Mode Enable)
1623*4882a593Smuzhiyun 					 */
1624*4882a593Smuzhiyun 					dram_init_command(DCMD_MRS1(rk, 0x82));
1625*4882a593Smuzhiyun 
1626*4882a593Smuzhiyun 					/*
1627*4882a593Smuzhiyun 					 * set ODT DRAM Full Time Termination
1628*4882a593Smuzhiyun 					 * disable in MCU
1629*4882a593Smuzhiyun 					 */
1630*4882a593Smuzhiyun 
1631*4882a593Smuzhiyun 					dtr4 = msg_port_read(MEM_CTLR, DTR4);
1632*4882a593Smuzhiyun 					dtr4_save = dtr4;
1633*4882a593Smuzhiyun 					dtr4 |= DTR4_ODTDIS;
1634*4882a593Smuzhiyun 					msg_port_write(MEM_CTLR, DTR4, dtr4);
1635*4882a593Smuzhiyun 
1636*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
1637*4882a593Smuzhiyun 						/*
1638*4882a593Smuzhiyun 						 * Enable Sandy Bridge Mode (WDQ Tri-State) &
1639*4882a593Smuzhiyun 						 * Ensure 5 WDQS pulses during Write Leveling
1640*4882a593Smuzhiyun 						 */
1641*4882a593Smuzhiyun 						mrc_alt_write_mask(DDRPHY,
1642*4882a593Smuzhiyun 							DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch,
1643*4882a593Smuzhiyun 							0x10000154,
1644*4882a593Smuzhiyun 							0x100003fc);
1645*4882a593Smuzhiyun 					}
1646*4882a593Smuzhiyun 
1647*4882a593Smuzhiyun 					/* Write Leveling Mode enabled in IO */
1648*4882a593Smuzhiyun 					mrc_alt_write_mask(DDRPHY,
1649*4882a593Smuzhiyun 						CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch,
1650*4882a593Smuzhiyun 						1 << 16, 1 << 16);
1651*4882a593Smuzhiyun 
1652*4882a593Smuzhiyun 					/* Initialize the starting delay to WCLK */
1653*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1654*4882a593Smuzhiyun 						/*
1655*4882a593Smuzhiyun 						 * CLK0 --> RK0
1656*4882a593Smuzhiyun 						 * CLK1 --> RK1
1657*4882a593Smuzhiyun 						 */
1658*4882a593Smuzhiyun 						delay[bl] = get_wclk(ch, rk);
1659*4882a593Smuzhiyun 
1660*4882a593Smuzhiyun 						set_wdqs(ch, rk, bl, delay[bl]);
1661*4882a593Smuzhiyun 					}
1662*4882a593Smuzhiyun 
1663*4882a593Smuzhiyun 					/* now find the rising edge */
1664*4882a593Smuzhiyun 					find_rising_edge(mrc_params, delay, ch, rk, false);
1665*4882a593Smuzhiyun 
1666*4882a593Smuzhiyun 					/* disable Write Levelling Mode */
1667*4882a593Smuzhiyun 					mrc_alt_write_mask(DDRPHY,
1668*4882a593Smuzhiyun 						CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch,
1669*4882a593Smuzhiyun 						0, 1 << 16);
1670*4882a593Smuzhiyun 
1671*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
1672*4882a593Smuzhiyun 						/* Disable Sandy Bridge Mode & Ensure 4 WDQS pulses during normal operation */
1673*4882a593Smuzhiyun 						mrc_alt_write_mask(DDRPHY,
1674*4882a593Smuzhiyun 							DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch,
1675*4882a593Smuzhiyun 							0x00000154,
1676*4882a593Smuzhiyun 							0x100003fc);
1677*4882a593Smuzhiyun 					}
1678*4882a593Smuzhiyun 
1679*4882a593Smuzhiyun 					/* restore original DTR4 */
1680*4882a593Smuzhiyun 					msg_port_write(MEM_CTLR, DTR4, dtr4_save);
1681*4882a593Smuzhiyun 
1682*4882a593Smuzhiyun 					/*
1683*4882a593Smuzhiyun 					 * restore original value
1684*4882a593Smuzhiyun 					 * (Write Levelling Mode Disable)
1685*4882a593Smuzhiyun 					 */
1686*4882a593Smuzhiyun 					dram_init_command(DCMD_MRS1(rk, mrc_params->mrs1));
1687*4882a593Smuzhiyun 
1688*4882a593Smuzhiyun 					/*
1689*4882a593Smuzhiyun 					 * perform a single PRECHARGE_ALL command to
1690*4882a593Smuzhiyun 					 * make DRAM state machine go to IDLE state
1691*4882a593Smuzhiyun 					 */
1692*4882a593Smuzhiyun 					dram_init_command(DCMD_PREA(rk));
1693*4882a593Smuzhiyun 
1694*4882a593Smuzhiyun 					mrc_post_code(0x06, 0x30 + ((ch << 4) | rk));
1695*4882a593Smuzhiyun 
1696*4882a593Smuzhiyun 					/*
1697*4882a593Smuzhiyun 					 * COARSE WRITE LEVEL:
1698*4882a593Smuzhiyun 					 * check that we're on the correct clock edge
1699*4882a593Smuzhiyun 					 */
1700*4882a593Smuzhiyun 
1701*4882a593Smuzhiyun 					/* hte reconfiguration request */
1702*4882a593Smuzhiyun 					mrc_params->hte_setup = 1;
1703*4882a593Smuzhiyun 
1704*4882a593Smuzhiyun 					/* start CRS_WR_LVL with WDQS = WDQS + 128 PI */
1705*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1706*4882a593Smuzhiyun 						delay[bl] = get_wdqs(ch, rk, bl) + FULL_CLK;
1707*4882a593Smuzhiyun 						set_wdqs(ch, rk, bl, delay[bl]);
1708*4882a593Smuzhiyun 						/*
1709*4882a593Smuzhiyun 						 * program WDQ timings based on WDQS
1710*4882a593Smuzhiyun 						 * (WDQ = WDQS - 32 PI)
1711*4882a593Smuzhiyun 						 */
1712*4882a593Smuzhiyun 						set_wdq(ch, rk, bl, (delay[bl] - QRTR_CLK));
1713*4882a593Smuzhiyun 					}
1714*4882a593Smuzhiyun 
1715*4882a593Smuzhiyun 					/* get an address in the targeted channel/rank */
1716*4882a593Smuzhiyun 					address = get_addr(ch, rk);
1717*4882a593Smuzhiyun 					do {
1718*4882a593Smuzhiyun 						uint32_t coarse_result = 0x00;
1719*4882a593Smuzhiyun 						uint32_t coarse_result_mask = byte_lane_mask(mrc_params);
1720*4882a593Smuzhiyun 						/* assume pass */
1721*4882a593Smuzhiyun 						all_edges_found = true;
1722*4882a593Smuzhiyun 
1723*4882a593Smuzhiyun 						mrc_params->hte_setup = 1;
1724*4882a593Smuzhiyun 						coarse_result = check_rw_coarse(mrc_params, address);
1725*4882a593Smuzhiyun 
1726*4882a593Smuzhiyun 						/* check for failures and margin the byte lane back 128 PI (1 CLK) */
1727*4882a593Smuzhiyun 						for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
1728*4882a593Smuzhiyun 							if (coarse_result & (coarse_result_mask << bl)) {
1729*4882a593Smuzhiyun 								all_edges_found = false;
1730*4882a593Smuzhiyun 								delay[bl] -= FULL_CLK;
1731*4882a593Smuzhiyun 								set_wdqs(ch, rk, bl, delay[bl]);
1732*4882a593Smuzhiyun 								/* program WDQ timings based on WDQS (WDQ = WDQS - 32 PI) */
1733*4882a593Smuzhiyun 								set_wdq(ch, rk, bl, delay[bl] - QRTR_CLK);
1734*4882a593Smuzhiyun 							}
1735*4882a593Smuzhiyun 						}
1736*4882a593Smuzhiyun 					} while (!all_edges_found);
1737*4882a593Smuzhiyun 
1738*4882a593Smuzhiyun #ifdef R2R_SHARING
1739*4882a593Smuzhiyun 					/* increment "num_ranks_enabled" */
1740*4882a593Smuzhiyun 					num_ranks_enabled++;
1741*4882a593Smuzhiyun 					/* accumulate "final_delay[][]" values from "delay[]" values for rolling average */
1742*4882a593Smuzhiyun 					for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
1743*4882a593Smuzhiyun 						final_delay[ch][bl] += delay[bl];
1744*4882a593Smuzhiyun 						set_wdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
1745*4882a593Smuzhiyun 						/* program WDQ timings based on WDQS (WDQ = WDQS - 32 PI) */
1746*4882a593Smuzhiyun 						set_wdq(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled - QRTR_CLK);
1747*4882a593Smuzhiyun 					}
1748*4882a593Smuzhiyun #endif
1749*4882a593Smuzhiyun #endif
1750*4882a593Smuzhiyun 				}
1751*4882a593Smuzhiyun 			}
1752*4882a593Smuzhiyun 		}
1753*4882a593Smuzhiyun 	}
1754*4882a593Smuzhiyun 
1755*4882a593Smuzhiyun 	LEAVEFN();
1756*4882a593Smuzhiyun }
1757*4882a593Smuzhiyun 
1758*4882a593Smuzhiyun void prog_page_ctrl(struct mrc_params *mrc_params)
1759*4882a593Smuzhiyun {
1760*4882a593Smuzhiyun 	u32 dpmc0;
1761*4882a593Smuzhiyun 
1762*4882a593Smuzhiyun 	ENTERFN();
1763*4882a593Smuzhiyun 
1764*4882a593Smuzhiyun 	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
1765*4882a593Smuzhiyun 	dpmc0 &= ~DPMC0_PCLSTO_MASK;
1766*4882a593Smuzhiyun 	dpmc0 |= (4 << 16);
1767*4882a593Smuzhiyun 	dpmc0 |= DPMC0_PREAPWDEN;
1768*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DPMC0, dpmc0);
1769*4882a593Smuzhiyun }
1770*4882a593Smuzhiyun 
1771*4882a593Smuzhiyun /*
1772*4882a593Smuzhiyun  * This function will perform the READ TRAINING Algorithm on all
1773*4882a593Smuzhiyun  * channels/ranks/byte_lanes simultaneously to minimize execution time.
1774*4882a593Smuzhiyun  *
1775*4882a593Smuzhiyun  * The idea here is to train the VREF and RDQS (and eventually RDQ) values
1776*4882a593Smuzhiyun  * to achieve maximum READ margins. The algorithm will first determine the
1777*4882a593Smuzhiyun  * X coordinate (RDQS setting). This is done by collapsing the VREF eye
1778*4882a593Smuzhiyun  * until we find a minimum required RDQS eye for VREF_MIN and VREF_MAX.
1779*4882a593Smuzhiyun  * Then we take the averages of the RDQS eye at VREF_MIN and VREF_MAX,
1780*4882a593Smuzhiyun  * then average those; this will be the final X coordinate. The algorithm
1781*4882a593Smuzhiyun  * will then determine the Y coordinate (VREF setting). This is done by
1782*4882a593Smuzhiyun  * collapsing the RDQS eye until we find a minimum required VREF eye for
1783*4882a593Smuzhiyun  * RDQS_MIN and RDQS_MAX. Then we take the averages of the VREF eye at
1784*4882a593Smuzhiyun  * RDQS_MIN and RDQS_MAX, then average those; this will be the final Y
1785*4882a593Smuzhiyun  * coordinate.
1786*4882a593Smuzhiyun  *
1787*4882a593Smuzhiyun  * NOTE: this algorithm assumes the eye curves have a one-to-one relationship,
1788*4882a593Smuzhiyun  * meaning for each X the curve has only one Y and vice versa.
1789*4882a593Smuzhiyun  */
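/*
 * Illustrative centering arithmetic (hypothetical numbers): if one byte
 * lane's passing RDQS edges were L/T = 10, R/T = 40, L/B = 14 and R/B = 44,
 * the final X would be ((10 + 40) / 2 + (14 + 44) / 2) / 2 = 27; the final Y
 * (VREF) is averaged from its four edges in the same way.
 */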
1790*4882a593Smuzhiyun void rd_train(struct mrc_params *mrc_params)
1791*4882a593Smuzhiyun {
1792*4882a593Smuzhiyun 	uint8_t ch;	/* channel counter */
1793*4882a593Smuzhiyun 	uint8_t rk;	/* rank counter */
1794*4882a593Smuzhiyun 	uint8_t bl;	/* byte lane counter */
1795*4882a593Smuzhiyun 	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
1796*4882a593Smuzhiyun #ifdef BACKUP_RDQS
1797*4882a593Smuzhiyun #else
1798*4882a593Smuzhiyun 	uint8_t side_x;	/* tracks LEFT/RIGHT approach vectors */
1799*4882a593Smuzhiyun 	uint8_t side_y;	/* tracks BOTTOM/TOP approach vectors */
1800*4882a593Smuzhiyun 	/* X coordinate data (passing RDQS values) for approach vectors */
1801*4882a593Smuzhiyun 	uint8_t x_coordinate[2][2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
1802*4882a593Smuzhiyun 	/* Y coordinate data (passing VREF values) for approach vectors */
1803*4882a593Smuzhiyun 	uint8_t y_coordinate[2][2][NUM_CHANNELS][NUM_BYTE_LANES];
1804*4882a593Smuzhiyun 	/* centered X (RDQS) */
1805*4882a593Smuzhiyun 	uint8_t x_center[NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
1806*4882a593Smuzhiyun 	/* centered Y (VREF) */
1807*4882a593Smuzhiyun 	uint8_t y_center[NUM_CHANNELS][NUM_BYTE_LANES];
1808*4882a593Smuzhiyun 	uint32_t address;	/* target address for check_bls_ex() */
1809*4882a593Smuzhiyun 	uint32_t result;	/* result of check_bls_ex() */
1810*4882a593Smuzhiyun 	uint32_t bl_mask;	/* byte lane mask for result checking */
1811*4882a593Smuzhiyun #ifdef R2R_SHARING
1812*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
1813*4882a593Smuzhiyun 	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
1814*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
1815*4882a593Smuzhiyun 	uint32_t num_ranks_enabled = 0;
1816*4882a593Smuzhiyun #endif
1817*4882a593Smuzhiyun #endif
1818*4882a593Smuzhiyun 
1819*4882a593Smuzhiyun 	/* rd_train starts */
1820*4882a593Smuzhiyun 	mrc_post_code(0x07, 0x00);
1821*4882a593Smuzhiyun 
1822*4882a593Smuzhiyun 	ENTERFN();
1823*4882a593Smuzhiyun 
1824*4882a593Smuzhiyun #ifdef BACKUP_RDQS
1825*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1826*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
1827*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
1828*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
1829*4882a593Smuzhiyun 					for (bl = 0;
1830*4882a593Smuzhiyun 					     bl < NUM_BYTE_LANES / bl_divisor;
1831*4882a593Smuzhiyun 					     bl++) {
1832*4882a593Smuzhiyun 						set_rdqs(ch, rk, bl, ddr_rdqs[PLATFORM_ID]);
1833*4882a593Smuzhiyun 					}
1834*4882a593Smuzhiyun 				}
1835*4882a593Smuzhiyun 			}
1836*4882a593Smuzhiyun 		}
1837*4882a593Smuzhiyun 	}
1838*4882a593Smuzhiyun #else
1839*4882a593Smuzhiyun 	/* initialize x/y_coordinate arrays */
1840*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1841*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
1842*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
1843*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
1844*4882a593Smuzhiyun 					for (bl = 0;
1845*4882a593Smuzhiyun 					     bl < NUM_BYTE_LANES / bl_divisor;
1846*4882a593Smuzhiyun 					     bl++) {
1847*4882a593Smuzhiyun 						/* x_coordinate */
1848*4882a593Smuzhiyun 						x_coordinate[L][B][ch][rk][bl] = RDQS_MIN;
1849*4882a593Smuzhiyun 						x_coordinate[R][B][ch][rk][bl] = RDQS_MAX;
1850*4882a593Smuzhiyun 						x_coordinate[L][T][ch][rk][bl] = RDQS_MIN;
1851*4882a593Smuzhiyun 						x_coordinate[R][T][ch][rk][bl] = RDQS_MAX;
1852*4882a593Smuzhiyun 						/* y_coordinate */
1853*4882a593Smuzhiyun 						y_coordinate[L][B][ch][bl] = VREF_MIN;
1854*4882a593Smuzhiyun 						y_coordinate[R][B][ch][bl] = VREF_MIN;
1855*4882a593Smuzhiyun 						y_coordinate[L][T][ch][bl] = VREF_MAX;
1856*4882a593Smuzhiyun 						y_coordinate[R][T][ch][bl] = VREF_MAX;
1857*4882a593Smuzhiyun 					}
1858*4882a593Smuzhiyun 				}
1859*4882a593Smuzhiyun 			}
1860*4882a593Smuzhiyun 		}
1861*4882a593Smuzhiyun 	}
1862*4882a593Smuzhiyun 
1863*4882a593Smuzhiyun 	/* initialize other variables */
1864*4882a593Smuzhiyun 	bl_mask = byte_lane_mask(mrc_params);
1865*4882a593Smuzhiyun 	address = get_addr(0, 0);
1866*4882a593Smuzhiyun 
1867*4882a593Smuzhiyun #ifdef R2R_SHARING
1868*4882a593Smuzhiyun 	/* need to set "final_delay[][]" elements to "0" */
1869*4882a593Smuzhiyun 	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
1870*4882a593Smuzhiyun #endif
1871*4882a593Smuzhiyun 
1872*4882a593Smuzhiyun 	/* look for passing coordinates */
1873*4882a593Smuzhiyun 	for (side_y = B; side_y <= T; side_y++) {
1874*4882a593Smuzhiyun 		for (side_x = L; side_x <= R; side_x++) {
1875*4882a593Smuzhiyun 			mrc_post_code(0x07, 0x10 + side_y * 2 + side_x);
1876*4882a593Smuzhiyun 
1877*4882a593Smuzhiyun 			/* find passing values */
1878*4882a593Smuzhiyun 			for (ch = 0; ch < NUM_CHANNELS; ch++) {
1879*4882a593Smuzhiyun 				if (mrc_params->channel_enables & (0x1 << ch)) {
1880*4882a593Smuzhiyun 					for (rk = 0; rk < NUM_RANKS; rk++) {
1881*4882a593Smuzhiyun 						if (mrc_params->rank_enables &
1882*4882a593Smuzhiyun 							(0x1 << rk)) {
1883*4882a593Smuzhiyun 							/* set x/y_coordinate search starting settings */
1884*4882a593Smuzhiyun 							for (bl = 0;
1885*4882a593Smuzhiyun 							     bl < NUM_BYTE_LANES / bl_divisor;
1886*4882a593Smuzhiyun 							     bl++) {
1887*4882a593Smuzhiyun 								set_rdqs(ch, rk, bl,
1888*4882a593Smuzhiyun 									 x_coordinate[side_x][side_y][ch][rk][bl]);
1889*4882a593Smuzhiyun 								set_vref(ch, bl,
1890*4882a593Smuzhiyun 									 y_coordinate[side_x][side_y][ch][bl]);
1891*4882a593Smuzhiyun 							}
1892*4882a593Smuzhiyun 
1893*4882a593Smuzhiyun 							/* get an address in the target channel/rank */
1894*4882a593Smuzhiyun 							address = get_addr(ch, rk);
1895*4882a593Smuzhiyun 
1896*4882a593Smuzhiyun 							/* request HTE reconfiguration */
1897*4882a593Smuzhiyun 							mrc_params->hte_setup = 1;
1898*4882a593Smuzhiyun 
1899*4882a593Smuzhiyun 							/* test the settings */
1900*4882a593Smuzhiyun 							do {
1901*4882a593Smuzhiyun 								/* result[07:00] == failing byte lane (MAX 8) */
1902*4882a593Smuzhiyun 								result = check_bls_ex(mrc_params, address);
1903*4882a593Smuzhiyun 
1904*4882a593Smuzhiyun 								/* check for failures */
1905*4882a593Smuzhiyun 								if (result & 0xff) {
1906*4882a593Smuzhiyun 									/* at least 1 byte lane failed */
1907*4882a593Smuzhiyun 									for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
1908*4882a593Smuzhiyun 										if (result &
1909*4882a593Smuzhiyun 											(bl_mask << bl)) {
1910*4882a593Smuzhiyun 											/* adjust the RDQS values accordingly */
1911*4882a593Smuzhiyun 											if (side_x == L)
1912*4882a593Smuzhiyun 												x_coordinate[L][side_y][ch][rk][bl] += RDQS_STEP;
1913*4882a593Smuzhiyun 											else
1914*4882a593Smuzhiyun 												x_coordinate[R][side_y][ch][rk][bl] -= RDQS_STEP;
1915*4882a593Smuzhiyun 
1916*4882a593Smuzhiyun 											/* check that we haven't closed the RDQS_EYE too much */
1917*4882a593Smuzhiyun 											if ((x_coordinate[L][side_y][ch][rk][bl] > (RDQS_MAX - MIN_RDQS_EYE)) ||
1918*4882a593Smuzhiyun 												(x_coordinate[R][side_y][ch][rk][bl] < (RDQS_MIN + MIN_RDQS_EYE)) ||
1919*4882a593Smuzhiyun 												(x_coordinate[L][side_y][ch][rk][bl] ==
1920*4882a593Smuzhiyun 												x_coordinate[R][side_y][ch][rk][bl])) {
1921*4882a593Smuzhiyun 												/*
1922*4882a593Smuzhiyun 												 * not enough RDQS margin available at this VREF
1923*4882a593Smuzhiyun 												 * update VREF values accordingly
1924*4882a593Smuzhiyun 												 */
1925*4882a593Smuzhiyun 												if (side_y == B)
1926*4882a593Smuzhiyun 													y_coordinate[side_x][B][ch][bl] += VREF_STEP;
1927*4882a593Smuzhiyun 												else
1928*4882a593Smuzhiyun 													y_coordinate[side_x][T][ch][bl] -= VREF_STEP;
1929*4882a593Smuzhiyun 
1930*4882a593Smuzhiyun 												/* check that we haven't closed the VREF_EYE too much */
1931*4882a593Smuzhiyun 												if ((y_coordinate[side_x][B][ch][bl] > (VREF_MAX - MIN_VREF_EYE)) ||
1932*4882a593Smuzhiyun 													(y_coordinate[side_x][T][ch][bl] < (VREF_MIN + MIN_VREF_EYE)) ||
1933*4882a593Smuzhiyun 													(y_coordinate[side_x][B][ch][bl] == y_coordinate[side_x][T][ch][bl])) {
1934*4882a593Smuzhiyun 													/* VREF_EYE collapsed below MIN_VREF_EYE */
1935*4882a593Smuzhiyun 													training_message(ch, rk, bl);
1936*4882a593Smuzhiyun 													mrc_post_code(0xEE, 0x70 + side_y * 2 + side_x);
1937*4882a593Smuzhiyun 												} else {
1938*4882a593Smuzhiyun 													/* update the VREF setting */
1939*4882a593Smuzhiyun 													set_vref(ch, bl, y_coordinate[side_x][side_y][ch][bl]);
1940*4882a593Smuzhiyun 													/* reset the X coordinate to begin the search at the new VREF */
1941*4882a593Smuzhiyun 													x_coordinate[side_x][side_y][ch][rk][bl] =
1942*4882a593Smuzhiyun 														(side_x == L) ? RDQS_MIN : RDQS_MAX;
1943*4882a593Smuzhiyun 												}
1944*4882a593Smuzhiyun 											}
1945*4882a593Smuzhiyun 
1946*4882a593Smuzhiyun 											/* update the RDQS setting */
1947*4882a593Smuzhiyun 											set_rdqs(ch, rk, bl, x_coordinate[side_x][side_y][ch][rk][bl]);
1948*4882a593Smuzhiyun 										}
1949*4882a593Smuzhiyun 									}
1950*4882a593Smuzhiyun 								}
1951*4882a593Smuzhiyun 							} while (result & 0xff);
1952*4882a593Smuzhiyun 						}
1953*4882a593Smuzhiyun 					}
1954*4882a593Smuzhiyun 				}
1955*4882a593Smuzhiyun 			}
1956*4882a593Smuzhiyun 		}
1957*4882a593Smuzhiyun 	}
1958*4882a593Smuzhiyun 
1959*4882a593Smuzhiyun 	mrc_post_code(0x07, 0x20);
1960*4882a593Smuzhiyun 
1961*4882a593Smuzhiyun 	/* find final RDQS (X coordinate) & final VREF (Y coordinate) */
1962*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
1963*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
1964*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
1965*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
1966*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1967*4882a593Smuzhiyun 						uint32_t temp1;
1968*4882a593Smuzhiyun 						uint32_t temp2;
1969*4882a593Smuzhiyun 
1970*4882a593Smuzhiyun 						/* x_coordinate */
1971*4882a593Smuzhiyun 						DPF(D_INFO,
1972*4882a593Smuzhiyun 						    "RDQS T/B eye rank%d lane%d : %d-%d %d-%d\n",
1973*4882a593Smuzhiyun 						    rk, bl,
1974*4882a593Smuzhiyun 						    x_coordinate[L][T][ch][rk][bl],
1975*4882a593Smuzhiyun 						    x_coordinate[R][T][ch][rk][bl],
1976*4882a593Smuzhiyun 						    x_coordinate[L][B][ch][rk][bl],
1977*4882a593Smuzhiyun 						    x_coordinate[R][B][ch][rk][bl]);
1978*4882a593Smuzhiyun 
1979*4882a593Smuzhiyun 						/* average the TOP side LEFT & RIGHT values */
1980*4882a593Smuzhiyun 						temp1 = (x_coordinate[R][T][ch][rk][bl] + x_coordinate[L][T][ch][rk][bl]) / 2;
1981*4882a593Smuzhiyun 						/* average the BOTTOM side LEFT & RIGHT values */
1982*4882a593Smuzhiyun 						temp2 = (x_coordinate[R][B][ch][rk][bl] + x_coordinate[L][B][ch][rk][bl]) / 2;
1983*4882a593Smuzhiyun 						/* average the above averages */
1984*4882a593Smuzhiyun 						x_center[ch][rk][bl] = (uint8_t) ((temp1 + temp2) / 2);
1985*4882a593Smuzhiyun 
1986*4882a593Smuzhiyun 						/* y_coordinate */
1987*4882a593Smuzhiyun 						DPF(D_INFO,
1988*4882a593Smuzhiyun 						    "VREF R/L eye lane%d : %d-%d %d-%d\n",
1989*4882a593Smuzhiyun 						    bl,
1990*4882a593Smuzhiyun 						    y_coordinate[R][B][ch][bl],
1991*4882a593Smuzhiyun 						    y_coordinate[R][T][ch][bl],
1992*4882a593Smuzhiyun 						    y_coordinate[L][B][ch][bl],
1993*4882a593Smuzhiyun 						    y_coordinate[L][T][ch][bl]);
1994*4882a593Smuzhiyun 
1995*4882a593Smuzhiyun 						/* average the RIGHT side TOP & BOTTOM values */
1996*4882a593Smuzhiyun 						temp1 = (y_coordinate[R][T][ch][bl] + y_coordinate[R][B][ch][bl]) / 2;
1997*4882a593Smuzhiyun 						/* average the LEFT side TOP & BOTTOM values */
1998*4882a593Smuzhiyun 						temp2 = (y_coordinate[L][T][ch][bl] + y_coordinate[L][B][ch][bl]) / 2;
1999*4882a593Smuzhiyun 						/* average the above averages */
2000*4882a593Smuzhiyun 						y_center[ch][bl] = (uint8_t) ((temp1 + temp2) / 2);
2001*4882a593Smuzhiyun 					}
2002*4882a593Smuzhiyun 				}
2003*4882a593Smuzhiyun 			}
2004*4882a593Smuzhiyun 		}
2005*4882a593Smuzhiyun 	}
2006*4882a593Smuzhiyun 
2007*4882a593Smuzhiyun #ifdef RX_EYE_CHECK
2008*4882a593Smuzhiyun 	/* perform an eye check */
2009*4882a593Smuzhiyun 	for (side_y = B; side_y <= T; side_y++) {
2010*4882a593Smuzhiyun 		for (side_x = L; side_x <= R; side_x++) {
2011*4882a593Smuzhiyun 			mrc_post_code(0x07, 0x30 + side_y * 2 + side_x);
2012*4882a593Smuzhiyun 
2013*4882a593Smuzhiyun 			/* update the settings for the eye check */
2014*4882a593Smuzhiyun 			for (ch = 0; ch < NUM_CHANNELS; ch++) {
2015*4882a593Smuzhiyun 				if (mrc_params->channel_enables & (1 << ch)) {
2016*4882a593Smuzhiyun 					for (rk = 0; rk < NUM_RANKS; rk++) {
2017*4882a593Smuzhiyun 						if (mrc_params->rank_enables & (1 << rk)) {
2018*4882a593Smuzhiyun 							for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
2019*4882a593Smuzhiyun 								if (side_x == L)
2020*4882a593Smuzhiyun 									set_rdqs(ch, rk, bl, x_center[ch][rk][bl] - (MIN_RDQS_EYE / 2));
2021*4882a593Smuzhiyun 								else
2022*4882a593Smuzhiyun 									set_rdqs(ch, rk, bl, x_center[ch][rk][bl] + (MIN_RDQS_EYE / 2));
2023*4882a593Smuzhiyun 
2024*4882a593Smuzhiyun 								if (side_y == B)
2025*4882a593Smuzhiyun 									set_vref(ch, bl, y_center[ch][bl] - (MIN_VREF_EYE / 2));
2026*4882a593Smuzhiyun 								else
2027*4882a593Smuzhiyun 									set_vref(ch, bl, y_center[ch][bl] + (MIN_VREF_EYE / 2));
2028*4882a593Smuzhiyun 							}
2029*4882a593Smuzhiyun 						}
2030*4882a593Smuzhiyun 					}
2031*4882a593Smuzhiyun 				}
2032*4882a593Smuzhiyun 			}
2033*4882a593Smuzhiyun 
2034*4882a593Smuzhiyun 			/* request HTE reconfiguration */
2035*4882a593Smuzhiyun 			mrc_params->hte_setup = 1;
2036*4882a593Smuzhiyun 
2037*4882a593Smuzhiyun 			/* check the eye */
2038*4882a593Smuzhiyun 			if (check_bls_ex(mrc_params, address) & 0xff) {
2039*4882a593Smuzhiyun 				/* one or more byte lanes failed */
2040*4882a593Smuzhiyun 				mrc_post_code(0xee, 0x74 + side_x * 2 + side_y);
2041*4882a593Smuzhiyun 			}
2042*4882a593Smuzhiyun 		}
2043*4882a593Smuzhiyun 	}
2044*4882a593Smuzhiyun #endif
2045*4882a593Smuzhiyun 
2046*4882a593Smuzhiyun 	mrc_post_code(0x07, 0x40);
2047*4882a593Smuzhiyun 
2048*4882a593Smuzhiyun 	/* set final placements */
2049*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
2050*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
2051*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
2052*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
2053*4882a593Smuzhiyun #ifdef R2R_SHARING
2054*4882a593Smuzhiyun 					/* increment "num_ranks_enabled" */
2055*4882a593Smuzhiyun 					num_ranks_enabled++;
2056*4882a593Smuzhiyun #endif
2057*4882a593Smuzhiyun 					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
2058*4882a593Smuzhiyun 						/* x_coordinate */
2059*4882a593Smuzhiyun #ifdef R2R_SHARING
2060*4882a593Smuzhiyun 						final_delay[ch][bl] += x_center[ch][rk][bl];
2061*4882a593Smuzhiyun 						set_rdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
2062*4882a593Smuzhiyun #else
2063*4882a593Smuzhiyun 						set_rdqs(ch, rk, bl, x_center[ch][rk][bl]);
2064*4882a593Smuzhiyun #endif
2065*4882a593Smuzhiyun 						/* y_coordinate */
2066*4882a593Smuzhiyun 						set_vref(ch, bl, y_center[ch][bl]);
2067*4882a593Smuzhiyun 					}
2068*4882a593Smuzhiyun 				}
2069*4882a593Smuzhiyun 			}
2070*4882a593Smuzhiyun 		}
2071*4882a593Smuzhiyun 	}
2072*4882a593Smuzhiyun #endif
2073*4882a593Smuzhiyun 
2074*4882a593Smuzhiyun 	LEAVEFN();
2075*4882a593Smuzhiyun }
2076*4882a593Smuzhiyun 
2077*4882a593Smuzhiyun /*
2078*4882a593Smuzhiyun  * This function will perform the WRITE TRAINING Algorithm on all
2079*4882a593Smuzhiyun  * channels/ranks/byte_lanes simultaneously to minimize execution time.
2080*4882a593Smuzhiyun  *
2081*4882a593Smuzhiyun  * The idea here is to train the WDQ timings to achieve maximum WRITE margins.
2082*4882a593Smuzhiyun  * The algorithm will start with WDQ at the current WDQ setting (tracks WDQS
2083*4882a593Smuzhiyun  * in WR_LVL) +/- 32 PIs (+/- 1/4 CLK) and collapse the eye until all data
2084*4882a593Smuzhiyun  * patterns pass. This is because WDQS will be aligned to WCLK by the
2085*4882a593Smuzhiyun  * Write Leveling algorithm and WDQ will only ever have a 1/2 CLK window
2086*4882a593Smuzhiyun  * of validity.
2087*4882a593Smuzhiyun  */
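/*
 * In concrete terms (QRTR_CLK = 32 PI): the initial search window for each
 * byte lane runs from WDQS - 64 PI (left edge) to WDQS (right edge), i.e.
 * the half-clock window mentioned above. Failing lanes move their edge
 * inwards by WDQ_STEP until check_bls_ex() passes on both sides, and WDQ is
 * finally programmed to the midpoint of the surviving window.
 */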
2088*4882a593Smuzhiyun void wr_train(struct mrc_params *mrc_params)
2089*4882a593Smuzhiyun {
2090*4882a593Smuzhiyun 	uint8_t ch;	/* channel counter */
2091*4882a593Smuzhiyun 	uint8_t rk;	/* rank counter */
2092*4882a593Smuzhiyun 	uint8_t bl;	/* byte lane counter */
2093*4882a593Smuzhiyun 	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
2094*4882a593Smuzhiyun #ifdef BACKUP_WDQ
2095*4882a593Smuzhiyun #else
2096*4882a593Smuzhiyun 	uint8_t side;		/* LEFT/RIGHT side indicator (0=L, 1=R) */
2097*4882a593Smuzhiyun 	uint32_t temp;		/* temporary DWORD */
2098*4882a593Smuzhiyun 	/* 2 arrays, for L & R side passing delays */
2099*4882a593Smuzhiyun 	uint32_t delay[2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
2100*4882a593Smuzhiyun 	uint32_t address;	/* target address for check_bls_ex() */
2101*4882a593Smuzhiyun 	uint32_t result;	/* result of check_bls_ex() */
2102*4882a593Smuzhiyun 	uint32_t bl_mask;	/* byte lane mask for result checking */
2103*4882a593Smuzhiyun #ifdef R2R_SHARING
2104*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
2105*4882a593Smuzhiyun 	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
2106*4882a593Smuzhiyun 	/* used to find placement for rank2rank sharing configs */
2107*4882a593Smuzhiyun 	uint32_t num_ranks_enabled = 0;
2108*4882a593Smuzhiyun #endif
2109*4882a593Smuzhiyun #endif
2110*4882a593Smuzhiyun 
2111*4882a593Smuzhiyun 	/* wr_train starts */
2112*4882a593Smuzhiyun 	mrc_post_code(0x08, 0x00);
2113*4882a593Smuzhiyun 
2114*4882a593Smuzhiyun 	ENTERFN();
2115*4882a593Smuzhiyun 
2116*4882a593Smuzhiyun #ifdef BACKUP_WDQ
2117*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
2118*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
2119*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
2120*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
2121*4882a593Smuzhiyun 					for (bl = 0;
2122*4882a593Smuzhiyun 					     bl < NUM_BYTE_LANES / bl_divisor;
2123*4882a593Smuzhiyun 					     bl++) {
2124*4882a593Smuzhiyun 						set_wdq(ch, rk, bl, ddr_wdq[PLATFORM_ID]);
2125*4882a593Smuzhiyun 					}
2126*4882a593Smuzhiyun 				}
2127*4882a593Smuzhiyun 			}
2128*4882a593Smuzhiyun 		}
2129*4882a593Smuzhiyun 	}
2130*4882a593Smuzhiyun #else
2131*4882a593Smuzhiyun 	/* initialize "delay" */
2132*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
2133*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
2134*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
2135*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
2136*4882a593Smuzhiyun 					for (bl = 0;
2137*4882a593Smuzhiyun 					     bl < NUM_BYTE_LANES / bl_divisor;
2138*4882a593Smuzhiyun 					     bl++) {
2139*4882a593Smuzhiyun 						/*
2140*4882a593Smuzhiyun 						 * want to start with
2141*4882a593Smuzhiyun 						 * WDQ = (WDQS - QRTR_CLK)
2142*4882a593Smuzhiyun 						 * +/- QRTR_CLK
2143*4882a593Smuzhiyun 						 */
2144*4882a593Smuzhiyun 						temp = get_wdqs(ch, rk, bl) - QRTR_CLK;
2145*4882a593Smuzhiyun 						delay[L][ch][rk][bl] = temp - QRTR_CLK;
2146*4882a593Smuzhiyun 						delay[R][ch][rk][bl] = temp + QRTR_CLK;
2147*4882a593Smuzhiyun 					}
2148*4882a593Smuzhiyun 				}
2149*4882a593Smuzhiyun 			}
2150*4882a593Smuzhiyun 		}
2151*4882a593Smuzhiyun 	}
2152*4882a593Smuzhiyun 
2153*4882a593Smuzhiyun 	/* initialize other variables */
2154*4882a593Smuzhiyun 	bl_mask = byte_lane_mask(mrc_params);
2155*4882a593Smuzhiyun 	address = get_addr(0, 0);
2156*4882a593Smuzhiyun 
2157*4882a593Smuzhiyun #ifdef R2R_SHARING
2158*4882a593Smuzhiyun 	/* need to set "final_delay[][]" elements to "0" */
2159*4882a593Smuzhiyun 	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
2160*4882a593Smuzhiyun #endif
2161*4882a593Smuzhiyun 
2162*4882a593Smuzhiyun 	/*
2163*4882a593Smuzhiyun 	 * start algorithm on the LEFT side and train each channel/bl
2164*4882a593Smuzhiyun 	 * until no failures are observed, then repeat for the RIGHT side.
2165*4882a593Smuzhiyun 	 */
2166*4882a593Smuzhiyun 	for (side = L; side <= R; side++) {
2167*4882a593Smuzhiyun 		mrc_post_code(0x08, 0x10 + side);
2168*4882a593Smuzhiyun 
2169*4882a593Smuzhiyun 		/* set starting values */
2170*4882a593Smuzhiyun 		for (ch = 0; ch < NUM_CHANNELS; ch++) {
2171*4882a593Smuzhiyun 			if (mrc_params->channel_enables & (1 << ch)) {
2172*4882a593Smuzhiyun 				for (rk = 0; rk < NUM_RANKS; rk++) {
2173*4882a593Smuzhiyun 					if (mrc_params->rank_enables &
2174*4882a593Smuzhiyun 						(1 << rk)) {
2175*4882a593Smuzhiyun 						for (bl = 0;
2176*4882a593Smuzhiyun 						     bl < NUM_BYTE_LANES / bl_divisor;
2177*4882a593Smuzhiyun 						     bl++) {
2178*4882a593Smuzhiyun 							set_wdq(ch, rk, bl, delay[side][ch][rk][bl]);
2179*4882a593Smuzhiyun 						}
2180*4882a593Smuzhiyun 					}
2181*4882a593Smuzhiyun 				}
2182*4882a593Smuzhiyun 			}
2183*4882a593Smuzhiyun 		}
2184*4882a593Smuzhiyun 
2185*4882a593Smuzhiyun 		/* find passing values */
2186*4882a593Smuzhiyun 		for (ch = 0; ch < NUM_CHANNELS; ch++) {
2187*4882a593Smuzhiyun 			if (mrc_params->channel_enables & (1 << ch)) {
2188*4882a593Smuzhiyun 				for (rk = 0; rk < NUM_RANKS; rk++) {
2189*4882a593Smuzhiyun 					if (mrc_params->rank_enables &
2190*4882a593Smuzhiyun 						(1 << rk)) {
2191*4882a593Smuzhiyun 						/* get an address in the target channel/rank */
2192*4882a593Smuzhiyun 						address = get_addr(ch, rk);
2193*4882a593Smuzhiyun 
2194*4882a593Smuzhiyun 						/* request HTE reconfiguration */
2195*4882a593Smuzhiyun 						mrc_params->hte_setup = 1;
2196*4882a593Smuzhiyun 
2197*4882a593Smuzhiyun 						/* check the settings */
2198*4882a593Smuzhiyun 						do {
2199*4882a593Smuzhiyun 							/* result[07:00] == failing byte lane (MAX 8) */
2200*4882a593Smuzhiyun 							result = check_bls_ex(mrc_params, address);
2201*4882a593Smuzhiyun 							/* check for failures */
2202*4882a593Smuzhiyun 							if (result & 0xff) {
2203*4882a593Smuzhiyun 								/* at least 1 byte lane failed */
2204*4882a593Smuzhiyun 								for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
2205*4882a593Smuzhiyun 									if (result &
2206*4882a593Smuzhiyun 										(bl_mask << bl)) {
2207*4882a593Smuzhiyun 										if (side == L)
2208*4882a593Smuzhiyun 											delay[L][ch][rk][bl] += WDQ_STEP;
2209*4882a593Smuzhiyun 										else
2210*4882a593Smuzhiyun 											delay[R][ch][rk][bl] -= WDQ_STEP;
2211*4882a593Smuzhiyun 
2212*4882a593Smuzhiyun 										/* check for algorithm failure */
2213*4882a593Smuzhiyun 										if (delay[L][ch][rk][bl] != delay[R][ch][rk][bl]) {
2214*4882a593Smuzhiyun 											/*
2215*4882a593Smuzhiyun 											 * margin available
2216*4882a593Smuzhiyun 											 * update delay setting
2217*4882a593Smuzhiyun 											 */
2218*4882a593Smuzhiyun 											set_wdq(ch, rk, bl,
2219*4882a593Smuzhiyun 												delay[side][ch][rk][bl]);
2220*4882a593Smuzhiyun 										} else {
2221*4882a593Smuzhiyun 											/*
2222*4882a593Smuzhiyun 											 * no margin available
2223*4882a593Smuzhiyun 											 * notify the user and halt
2224*4882a593Smuzhiyun 											 */
2225*4882a593Smuzhiyun 											training_message(ch, rk, bl);
2226*4882a593Smuzhiyun 											mrc_post_code(0xee, 0x80 + side);
2227*4882a593Smuzhiyun 										}
2228*4882a593Smuzhiyun 									}
2229*4882a593Smuzhiyun 								}
2230*4882a593Smuzhiyun 							}
2231*4882a593Smuzhiyun 						/* stop when all byte lanes pass */
2232*4882a593Smuzhiyun 						} while (result & 0xff);
2233*4882a593Smuzhiyun 					}
2234*4882a593Smuzhiyun 				}
2235*4882a593Smuzhiyun 			}
2236*4882a593Smuzhiyun 		}
2237*4882a593Smuzhiyun 	}
2238*4882a593Smuzhiyun 
2239*4882a593Smuzhiyun 	/* program WDQ to the middle of passing window */
2240*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
2241*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << ch)) {
2242*4882a593Smuzhiyun 			for (rk = 0; rk < NUM_RANKS; rk++) {
2243*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rk)) {
2244*4882a593Smuzhiyun #ifdef R2R_SHARING
2245*4882a593Smuzhiyun 					/* increment "num_ranks_enabled" */
2246*4882a593Smuzhiyun 					num_ranks_enabled++;
2247*4882a593Smuzhiyun #endif
2248*4882a593Smuzhiyun 					for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
2249*4882a593Smuzhiyun 						DPF(D_INFO,
2250*4882a593Smuzhiyun 						    "WDQ eye rank%d lane%d : %d-%d\n",
2251*4882a593Smuzhiyun 						    rk, bl,
2252*4882a593Smuzhiyun 						    delay[L][ch][rk][bl],
2253*4882a593Smuzhiyun 						    delay[R][ch][rk][bl]);
2254*4882a593Smuzhiyun 
2255*4882a593Smuzhiyun 						temp = (delay[R][ch][rk][bl] + delay[L][ch][rk][bl]) / 2;
2256*4882a593Smuzhiyun 
2257*4882a593Smuzhiyun #ifdef R2R_SHARING
2258*4882a593Smuzhiyun 						final_delay[ch][bl] += temp;
2259*4882a593Smuzhiyun 						set_wdq(ch, rk, bl,
2260*4882a593Smuzhiyun 							final_delay[ch][bl] / num_ranks_enabled);
2261*4882a593Smuzhiyun #else
2262*4882a593Smuzhiyun 						set_wdq(ch, rk, bl, temp);
2263*4882a593Smuzhiyun #endif
2264*4882a593Smuzhiyun 					}
2265*4882a593Smuzhiyun 				}
2266*4882a593Smuzhiyun 			}
2267*4882a593Smuzhiyun 		}
2268*4882a593Smuzhiyun 	}
2269*4882a593Smuzhiyun #endif
2270*4882a593Smuzhiyun 
2271*4882a593Smuzhiyun 	LEAVEFN();
2272*4882a593Smuzhiyun }
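
/*
 * Illustration only (not part of the MRC): a self-contained sketch of the
 * edge-search-and-center technique used by the training loop above.  The
 * real code steps delay[L] up and delay[R] down per byte lane until
 * check_bls_ex() reports no failing lanes, then programs the midpoint via
 * set_wdq().  lane_passes() below is a hypothetical stand-in for the HTE
 * pass/fail check so the shape of the algorithm is easy to see.
 */
static int lane_passes(uint32_t delay)
{
	/* hypothetical passing window for demonstration: 40..80 */
	return delay >= 40 && delay <= 80;
}

static uint32_t find_wdq_center(uint32_t left, uint32_t right, uint32_t step)
{
	/* walk the LEFT edge up and the RIGHT edge down until each passes */
	while (!lane_passes(left) && left + step <= right)
		left += step;
	while (!lane_passes(right) && right >= left + step)
		right -= step;

	/* program the middle of the passing window (cf. set_wdq() above) */
	return (left + right) / 2;
}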
2273*4882a593Smuzhiyun 
2274*4882a593Smuzhiyun /*
2275*4882a593Smuzhiyun  * This function will store relevant timing data
2276*4882a593Smuzhiyun  *
2277*4882a593Smuzhiyun  * This data will be used on subsequent boots to speed up boot times
2278*4882a593Smuzhiyun  * and is required for Suspend To RAM capabilities.
2279*4882a593Smuzhiyun  */
2280*4882a593Smuzhiyun void store_timings(struct mrc_params *mrc_params)
2281*4882a593Smuzhiyun {
2282*4882a593Smuzhiyun 	uint8_t ch, rk, bl;
2283*4882a593Smuzhiyun 	struct mrc_timings *mt = &mrc_params->timings;
2284*4882a593Smuzhiyun 
2285*4882a593Smuzhiyun 	for (ch = 0; ch < NUM_CHANNELS; ch++) {
2286*4882a593Smuzhiyun 		for (rk = 0; rk < NUM_RANKS; rk++) {
2287*4882a593Smuzhiyun 			for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
2288*4882a593Smuzhiyun 				mt->rcvn[ch][rk][bl] = get_rcvn(ch, rk, bl);
2289*4882a593Smuzhiyun 				mt->rdqs[ch][rk][bl] = get_rdqs(ch, rk, bl);
2290*4882a593Smuzhiyun 				mt->wdqs[ch][rk][bl] = get_wdqs(ch, rk, bl);
2291*4882a593Smuzhiyun 				mt->wdq[ch][rk][bl] = get_wdq(ch, rk, bl);
2292*4882a593Smuzhiyun 
2293*4882a593Smuzhiyun 				if (rk == 0)
2294*4882a593Smuzhiyun 					mt->vref[ch][bl] = get_vref(ch, bl);
2295*4882a593Smuzhiyun 			}
2296*4882a593Smuzhiyun 
2297*4882a593Smuzhiyun 			mt->wctl[ch][rk] = get_wctl(ch, rk);
2298*4882a593Smuzhiyun 		}
2299*4882a593Smuzhiyun 
2300*4882a593Smuzhiyun 		mt->wcmd[ch] = get_wcmd(ch);
2301*4882a593Smuzhiyun 	}
2302*4882a593Smuzhiyun 
2303*4882a593Smuzhiyun 	/* need to save for the case of changing frequency after a warm reset */
2304*4882a593Smuzhiyun 	mt->ddr_speed = mrc_params->ddr_speed;
2305*4882a593Smuzhiyun }
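
/*
 * Illustration only (not part of the MRC): a hypothetical helper showing how
 * the timings captured above could be dumped for debugging.  It touches only
 * fields written by store_timings() and the DPF() trace macro already used
 * in this file; the function name itself is an assumption.
 */
static void dump_stored_timings_example(const struct mrc_timings *mt)
{
	uint8_t ch, rk, bl;

	DPF(D_INFO, "stored ddr_speed %d\n", mt->ddr_speed);

	for (ch = 0; ch < NUM_CHANNELS; ch++)
		for (rk = 0; rk < NUM_RANKS; rk++)
			for (bl = 0; bl < NUM_BYTE_LANES; bl++)
				DPF(D_INFO,
				    "ch%d rk%d bl%d rcvn %d rdqs %d wdqs %d wdq %d\n",
				    ch, rk, bl,
				    mt->rcvn[ch][rk][bl],
				    mt->rdqs[ch][rk][bl],
				    mt->wdqs[ch][rk][bl],
				    mt->wdq[ch][rk][bl]);
}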
2306*4882a593Smuzhiyun 
2307*4882a593Smuzhiyun /*
2308*4882a593Smuzhiyun  * Ensure the SEC comes out of reset and the IA initiates the SEC
2309*4882a593Smuzhiyun  * to enable Memory Scrambling.
2310*4882a593Smuzhiyun  */
2311*4882a593Smuzhiyun void enable_scrambling(struct mrc_params *mrc_params)
2312*4882a593Smuzhiyun {
2313*4882a593Smuzhiyun 	uint32_t lfsr = 0;
2314*4882a593Smuzhiyun 	uint8_t i;
2315*4882a593Smuzhiyun 
2316*4882a593Smuzhiyun 	if (mrc_params->scrambling_enables == 0)
2317*4882a593Smuzhiyun 		return;
2318*4882a593Smuzhiyun 
2319*4882a593Smuzhiyun 	ENTERFN();
2320*4882a593Smuzhiyun 
2321*4882a593Smuzhiyun 	/* 32 bit seed is always stored in BIOS NVM */
2322*4882a593Smuzhiyun 	lfsr = mrc_params->timings.scrambler_seed;
2323*4882a593Smuzhiyun 
2324*4882a593Smuzhiyun 	if (mrc_params->boot_mode == BM_COLD) {
2325*4882a593Smuzhiyun 		/*
2326*4882a593Smuzhiyun 		 * The factory value is 0, so on the first boot
2327*4882a593Smuzhiyun 		 * a clock-based seed is loaded.
2328*4882a593Smuzhiyun 		 */
2329*4882a593Smuzhiyun 		if (lfsr == 0) {
2330*4882a593Smuzhiyun 			/*
2331*4882a593Smuzhiyun 			 * get seed from system clock
2332*4882a593Smuzhiyun 			 * and make sure it is not all 1's
2333*4882a593Smuzhiyun 			 */
2334*4882a593Smuzhiyun 			lfsr = rdtsc() & 0x0fffffff;
2335*4882a593Smuzhiyun 		} else {
2336*4882a593Smuzhiyun 			/*
2337*4882a593Smuzhiyun 		 * Need to replace the scrambler seed:
2338*4882a593Smuzhiyun 		 *
2339*4882a593Smuzhiyun 		 * advance the 32-bit LFSR 16 times, which consumes the
2340*4882a593Smuzhiyun 		 * last part of the previous scrambler vector
2341*4882a593Smuzhiyun 			 */
2342*4882a593Smuzhiyun 			for (i = 0; i < 16; i++)
2343*4882a593Smuzhiyun 				lfsr32(&lfsr);
2344*4882a593Smuzhiyun 		}
2345*4882a593Smuzhiyun 
2346*4882a593Smuzhiyun 		/* save new seed */
2347*4882a593Smuzhiyun 		mrc_params->timings.scrambler_seed = lfsr;
2348*4882a593Smuzhiyun 	}
2349*4882a593Smuzhiyun 
2350*4882a593Smuzhiyun 	/*
2351*4882a593Smuzhiyun 	 * In warm boot or S3 exit, we have the previous seed.
2352*4882a593Smuzhiyun 	 * In cold boot, we have the last 32bit LFSR which is the new seed.
2353*4882a593Smuzhiyun 	 */
2354*4882a593Smuzhiyun 	lfsr32(&lfsr);	/* shift to next value */
2355*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, SCRMSEED, (lfsr & 0x0003ffff));
2356*4882a593Smuzhiyun 
2357*4882a593Smuzhiyun 	for (i = 0; i < 2; i++)
2358*4882a593Smuzhiyun 		msg_port_write(MEM_CTLR, SCRMLO + i, (lfsr & 0xaaaaaaaa));
2359*4882a593Smuzhiyun 
2360*4882a593Smuzhiyun 	LEAVEFN();
2361*4882a593Smuzhiyun }
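
/*
 * Illustration only (not part of the MRC): the seed handling above relies on
 * lfsr32() from mrc_util.c to advance a 32-bit linear feedback shift
 * register.  The sketch below shows what a generic Galois LFSR step looks
 * like; the tap mask used here (0x80000057) is an arbitrary example and is
 * not necessarily the polynomial implemented by lfsr32().
 */
static void lfsr32_step_example(uint32_t *seed)
{
	uint32_t lsb = *seed & 1;

	*seed >>= 1;
	if (lsb)
		*seed ^= 0x80000057;	/* example taps, see note above */
}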
2362*4882a593Smuzhiyun 
2363*4882a593Smuzhiyun /*
2364*4882a593Smuzhiyun  * Configure MCU Power Management Control Register
2365*4882a593Smuzhiyun  * and Scheduler Control Register
2366*4882a593Smuzhiyun  */
2367*4882a593Smuzhiyun void prog_ddr_control(struct mrc_params *mrc_params)
2368*4882a593Smuzhiyun {
2369*4882a593Smuzhiyun 	u32 dsch;
2370*4882a593Smuzhiyun 	u32 dpmc0;
2371*4882a593Smuzhiyun 
2372*4882a593Smuzhiyun 	ENTERFN();
2373*4882a593Smuzhiyun 
2374*4882a593Smuzhiyun 	dsch = msg_port_read(MEM_CTLR, DSCH);
2375*4882a593Smuzhiyun 	dsch &= ~(DSCH_OOODIS | DSCH_OOOST3DIS | DSCH_NEWBYPDIS);
2376*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DSCH, dsch);
2377*4882a593Smuzhiyun 
2378*4882a593Smuzhiyun 	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
2379*4882a593Smuzhiyun 	dpmc0 &= ~DPMC0_DISPWRDN;
2380*4882a593Smuzhiyun 	dpmc0 |= (mrc_params->power_down_disable << 25);
2381*4882a593Smuzhiyun 	dpmc0 &= ~DPMC0_CLKGTDIS;
2382*4882a593Smuzhiyun 	dpmc0 &= ~DPMC0_PCLSTO_MASK;
2383*4882a593Smuzhiyun 	dpmc0 |= (4 << 16);
2384*4882a593Smuzhiyun 	dpmc0 |= DPMC0_PREAPWDEN;
2385*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DPMC0, dpmc0);
2386*4882a593Smuzhiyun 
2387*4882a593Smuzhiyun 	/* CMDTRIST = 2h - CMD/ADDR are tristated when no valid command */
2388*4882a593Smuzhiyun 	mrc_write_mask(MEM_CTLR, DPMC1, 0x20, 0x30);
2389*4882a593Smuzhiyun 
2390*4882a593Smuzhiyun 	LEAVEFN();
2391*4882a593Smuzhiyun }
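
/*
 * Illustration only (not part of the MRC): the DPMC1 update above uses
 * mrc_write_mask() from mrc_util.c to change only the 2-bit CMDTRIST field
 * (data 0x20, mask 0x30).  Assuming mrc_write_mask(unit, addr, data, mask)
 * is a plain read-modify-write over the message port, it is equivalent to
 * the open-coded sequence below.
 */
static void write_mask_example(u8 unit, u32 addr, u32 data, u32 mask)
{
	u32 tmp;

	tmp = msg_port_read(unit, addr);
	tmp &= ~mask;			/* clear the selected field */
	tmp |= (data & mask);		/* insert the new field value */
	msg_port_write(unit, addr, tmp);
}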
2392*4882a593Smuzhiyun 
2393*4882a593Smuzhiyun /*
2394*4882a593Smuzhiyun  * After training complete configure MCU Rank Population Register
2395*4882a593Smuzhiyun  * specifying: ranks enabled, device width, density, address mode
2396*4882a593Smuzhiyun  */
2397*4882a593Smuzhiyun void prog_dra_drb(struct mrc_params *mrc_params)
2398*4882a593Smuzhiyun {
2399*4882a593Smuzhiyun 	u32 drp;
2400*4882a593Smuzhiyun 	u32 dco;
2401*4882a593Smuzhiyun 	u8 density = mrc_params->params.density;
2402*4882a593Smuzhiyun 
2403*4882a593Smuzhiyun 	ENTERFN();
2404*4882a593Smuzhiyun 
2405*4882a593Smuzhiyun 	dco = msg_port_read(MEM_CTLR, DCO);
2406*4882a593Smuzhiyun 	dco &= ~DCO_IC;
2407*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DCO, dco);
2408*4882a593Smuzhiyun 
2409*4882a593Smuzhiyun 	drp = 0;
2410*4882a593Smuzhiyun 	if (mrc_params->rank_enables & 1)
2411*4882a593Smuzhiyun 		drp |= DRP_RKEN0;
2412*4882a593Smuzhiyun 	if (mrc_params->rank_enables & 2)
2413*4882a593Smuzhiyun 		drp |= DRP_RKEN1;
2414*4882a593Smuzhiyun 	if (mrc_params->dram_width == X16) {
2415*4882a593Smuzhiyun 		drp |= (1 << 4);
2416*4882a593Smuzhiyun 		drp |= (1 << 9);
2417*4882a593Smuzhiyun 	}
2418*4882a593Smuzhiyun 
2419*4882a593Smuzhiyun 	/*
2420*4882a593Smuzhiyun 	 * Density encoding in struct dram_params (0=512Mb, 1=1Gb, 2=2Gb, 3=4Gb)
2421*4882a593Smuzhiyun 	 * has to be mapped to the RANKDENSx encoding (0=1Gb)
2422*4882a593Smuzhiyun 	 */
2423*4882a593Smuzhiyun 	if (density == 0)
2424*4882a593Smuzhiyun 		density = 4;
2425*4882a593Smuzhiyun 
2426*4882a593Smuzhiyun 	drp |= ((density - 1) << 6);
2427*4882a593Smuzhiyun 	drp |= ((density - 1) << 11);
2428*4882a593Smuzhiyun 
2429*4882a593Smuzhiyun 	/* Address mode may be overridden later if ECC is enabled */
2430*4882a593Smuzhiyun 	drp |= (mrc_params->address_mode << 14);
2431*4882a593Smuzhiyun 
2432*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DRP, drp);
2433*4882a593Smuzhiyun 
2434*4882a593Smuzhiyun 	dco &= ~DCO_PMICTL;
2435*4882a593Smuzhiyun 	dco |= DCO_IC;
2436*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DCO, dco);
2437*4882a593Smuzhiyun 
2438*4882a593Smuzhiyun 	LEAVEFN();
2439*4882a593Smuzhiyun }
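
/*
 * Illustration only (not part of the MRC): the density remapping above,
 * shown as a table.  struct dram_params encodes 0=512Mb, 1=1Gb, 2=2Gb,
 * 3=4Gb, while RANKDENSx starts at 1Gb, so 512Mb wraps to the end:
 *
 *   params.density:  0 (512Mb)  1 (1Gb)  2 (2Gb)  3 (4Gb)
 *   RANKDENSx value:     3         0        1        2
 */
static u8 map_density_example(u8 density)
{
	if (density == 0)	/* 512Mb wraps around */
		density = 4;

	return density - 1;	/* value placed in the RANKDENS0/1 fields */
}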
2440*4882a593Smuzhiyun 
2441*4882a593Smuzhiyun /* Send DRAM wake command */
2442*4882a593Smuzhiyun void perform_wake(struct mrc_params *mrc_params)
2443*4882a593Smuzhiyun {
2444*4882a593Smuzhiyun 	ENTERFN();
2445*4882a593Smuzhiyun 
2446*4882a593Smuzhiyun 	dram_wake_command();
2447*4882a593Smuzhiyun 
2448*4882a593Smuzhiyun 	LEAVEFN();
2449*4882a593Smuzhiyun }
2450*4882a593Smuzhiyun 
2451*4882a593Smuzhiyun /*
2452*4882a593Smuzhiyun  * Configure refresh rate and short ZQ calibration interval
2453*4882a593Smuzhiyun  * Activate dynamic self refresh
2454*4882a593Smuzhiyun  */
2455*4882a593Smuzhiyun void change_refresh_period(struct mrc_params *mrc_params)
2456*4882a593Smuzhiyun {
2457*4882a593Smuzhiyun 	u32 drfc;
2458*4882a593Smuzhiyun 	u32 dcal;
2459*4882a593Smuzhiyun 	u32 dpmc0;
2460*4882a593Smuzhiyun 
2461*4882a593Smuzhiyun 	ENTERFN();
2462*4882a593Smuzhiyun 
2463*4882a593Smuzhiyun 	drfc = msg_port_read(MEM_CTLR, DRFC);
2464*4882a593Smuzhiyun 	drfc &= ~DRFC_TREFI_MASK;
2465*4882a593Smuzhiyun 	drfc |= (mrc_params->refresh_rate << 12);
2466*4882a593Smuzhiyun 	drfc |= DRFC_REFDBTCLR;
2467*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DRFC, drfc);
2468*4882a593Smuzhiyun 
2469*4882a593Smuzhiyun 	dcal = msg_port_read(MEM_CTLR, DCAL);
2470*4882a593Smuzhiyun 	dcal &= ~DCAL_ZQCINT_MASK;
2471*4882a593Smuzhiyun 	dcal |= (3 << 8);	/* 63ms */
2472*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DCAL, dcal);
2473*4882a593Smuzhiyun 
2474*4882a593Smuzhiyun 	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
2475*4882a593Smuzhiyun 	dpmc0 |= (DPMC0_DYNSREN | DPMC0_ENPHYCLKGATE);
2476*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DPMC0, dpmc0);
2477*4882a593Smuzhiyun 
2478*4882a593Smuzhiyun 	LEAVEFN();
2479*4882a593Smuzhiyun }
2480*4882a593Smuzhiyun 
2481*4882a593Smuzhiyun /*
2482*4882a593Smuzhiyun  * Configure DDRPHY for Auto-Refresh, Periodic Compensations,
2483*4882a593Smuzhiyun  * Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down
2484*4882a593Smuzhiyun  */
2485*4882a593Smuzhiyun void set_auto_refresh(struct mrc_params *mrc_params)
2486*4882a593Smuzhiyun {
2487*4882a593Smuzhiyun 	uint32_t channel;
2488*4882a593Smuzhiyun 	uint32_t rank;
2489*4882a593Smuzhiyun 	uint32_t bl;
2490*4882a593Smuzhiyun 	uint32_t bl_divisor = 1;
2491*4882a593Smuzhiyun 	uint32_t temp;
2492*4882a593Smuzhiyun 
2493*4882a593Smuzhiyun 	ENTERFN();
2494*4882a593Smuzhiyun 
2495*4882a593Smuzhiyun 	/*
2496*4882a593Smuzhiyun 	 * Enable Auto-Refresh, Periodic Compensations, Dynamic Diff-Amp,
2497*4882a593Smuzhiyun 	 * ZQSPERIOD, Auto-Precharge, CKE Power-Down
2498*4882a593Smuzhiyun 	 */
2499*4882a593Smuzhiyun 	for (channel = 0; channel < NUM_CHANNELS; channel++) {
2500*4882a593Smuzhiyun 		if (mrc_params->channel_enables & (1 << channel)) {
2501*4882a593Smuzhiyun 			/* Enable Periodic RCOMPS */
2502*4882a593Smuzhiyun 			mrc_alt_write_mask(DDRPHY, CMPCTRL, 2, 2);
2503*4882a593Smuzhiyun 
2504*4882a593Smuzhiyun 			/* Enable Dynamic DiffAmp & Set Read ODT Value */
2505*4882a593Smuzhiyun 			switch (mrc_params->rd_odt_value) {
2506*4882a593Smuzhiyun 			case 0:
2507*4882a593Smuzhiyun 				temp = 0x3f;	/* OFF */
2508*4882a593Smuzhiyun 				break;
2509*4882a593Smuzhiyun 			default:
2510*4882a593Smuzhiyun 				temp = 0x00;	/* Auto */
2511*4882a593Smuzhiyun 				break;
2512*4882a593Smuzhiyun 			}
2513*4882a593Smuzhiyun 
2514*4882a593Smuzhiyun 			for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
2515*4882a593Smuzhiyun 				/* Override: DIFFAMP, ODT */
2516*4882a593Smuzhiyun 				mrc_alt_write_mask(DDRPHY,
2517*4882a593Smuzhiyun 					B0OVRCTL + bl * DDRIODQ_BL_OFFSET +
2518*4882a593Smuzhiyun 					channel * DDRIODQ_CH_OFFSET,
2519*4882a593Smuzhiyun 					temp << 10,
2520*4882a593Smuzhiyun 					0x003ffc00);
2521*4882a593Smuzhiyun 
2522*4882a593Smuzhiyun 				/* Override: DIFFAMP, ODT */
2523*4882a593Smuzhiyun 				mrc_alt_write_mask(DDRPHY,
2524*4882a593Smuzhiyun 					B1OVRCTL + bl * DDRIODQ_BL_OFFSET +
2525*4882a593Smuzhiyun 					channel * DDRIODQ_CH_OFFSET,
2526*4882a593Smuzhiyun 					temp << 10,
2527*4882a593Smuzhiyun 					0x003ffc00);
2528*4882a593Smuzhiyun 			}
2529*4882a593Smuzhiyun 
2530*4882a593Smuzhiyun 			/* Issue ZQCS command */
2531*4882a593Smuzhiyun 			for (rank = 0; rank < NUM_RANKS; rank++) {
2532*4882a593Smuzhiyun 				if (mrc_params->rank_enables & (1 << rank))
2533*4882a593Smuzhiyun 					dram_init_command(DCMD_ZQCS(rank));
2534*4882a593Smuzhiyun 			}
2535*4882a593Smuzhiyun 		}
2536*4882a593Smuzhiyun 	}
2537*4882a593Smuzhiyun 
2538*4882a593Smuzhiyun 	clear_pointers();
2539*4882a593Smuzhiyun 
2540*4882a593Smuzhiyun 	LEAVEFN();
2541*4882a593Smuzhiyun }
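
/*
 * Illustration only (not part of the MRC): how the read ODT / DIFFAMP
 * override value chosen above lands in B0OVRCTL/B1OVRCTL.  The override
 * field is the 12-bit window selected by mask 0x003ffc00 (bits 21:10);
 * 0x3f turns read ODT off, 0x00 leaves it in automatic mode.
 */
static u32 odt_override_field_example(u32 rd_odt_value)
{
	u32 temp = (rd_odt_value == 0) ? 0x3f : 0x00;

	return temp << 10;	/* written with mask 0x003ffc00 above */
}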
2542*4882a593Smuzhiyun 
2543*4882a593Smuzhiyun /*
2544*4882a593Smuzhiyun  * Depending on the configuration, enable ECC support
2545*4882a593Smuzhiyun  *
2546*4882a593Smuzhiyun  * The available memory size is decreased, and memory is rewritten with 0s
2547*4882a593Smuzhiyun  * in order to clear the error status. Address mode 2 is forced.
2548*4882a593Smuzhiyun  */
2549*4882a593Smuzhiyun void ecc_enable(struct mrc_params *mrc_params)
2550*4882a593Smuzhiyun {
2551*4882a593Smuzhiyun 	u32 drp;
2552*4882a593Smuzhiyun 	u32 dsch;
2553*4882a593Smuzhiyun 	u32 ecc_ctrl;
2554*4882a593Smuzhiyun 
2555*4882a593Smuzhiyun 	if (mrc_params->ecc_enables == 0)
2556*4882a593Smuzhiyun 		return;
2557*4882a593Smuzhiyun 
2558*4882a593Smuzhiyun 	ENTERFN();
2559*4882a593Smuzhiyun 
2560*4882a593Smuzhiyun 	/* Configuration required in ECC mode */
2561*4882a593Smuzhiyun 	drp = msg_port_read(MEM_CTLR, DRP);
2562*4882a593Smuzhiyun 	drp &= ~DRP_ADDRMAP_MASK;
2563*4882a593Smuzhiyun 	drp |= DRP_ADDRMAP_MAP1;
2564*4882a593Smuzhiyun 	drp |= DRP_PRI64BSPLITEN;
2565*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DRP, drp);
2566*4882a593Smuzhiyun 
2567*4882a593Smuzhiyun 	/* Disable new request bypass */
2568*4882a593Smuzhiyun 	dsch = msg_port_read(MEM_CTLR, DSCH);
2569*4882a593Smuzhiyun 	dsch |= DSCH_NEWBYPDIS;
2570*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DSCH, dsch);
2571*4882a593Smuzhiyun 
2572*4882a593Smuzhiyun 	/* Enable ECC */
2573*4882a593Smuzhiyun 	ecc_ctrl = (DECCCTRL_SBEEN | DECCCTRL_DBEEN | DECCCTRL_ENCBGEN);
2574*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DECCCTRL, ecc_ctrl);
2575*4882a593Smuzhiyun 
2576*4882a593Smuzhiyun 	/* Assume 8 bank memory, one bank is gone for ECC */
2577*4882a593Smuzhiyun 	mrc_params->mem_size -= mrc_params->mem_size / 8;
2578*4882a593Smuzhiyun 
2579*4882a593Smuzhiyun 	/* For S3 resume memory content has to be preserved */
2580*4882a593Smuzhiyun 	if (mrc_params->boot_mode != BM_S3) {
2581*4882a593Smuzhiyun 		select_hte();
2582*4882a593Smuzhiyun 		hte_mem_init(mrc_params, MRC_MEM_INIT);
2583*4882a593Smuzhiyun 		select_mem_mgr();
2584*4882a593Smuzhiyun 	}
2585*4882a593Smuzhiyun 
2586*4882a593Smuzhiyun 	LEAVEFN();
2587*4882a593Smuzhiyun }
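
/*
 * Worked example for the size adjustment above (illustration only): one bank
 * in eight is reserved for ECC, leaving 7/8 of the memory usable.  With
 * mem_size = 1024 MiB, the reported size becomes 1024 - 1024 / 8 = 896 MiB.
 */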
2588*4882a593Smuzhiyun 
2589*4882a593Smuzhiyun /*
2590*4882a593Smuzhiyun  * Execute the memory test;
2591*4882a593Smuzhiyun  * if an error is detected, it is indicated in mrc_params->status
2592*4882a593Smuzhiyun  */
2593*4882a593Smuzhiyun void memory_test(struct mrc_params *mrc_params)
2594*4882a593Smuzhiyun {
2595*4882a593Smuzhiyun 	uint32_t result = 0;
2596*4882a593Smuzhiyun 
2597*4882a593Smuzhiyun 	ENTERFN();
2598*4882a593Smuzhiyun 
2599*4882a593Smuzhiyun 	select_hte();
2600*4882a593Smuzhiyun 	result = hte_mem_init(mrc_params, MRC_MEM_TEST);
2601*4882a593Smuzhiyun 	select_mem_mgr();
2602*4882a593Smuzhiyun 
2603*4882a593Smuzhiyun 	DPF(D_INFO, "Memory test result %x\n", result);
2604*4882a593Smuzhiyun 	mrc_params->status = ((result == 0) ? MRC_SUCCESS : MRC_E_MEMTEST);
2605*4882a593Smuzhiyun 	LEAVEFN();
2606*4882a593Smuzhiyun }
2607*4882a593Smuzhiyun 
2608*4882a593Smuzhiyun /* Lock MCU registers at the end of initialization sequence */
2609*4882a593Smuzhiyun void lock_registers(struct mrc_params *mrc_params)
2610*4882a593Smuzhiyun {
2611*4882a593Smuzhiyun 	u32 dco;
2612*4882a593Smuzhiyun 
2613*4882a593Smuzhiyun 	ENTERFN();
2614*4882a593Smuzhiyun 
2615*4882a593Smuzhiyun 	dco = msg_port_read(MEM_CTLR, DCO);
2616*4882a593Smuzhiyun 	dco &= ~(DCO_PMICTL | DCO_PMIDIS);
2617*4882a593Smuzhiyun 	dco |= (DCO_DRPLOCK | DCO_CPGCLOCK);
2618*4882a593Smuzhiyun 	msg_port_write(MEM_CTLR, DCO, dco);
2619*4882a593Smuzhiyun 
2620*4882a593Smuzhiyun 	LEAVEFN();
2621*4882a593Smuzhiyun }
2622