/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

/* Design Guidelines parameters */
u32 g_zpri_data = 123;		/* controller data - P drive strength */
u32 g_znri_data = 123;		/* controller data - N drive strength */
u32 g_zpri_ctrl = 74;		/* controller C/A - P drive strength */
u32 g_znri_ctrl = 74;		/* controller C/A - N drive strength */
u32 g_zpodt_data = 45;		/* controller data - P ODT */
u32 g_znodt_data = 45;		/* controller data - N ODT */
u32 g_zpodt_ctrl = 45;		/* controller C/A - P ODT */
u32 g_znodt_ctrl = 45;		/* controller C/A - N ODT */
u32 g_odt_config = 0x120012;
u32 g_rtt_nom = 0x44;
u32 g_dic = 0x2;

#ifdef STATIC_ALGO_SUPPORT

#define PARAM_NOT_CARE		0
#define MAX_STATIC_SEQ		48

u32 silicon_delay[HWS_MAX_DEVICE_NUM];
struct hws_tip_static_config_info static_config[HWS_MAX_DEVICE_NUM];
static reg_data *static_init_controller_config[HWS_MAX_DEVICE_NUM];

/* debug delay in write leveling */
int wl_debug_delay = 0;
/* pup register #3 for functional board */
int function_reg_value = 8;
u32 silicon;

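/*
 * Extra read-ready delay added on top of rd_sample_dly[], indexed by the
 * largest DQS phase of a bus pair (see the rd_ready_del[] calculation in
 * ddr3_tip_read_leveling_static_config()); presumably in the same SDR-clock
 * units as rd_sample_dly[].
 */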
u32 read_ready_delay_phase_offset[] = { 4, 4, 4, 4, 6, 6, 6, 6 };

static struct cs_element chip_select_map[] = {
	/* cs_num (for a single-CS bitmask), num_cs; indexed by CS bitmask */
	{0, 0},
	{0, 1},
	{1, 1},
	{0, 2},
	{2, 1},
	{0, 2},
	{0, 2},
	{0, 3},
	{3, 1},
	{0, 2},
	{0, 2},
	{0, 3},
	{0, 2},
	{0, 3},
	{0, 3},
	{0, 4}
};

/*
 * Register static init controller DB
 */
int ddr3_tip_init_specific_reg_config(u32 dev_num, reg_data *reg_config_arr)
{
	static_init_controller_config[dev_num] = reg_config_arr;
	return MV_OK;
}

/*
 * Register static info DB
 */
int ddr3_tip_init_static_config_db(
	u32 dev_num, struct hws_tip_static_config_info *static_config_info)
{
	static_config[dev_num].board_trace_arr =
		static_config_info->board_trace_arr;
	static_config[dev_num].package_trace_arr =
		static_config_info->package_trace_arr;
	silicon_delay[dev_num] = static_config_info->silicon_delay;

	return MV_OK;
}

/*
 * Static round trip flow - Calculates the total round trip delay.
 */
int ddr3_tip_static_round_trip_arr_build(u32 dev_num,
					 struct trip_delay_element *table_ptr,
					 int is_wl, u32 *round_trip_delay_arr)
{
	u32 bus_index, global_bus;
	u32 if_id;
	u32 bus_per_interface;
	int sign;
	u32 temp;
	u32 board_trace;
	struct trip_delay_element *pkg_delay_ptr;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * In WL we calculate the difference between the Clock and DQS trip
	 * delays; in RL we sum the round trips of Clock and DQS.
	 */
	sign = (is_wl) ? -1 : 1;

	bus_per_interface = GET_TOPOLOGY_NUM_OF_BUSES();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_index = 0; bus_index < bus_per_interface;
		     bus_index++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
			global_bus = (if_id * bus_per_interface) + bus_index;

			/* calculate total trip delay (package and board) */
			board_trace = (table_ptr[global_bus].dqs_delay * sign) +
				table_ptr[global_bus].ck_delay;
			/*
			 * Convert the trace length to a delay in psec units
			 * (scale factor 163/1000, i.e. ~0.163 ps per unit of
			 * board trace length).
			 */
			temp = (board_trace * 163) / 1000;

			pkg_delay_ptr =
				static_config[dev_num].package_trace_arr;
			round_trip_delay_arr[global_bus] = temp +
				(int)(pkg_delay_ptr[global_bus].dqs_delay *
				      sign) +
				(int)pkg_delay_ptr[global_bus].ck_delay +
				(int)((is_wl == 1) ? wl_debug_delay :
				      (int)silicon_delay[dev_num]);
			DEBUG_TRAINING_STATIC_IP(
				DEBUG_LEVEL_TRACE,
				("Round Trip Build round_trip_delay_arr[0x%x]: 0x%x    temp 0x%x\n",
				 global_bus, round_trip_delay_arr[global_bus],
				 temp));
		}
	}

	return MV_OK;
}

/*
 * Write leveling for static flow - calculating the round trip delay of the
 * DQS signal.
 */
int ddr3_tip_write_leveling_static_config(u32 dev_num, u32 if_id,
					  enum hws_ddr_freq frequency,
					  u32 *round_trip_delay_arr)
{
	u32 bus_index;		/* index to the bus loop */
	u32 bus_start_index;
	u32 bus_per_interface;
	u32 phase = 0;
	u32 adll = 0, adll_cen, adll_inv, adll_final;
	u32 adll_period = MEGA / freq_val[frequency] / 64;
	/* needed by VALIDATE_ACTIVE() in the loop below */
	struct hws_topology_map *tm = ddr3_get_topology_map();

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("ddr3_tip_write_leveling_static_config\n"));
	DEBUG_TRAINING_STATIC_IP(
		DEBUG_LEVEL_TRACE,
		("dev_num 0x%x IF 0x%x freq %d (adll_period 0x%x)\n",
		 dev_num, if_id, frequency, adll_period));

	bus_per_interface = GET_TOPOLOGY_NUM_OF_BUSES();
	bus_start_index = if_id * bus_per_interface;
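	/*
	 * The round-trip delay is split into a coarse "phase" of 32 ADLL taps
	 * (one tap = 1/64 of the clock period, so one phase step is nominally
	 * half a clock) plus a fine ADLL value of 0..31 taps.
	 * Example, assuming freq_val[] is in MHz so MEGA / freq_val[] is the
	 * clock period in ps: at 800 MHz the period is 1250 ps, adll_period
	 * is 1250 / 64 = 19 ps and one phase step is 32 * 19 = 608 ps.
	 */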
	for (bus_index = bus_start_index;
	     bus_index < (bus_start_index + bus_per_interface); bus_index++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
		phase = round_trip_delay_arr[bus_index] / (32 * adll_period);
		adll = (round_trip_delay_arr[bus_index] -
			(phase * 32 * adll_period)) / adll_period;
		adll = (adll > 31) ? 31 : adll;
		adll_cen = 16 + adll;
		adll_inv = adll_cen / 32;
		adll_final = adll_cen - (adll_inv * 32);
		adll_final = (adll_final > 31) ? 31 : adll_final;

		DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
					 ("\t%d - phase 0x%x adll 0x%x\n",
					  bus_index, phase, adll));
		/*
		 * Write to all 4 PHYs of this interface:
		 * bits [4:0] - ADLL, bits [8:6] - phase
		 */
		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      (bus_index % 4), DDR_PHY_DATA,
			      PHY_WRITE_DELAY(cs),
			      ((phase << 6) + (adll & 0x1f)), 0x1df));
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ACCESS_TYPE_UNICAST, (bus_index % 4),
			      DDR_PHY_DATA, WRITE_CENTRALIZATION_PHY_REG,
			      ((adll_inv & 0x1) << 5) + adll_final));
	}

	return MV_OK;
}

/*
 * Read leveling for static flow
 */
int ddr3_tip_read_leveling_static_config(u32 dev_num,
					 u32 if_id,
					 enum hws_ddr_freq frequency,
					 u32 *total_round_trip_delay_arr)
{
	u32 cs, data0, data1, data3 = 0;
	u32 bus_index;		/* index to the bus loop */
	u32 bus_start_index;
	u32 phase0, phase1, max_phase;
	u32 adll0, adll1;
	u32 cl_value;
	u32 min_delay;
	u32 sdr_period = MEGA / freq_val[frequency];
	u32 ddr_period = MEGA / freq_val[frequency] / 2;
	u32 adll_period = MEGA / freq_val[frequency] / 64;
	enum hws_speed_bin speed_bin_index;
	u32 rd_sample_dly[MAX_CS_NUM] = { 0 };
	u32 rd_ready_del[MAX_CS_NUM] = { 0 };
	u32 bus_per_interface = GET_TOPOLOGY_NUM_OF_BUSES();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("ddr3_tip_read_leveling_static_config\n"));
	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("dev_num 0x%x ifc 0x%x freq %d\n", dev_num,
				  if_id, frequency));
	DEBUG_TRAINING_STATIC_IP(
		DEBUG_LEVEL_TRACE,
		("Sdr_period 0x%x Ddr_period 0x%x adll_period 0x%x\n",
		 sdr_period, ddr_period, adll_period));

	if (tm->interface_params[first_active_if].memory_freq ==
	    frequency) {
		cl_value = tm->interface_params[first_active_if].cas_l;
		DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
					 ("cl_value 0x%x\n", cl_value));
	} else {
		speed_bin_index = tm->interface_params[if_id].speed_bin_index;
		cl_value = cas_latency_table[speed_bin_index].cl_val[frequency];
		DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
					 ("cl_value 0x%x speed_bin_index %d\n",
					  cl_value, speed_bin_index));
	}

	bus_start_index = if_id * bus_per_interface;

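	/*
	 * For each pair of busses the total round-trip delay is decomposed
	 * into rd_sample_dly[cs] (an even number of SDR clocks, taken from
	 * the smaller delay of the pair), a DQS phase in half-clock (DDR)
	 * steps and a fine ADLL value of 0..31 taps (1/64 clock each).
	 * rd_ready_del[cs] then adds read_ready_delay_phase_offset[max_phase]
	 * on top of the sample delay.
	 */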
	for (bus_index = bus_start_index;
	     bus_index < (bus_start_index + bus_per_interface);
	     bus_index += 2) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
		cs = chip_select_map[
			tm->interface_params[if_id].as_bus_params[
				(bus_index % 4)].cs_bitmask].cs_num;

		/* read sample delay calculation */
		min_delay = (total_round_trip_delay_arr[bus_index] <
			     total_round_trip_delay_arr[bus_index + 1]) ?
			total_round_trip_delay_arr[bus_index] :
			total_round_trip_delay_arr[bus_index + 1];
		/* round down */
		rd_sample_dly[cs] = 2 * (min_delay / (sdr_period * 2));
		DEBUG_TRAINING_STATIC_IP(
			DEBUG_LEVEL_TRACE,
			("\t%d - min_delay 0x%x cs 0x%x rd_sample_dly[cs] 0x%x\n",
			 bus_index, min_delay, cs, rd_sample_dly[cs]));

		/* phase calculation */
		phase0 = (total_round_trip_delay_arr[bus_index] -
			  (sdr_period * rd_sample_dly[cs])) / (ddr_period);
		phase1 = (total_round_trip_delay_arr[bus_index + 1] -
			  (sdr_period * rd_sample_dly[cs])) / (ddr_period);
		max_phase = (phase0 > phase1) ? phase0 : phase1;
		DEBUG_TRAINING_STATIC_IP(
			DEBUG_LEVEL_TRACE,
			("\tphase0 0x%x phase1 0x%x max_phase 0x%x\n",
			 phase0, phase1, max_phase));

		/* ADLL calculation */
		adll0 = (u32)((total_round_trip_delay_arr[bus_index] -
			       (sdr_period * rd_sample_dly[cs]) -
			       (ddr_period * phase0)) / adll_period);
		adll0 = (adll0 > 31) ? 31 : adll0;
		adll1 = (u32)((total_round_trip_delay_arr[bus_index + 1] -
			       (sdr_period * rd_sample_dly[cs]) -
			       (ddr_period * phase1)) / adll_period);
		adll1 = (adll1 > 31) ? 31 : adll1;

		/* the Read delay that closes the Read FIFO */
		rd_ready_del[cs] = rd_sample_dly[cs] +
			read_ready_delay_phase_offset[max_phase];
		DEBUG_TRAINING_STATIC_IP(
			DEBUG_LEVEL_TRACE,
			("\tadll0 0x%x adll1 0x%x rd_ready_del[cs] 0x%x\n",
			 adll0, adll1, rd_ready_del[cs]));

		/*
		 * Write to the PHYs of the interface (bits [4:0] - ADLL,
		 * bits [8:6] - phase)
		 */
		data0 = ((phase0 << 6) + (adll0 & 0x1f));
		data1 = ((phase1 << 6) + (adll1 & 0x1f));

		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      (bus_index % 4), DDR_PHY_DATA, PHY_READ_DELAY(cs),
			      data0, 0x1df));
		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ((bus_index + 1) % 4), DDR_PHY_DATA,
			      PHY_READ_DELAY(cs), data1, 0x1df));
	}

	for (bus_index = 0; bus_index < bus_per_interface; bus_index++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      bus_index, DDR_PHY_DATA, 0x3, data3, 0x1f));
	}
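	/*
	 * READ_DATA_SAMPLE_DELAY holds one field per chip select: CS0 in the
	 * low bits (with the CAS latency added) and CS1 shifted up by 8,
	 * mirroring the bit layout noted for READ_DATA_READY_DELAY below.
	 */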
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      READ_DATA_SAMPLE_DELAY,
		      (rd_sample_dly[0] + cl_value) + (rd_sample_dly[1] << 8),
		      MASK_ALL_BITS));

	/* read_ready_del: CS0 in bits [4:0], CS1 in bits [12:8] */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      READ_DATA_READY_DELAY,
		      rd_ready_del[0] + (rd_ready_del[1] << 8) + cl_value,
		      MASK_ALL_BITS));

	return MV_OK;
}

/*
 * DDR3 Static flow
 */
int ddr3_tip_run_static_alg(u32 dev_num, enum hws_ddr_freq freq)
{
	u32 if_id = 0;
	struct trip_delay_element *table_ptr;
	u32 wl_total_round_trip_delay_arr[MAX_TOTAL_BUS_NUM];
	u32 rl_total_round_trip_delay_arr[MAX_TOTAL_BUS_NUM];
	struct init_cntr_param init_cntr_prm;
	int ret;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("ddr3_tip_run_static_alg"));

	init_cntr_prm.do_mrs_phy = 1;
	init_cntr_prm.is_ctrl64_bit = 0;
	init_cntr_prm.init_phy = 1;
	ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
	if (ret != MV_OK) {
		DEBUG_TRAINING_STATIC_IP(
			DEBUG_LEVEL_ERROR,
			("hws_ddr3_tip_init_controller failure\n"));
	}

	/* calculate the round trip delay for Write Leveling */
	table_ptr = static_config[dev_num].board_trace_arr;
	CHECK_STATUS(ddr3_tip_static_round_trip_arr_build
		     (dev_num, table_ptr, 1,
		      wl_total_round_trip_delay_arr));
	/* calculate the round trip delay for Read Leveling */
	CHECK_STATUS(ddr3_tip_static_round_trip_arr_build
		     (dev_num, table_ptr, 0,
		      rl_total_round_trip_delay_arr));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		/* check if the interface is enabled */
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/*
		 * Static frequency is defined according to init-frequency
		 * (not target)
		 */
		DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
					 ("Static IF %d freq %d\n",
					  if_id, freq));
		CHECK_STATUS(ddr3_tip_write_leveling_static_config
			     (dev_num, if_id, freq,
			      wl_total_round_trip_delay_arr));
		CHECK_STATUS(ddr3_tip_read_leveling_static_config
			     (dev_num, if_id, freq,
			      rl_total_round_trip_delay_arr));
	}

	return MV_OK;
}

/*
 * Init controller for static flow
 */
int ddr3_tip_static_init_controller(u32 dev_num)
{
	u32 index_cnt = 0;

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("ddr3_tip_static_init_controller\n"));
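	/*
	 * The per-device register list is walked until an entry with
	 * reg_addr == 0 is found, so the array registered via
	 * ddr3_tip_init_specific_reg_config() must end with such a sentinel.
	 */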
	while (static_init_controller_config[dev_num][index_cnt].reg_addr !=
	       0) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      static_init_controller_config[dev_num][index_cnt].
			      reg_addr,
			      static_init_controller_config[dev_num][index_cnt].
			      reg_data,
			      static_init_controller_config[dev_num][index_cnt].
			      reg_mask));

		DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
					 ("Init_controller index_cnt %d\n",
					  index_cnt));
		index_cnt++;
	}

	return MV_OK;
}

int ddr3_tip_static_phy_init_controller(u32 dev_num)
{
	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("Phy Init Controller 2\n"));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa4,
		      0x3dfe));

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("Phy Init Controller 3\n"));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa6,
		      0xcb2));

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("Phy Init Controller 4\n"));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa9,
		      0));

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("Static Receiver Calibration\n"));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xd0,
		      0x1f));

	DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
				 ("Static V-REF Calibration\n"));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa8,
		      0x434));

	return MV_OK;
}
#endif

/*
 * Configure phy (called by static init controller) for static flow
 */
int ddr3_tip_configure_phy(u32 dev_num)
{
	u32 if_id, phy_id;
	struct hws_topology_map *tm = ddr3_get_topology_map();

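	/*
	 * The calibration registers pack the P and N codes into one value:
	 * ZRI (drive strength) uses 7-bit fields (P shifted left by 7, N in
	 * the low bits) and ODT uses 6-bit fields (P shifted left by 6), as
	 * the masks and shifts below show.
	 */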
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      PAD_ZRI_CALIB_PHY_REG,
		      ((0x7f & g_zpri_data) << 7 | (0x7f & g_znri_data))));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
		      PAD_ZRI_CALIB_PHY_REG,
		      ((0x7f & g_zpri_ctrl) << 7 | (0x7f & g_znri_ctrl))));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      PAD_ODT_CALIB_PHY_REG,
		      ((0x3f & g_zpodt_data) << 6 | (0x3f & g_znodt_data))));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
		      PAD_ODT_CALIB_PHY_REG,
		      ((0x3f & g_zpodt_ctrl) << 6 | (0x3f & g_znodt_ctrl))));

	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      PAD_PRE_DISABLE_PHY_REG, 0));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      CMOS_CONFIG_PHY_REG, 0));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
		      CMOS_CONFIG_PHY_REG, 0));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		/* check if the interface is enabled */
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		for (phy_id = 0;
		     phy_id < tm->num_of_bus_per_interface;
		     phy_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, phy_id);
			/* Vref & clamp */
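			/* clamp code in bits [6:4], vref code in bits [2:0] */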
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, phy_id, DDR_PHY_DATA,
				      PAD_CONFIG_PHY_REG,
				      ((clamp_tbl[if_id] << 4) | vref),
				      ((0x7 << 4) | 0x7)));
			/* clamp not relevant for control */
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, phy_id, DDR_PHY_CONTROL,
				      PAD_CONFIG_PHY_REG, 0x4, 0x7));
		}
	}

	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0x90,
		      0x6002));

	return MV_OK;
}