/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#define A38X_NUMBER_OF_INTERFACES	5

#define SAR_DEV_ID_OFFS			27
#define SAR_DEV_ID_MASK			0x7

/* Thermal Sensor Registers */
#define TSEN_STATE_REG			0xe4070
#define TSEN_STATE_OFFSET		31
#define TSEN_STATE_MASK			(0x1 << TSEN_STATE_OFFSET)
#define TSEN_CONF_REG			0xe4074
#define TSEN_CONF_RST_OFFSET		8
#define TSEN_CONF_RST_MASK		(0x1 << TSEN_CONF_RST_OFFSET)
#define TSEN_STATUS_REG			0xe4078
#define TSEN_STATUS_READOUT_VALID_OFFSET	10
#define TSEN_STATUS_READOUT_VALID_MASK	(0x1 <<				\
					 TSEN_STATUS_READOUT_VALID_OFFSET)
#define TSEN_STATUS_TEMP_OUT_OFFSET	0
#define TSEN_STATUS_TEMP_OUT_MASK	(0x3ff << TSEN_STATUS_TEMP_OUT_OFFSET)

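/*
 * DFX pipe/client pair associated with each DDR interface; used by
 * ddr3_tip_a38x_pipe_enable() to build the pipe enable mask. Entries
 * beyond the populated interfaces are left as { 0, 0 }.
 */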
static struct dfx_access interface_map[] = {
	/* Pipe	Client */
	{ 0, 17 },
	{ 1, 7 },
	{ 1, 11 },
	{ 0, 3 },
	{ 1, 25 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 }
};

/* This array holds the board round trip delay (DQ and CK) per <interface,bus> */
struct trip_delay_element a38x_board_round_trip_delay_array[] = {
	/* 1st board */
	/* Interface bus DQS-delay CK-delay */
	{ 3952, 5060 },
	{ 3192, 4493 },
	{ 4785, 6677 },
	{ 3413, 7267 },
	{ 4282, 6086 },	/* ECC PUP */
	{ 3952, 5134 },
	{ 3192, 4567 },
	{ 4785, 6751 },
	{ 3413, 7341 },
	{ 4282, 6160 },	/* ECC PUP */

	/* 2nd board */
	/* Interface bus DQS-delay CK-delay */
	{ 3952, 5060 },
	{ 3192, 4493 },
	{ 4785, 6677 },
	{ 3413, 7267 },
	{ 4282, 6086 },	/* ECC PUP */
	{ 3952, 5134 },
	{ 3192, 4567 },
	{ 4785, 6751 },
	{ 3413, 7341 },
	{ 4282, 6160 }	/* ECC PUP */
};

#ifdef STATIC_ALGO_SUPPORT
/* package trace */
static struct trip_delay_element a38x_package_round_trip_delay_array[] = {
	/* IF BUS DQ_DELAY CK_DELAY */
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 },
	{ 0, 0 }
};

static int a38x_silicon_delay_offset[] = {
	/* board 0 */
	0,
	/* board 1 */
	0,
	/* board 2 */
	0
};
#endif

static u8 a38x_bw_per_freq[DDR_FREQ_LIMIT] = {
	0x3,			/* DDR_FREQ_100 */
	0x4,			/* DDR_FREQ_400 */
	0x4,			/* DDR_FREQ_533 */
	0x5,			/* DDR_FREQ_667 */
	0x5,			/* DDR_FREQ_800 */
	0x5,			/* DDR_FREQ_933 */
	0x5,			/* DDR_FREQ_1066 */
	0x3,			/* DDR_FREQ_311 */
	0x3,			/* DDR_FREQ_333 */
	0x4,			/* DDR_FREQ_467 */
	0x5,			/* DDR_FREQ_850 */
	0x5,			/* DDR_FREQ_600 */
	0x3,			/* DDR_FREQ_300 */
	0x5,			/* DDR_FREQ_900 */
	0x3,			/* DDR_FREQ_360 */
	0x5			/* DDR_FREQ_1000 */
};

static u8 a38x_rate_per_freq[DDR_FREQ_LIMIT] = {
	 /*TBD*/ 0x1,		/* DDR_FREQ_100 */
	0x2,			/* DDR_FREQ_400 */
	0x2,			/* DDR_FREQ_533 */
	0x2,			/* DDR_FREQ_667 */
	0x2,			/* DDR_FREQ_800 */
	0x3,			/* DDR_FREQ_933 */
	0x3,			/* DDR_FREQ_1066 */
	0x1,			/* DDR_FREQ_311 */
	0x1,			/* DDR_FREQ_333 */
	0x2,			/* DDR_FREQ_467 */
	0x2,			/* DDR_FREQ_850 */
	0x2,			/* DDR_FREQ_600 */
	0x1,			/* DDR_FREQ_300 */
	0x2,			/* DDR_FREQ_900 */
	0x1,			/* DDR_FREQ_360 */
	0x2			/* DDR_FREQ_1000 */
};

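/*
 * VCO frequency (assumed to be in MHz) for each value of the CPU/DDR clock
 * select sample-at-reset field; ddr3_tip_a38x_set_divider() divides the
 * selected VCO frequency by the target DDR frequency to obtain the clock
 * divider.
 */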
static u16 a38x_vco_freq_per_sar[] = {
	666,			/* 0 */
	1332,
	800,
	1600,
	1066,
	2132,
	1200,
	2400,
	1332,
	1332,
	1500,
	1500,
	1600,			/* 12 */
	1600,
	1700,
	1700,
	1866,
	1866,
	1800,			/* 18 */
	2000,
	2000,
	4000,
	2132,
	2132,
	2300,
	2300,
	2400,
	2400,
	2500,
	2500,
	800
};

u32 pipe_multicast_mask;

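/*
 * DQ bit to PHY pin mapping, one row of eight entries per PUP (as suggested
 * by the row comments below); registered with the training IP in
 * ddr3_tip_init_a38x_silicon() via ddr3_tip_register_dq_table().
 */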
u32 dq_bit_map_2_phy_pin[] = {
	1, 0, 2, 6, 9, 8, 3, 7,	/* 0 */
	8, 9, 1, 7, 2, 6, 3, 0,	/* 1 */
	3, 9, 7, 8, 1, 0, 2, 6,	/* 2 */
	1, 0, 6, 2, 8, 3, 7, 9,	/* 3 */
	0, 1, 2, 9, 7, 8, 3, 6,	/* 4 */
};

static int ddr3_tip_a38x_set_divider(u8 dev_num, u32 if_id,
				     enum hws_ddr_freq freq);

/*
 * Read temperature TJ value
 */
u32 ddr3_ctrl_get_junc_temp(u8 dev_num)
{
	int reg = 0;

	/* Initiates TSEN hardware reset once */
	if ((reg_read(TSEN_CONF_REG) & TSEN_CONF_RST_MASK) == 0)
		reg_bit_set(TSEN_CONF_REG, TSEN_CONF_RST_MASK);
	mdelay(10);

	/* Check if the readout field is valid */
	if ((reg_read(TSEN_STATUS_REG) & TSEN_STATUS_READOUT_VALID_MASK) == 0) {
		printf("%s: TSEN not ready\n", __func__);
		return 0;
	}

	reg = reg_read(TSEN_STATUS_REG);
	reg = (reg & TSEN_STATUS_TEMP_OUT_MASK) >> TSEN_STATUS_TEMP_OUT_OFFSET;

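	/*
	 * Convert the raw 10-bit readout to degrees Celsius: the integer
	 * arithmetic below evaluates T = raw / 2.1445 - 272.674, truncated
	 * toward zero.
	 */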
	return ((((10000 * reg) / 21445) * 1000) - 272674) / 1000;
}

/*
 * Name:     ddr3_tip_a38x_get_freq_config.
 * Desc:     Return the bus width and rate configuration for a given
 *           DDR frequency.
 * Args:
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_tip_a38x_get_freq_config(u8 dev_num, enum hws_ddr_freq freq,
				  struct hws_tip_freq_config_info
				  *freq_config_info)
{
	if (a38x_bw_per_freq[freq] == 0xff)
		return MV_NOT_SUPPORTED;

	if (freq_config_info == NULL)
		return MV_BAD_PARAM;

	freq_config_info->bw_per_freq = a38x_bw_per_freq[freq];
	freq_config_info->rate_per_freq = a38x_rate_per_freq[freq];
	freq_config_info->is_supported = 1;

	return MV_OK;
}

/*
 * Name:     ddr3_tip_a38x_pipe_enable.
 * Desc:     Enable or disable the DFX pipe that serves the given interface.
 * Args:
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_tip_a38x_pipe_enable(u8 dev_num, enum hws_access_type interface_access,
			      u32 if_id, int enable)
{
	u32 data_value, pipe_enable_mask = 0;

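	/*
	 * In multicast mode enable every pipe recorded in pipe_multicast_mask;
	 * otherwise enable only the pipe mapped to this interface (see
	 * interface_map above).
	 */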
	if (enable == 0) {
		pipe_enable_mask = 0;
	} else {
		if (interface_access == ACCESS_TYPE_MULTICAST)
			pipe_enable_mask = pipe_multicast_mask;
		else
			pipe_enable_mask = (1 << interface_map[if_id].pipe);
	}

	CHECK_STATUS(ddr3_tip_reg_read
		     (dev_num, PIPE_ENABLE_ADDR, &data_value, MASK_ALL_BITS));
	data_value = (data_value & (~0xff)) | pipe_enable_mask;
	CHECK_STATUS(ddr3_tip_reg_write(dev_num, PIPE_ENABLE_ADDR, data_value));

	return MV_OK;
}

/*
 * Name:     ddr3_tip_a38x_if_write.
 * Desc:     Write a D-unit register, using read-modify-write when only a
 *           partial mask is given.
 * Args:
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_tip_a38x_if_write(u8 dev_num, enum hws_access_type interface_access,
			   u32 if_id, u32 reg_addr, u32 data_value,
			   u32 mask)
{
	u32 ui_data_read;

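	/*
	 * For a partial-mask write, read the register first so that bits
	 * outside the mask are preserved.
	 */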
	if (mask != MASK_ALL_BITS) {
		CHECK_STATUS(ddr3_tip_a38x_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, reg_addr,
			      &ui_data_read, MASK_ALL_BITS));
		data_value = (ui_data_read & (~mask)) | (data_value & mask);
	}

	reg_write(reg_addr, data_value);

	return MV_OK;
}

/*
 * Name:     ddr3_tip_a38x_if_read.
 * Desc:     Read a D-unit register and apply the given mask.
 * Args:
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_tip_a38x_if_read(u8 dev_num, enum hws_access_type interface_access,
			  u32 if_id, u32 reg_addr, u32 *data, u32 mask)
{
	*data = reg_read(reg_addr) & mask;

	return MV_OK;
}

/*
 * Name:     ddr3_tip_a38x_select_ddr_controller.
 * Desc:     Enable/Disable access to Marvell's server.
 * Args:     dev_num     - device number
 *           enable      - whether to enable or disable the server
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_tip_a38x_select_ddr_controller(u8 dev_num, int enable)
{
	u32 reg;

	reg = reg_read(CS_ENABLE_REG);

	if (enable)
		reg |= (1 << 6);
	else
		reg &= ~(1 << 6);

	reg_write(CS_ENABLE_REG, reg);

	return MV_OK;
}

/*
 * Name:     ddr3_tip_init_a38x_silicon.
 * Desc:     init Training SW DB.
 * Args:
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
static int ddr3_tip_init_a38x_silicon(u32 dev_num, u32 board_id)
{
	struct hws_tip_config_func_db config_func;
	enum hws_ddr_freq ddr_freq;
	int status;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* new read leveling version */
	config_func.tip_dunit_read_func = ddr3_tip_a38x_if_read;
	config_func.tip_dunit_write_func = ddr3_tip_a38x_if_write;
	config_func.tip_dunit_mux_select_func =
		ddr3_tip_a38x_select_ddr_controller;
	config_func.tip_get_freq_config_info_func =
		ddr3_tip_a38x_get_freq_config;
	config_func.tip_set_freq_divider_func = ddr3_tip_a38x_set_divider;
	config_func.tip_get_device_info_func = ddr3_tip_a38x_get_device_info;
	config_func.tip_get_temperature = ddr3_ctrl_get_junc_temp;

	ddr3_tip_init_config_func(dev_num, &config_func);

	ddr3_tip_register_dq_table(dev_num, dq_bit_map_2_phy_pin);

#ifdef STATIC_ALGO_SUPPORT
	{
		struct hws_tip_static_config_info static_config;
		u32 board_offset =
		    board_id * A38X_NUMBER_OF_INTERFACES *
		    tm->num_of_bus_per_interface;

		static_config.silicon_delay =
			a38x_silicon_delay_offset[board_id];
		static_config.package_trace_arr =
			a38x_package_round_trip_delay_array;
		static_config.board_trace_arr =
			&a38x_board_round_trip_delay_array[board_offset];
		ddr3_tip_init_static_config_db(dev_num, &static_config);
	}
#endif
	status = ddr3_tip_a38x_get_init_freq(dev_num, &ddr_freq);
	if (MV_OK != status) {
		DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
				      ("DDR3 silicon get target frequency - FAILED 0x%x\n",
				       status));
		return status;
	}

	rl_version = 1;
	mask_tune_func = (SET_LOW_FREQ_MASK_BIT |
			  LOAD_PATTERN_MASK_BIT |
			  SET_MEDIUM_FREQ_MASK_BIT | WRITE_LEVELING_MASK_BIT |
			  /* LOAD_PATTERN_2_MASK_BIT | */
			  WRITE_LEVELING_SUPP_MASK_BIT |
			  READ_LEVELING_MASK_BIT |
			  PBS_RX_MASK_BIT |
			  PBS_TX_MASK_BIT |
			  SET_TARGET_FREQ_MASK_BIT |
			  WRITE_LEVELING_TF_MASK_BIT |
			  WRITE_LEVELING_SUPP_TF_MASK_BIT |
			  READ_LEVELING_TF_MASK_BIT |
			  CENTRALIZATION_RX_MASK_BIT |
			  CENTRALIZATION_TX_MASK_BIT);
	rl_mid_freq_wa = 1;

	if ((ddr_freq == DDR_FREQ_333) || (ddr_freq == DDR_FREQ_400)) {
		mask_tune_func = (WRITE_LEVELING_MASK_BIT |
				  LOAD_PATTERN_2_MASK_BIT |
				  WRITE_LEVELING_SUPP_MASK_BIT |
				  READ_LEVELING_MASK_BIT |
				  PBS_RX_MASK_BIT |
				  PBS_TX_MASK_BIT |
				  CENTRALIZATION_RX_MASK_BIT |
				  CENTRALIZATION_TX_MASK_BIT);
		rl_mid_freq_wa = 0; /* WA not needed if 333/400 is TF */
	}

	/* Supplementary not supported for ECC modes */
	if (1 == ddr3_if_ecc_enabled()) {
		mask_tune_func &= ~WRITE_LEVELING_SUPP_TF_MASK_BIT;
		mask_tune_func &= ~WRITE_LEVELING_SUPP_MASK_BIT;
		mask_tune_func &= ~PBS_TX_MASK_BIT;
		mask_tune_func &= ~PBS_RX_MASK_BIT;
	}

	if (ck_delay == -1)
		ck_delay = 160;
	if (ck_delay_16 == -1)
		ck_delay_16 = 160;
	ca_delay = 0;
	delay_enable = 1;

	calibration_update_control = 1;

	init_freq = tm->interface_params[first_active_if].memory_freq;

	ddr3_tip_a38x_get_medium_freq(dev_num, &medium_freq);

	return MV_OK;
}

int ddr3_a38x_update_topology_map(u32 dev_num, struct hws_topology_map *tm)
{
	u32 if_id = 0;
	enum hws_ddr_freq freq;

	ddr3_tip_a38x_get_init_freq(dev_num, &freq);
	tm->interface_params[if_id].memory_freq = freq;

	/*
	 * re-calc topology parameters according to topology updates
	 * (if needed)
	 */
	CHECK_STATUS(hws_ddr3_tip_load_topology_map(dev_num, tm));

	return MV_OK;
}

int ddr3_tip_init_a38x(u32 dev_num, u32 board_id)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (NULL == tm)
		return MV_FAIL;

	ddr3_a38x_update_topology_map(dev_num, tm);
	ddr3_tip_init_a38x_silicon(dev_num, board_id);

	return MV_OK;
}

int ddr3_tip_a38x_get_init_freq(int dev_num, enum hws_ddr_freq *freq)
{
	u32 reg;

	/* Read sample at reset setting */
	reg = (reg_read(REG_DEVICE_SAR1_ADDR) >>
	       RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
		RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;
	switch (reg) {
	case 0x0:
	case 0x1:
		*freq = DDR_FREQ_333;
		break;
	case 0x2:
	case 0x3:
		*freq = DDR_FREQ_400;
		break;
	case 0x4:
	case 0xd:
		*freq = DDR_FREQ_533;
		break;
	case 0x6:
		*freq = DDR_FREQ_600;
		break;
	case 0x8:
	case 0x11:
	case 0x14:
		*freq = DDR_FREQ_667;
		break;
	case 0xc:
	case 0x15:
	case 0x1b:
		*freq = DDR_FREQ_800;
		break;
	case 0x10:
		*freq = DDR_FREQ_933;
		break;
	case 0x12:
		*freq = DDR_FREQ_900;
		break;
	case 0x13:
		*freq = DDR_FREQ_900;
		break;
	default:
		*freq = 0;
		return MV_NOT_SUPPORTED;
	}

	return MV_OK;
}

int ddr3_tip_a38x_get_medium_freq(int dev_num, enum hws_ddr_freq *freq)
{
	u32 reg;

	/* Read sample at reset setting */
	reg = (reg_read(REG_DEVICE_SAR1_ADDR) >>
	       RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
		RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;
	switch (reg) {
	case 0x0:
	case 0x1:
		/* Medium is same as TF to run PBS in this freq */
		*freq = DDR_FREQ_333;
		break;
	case 0x2:
	case 0x3:
		/* Medium is same as TF to run PBS in this freq */
		*freq = DDR_FREQ_400;
		break;
	case 0x4:
	case 0xd:
		*freq = DDR_FREQ_533;
		break;
	case 0x8:
	case 0x11:
	case 0x14:
		*freq = DDR_FREQ_333;
		break;
	case 0xc:
	case 0x15:
	case 0x1b:
		*freq = DDR_FREQ_400;
		break;
	case 0x6:
		*freq = DDR_FREQ_300;
		break;
	case 0x12:
		*freq = DDR_FREQ_360;
		break;
	case 0x13:
		*freq = DDR_FREQ_400;
		break;
	default:
		*freq = 0;
		return MV_NOT_SUPPORTED;
	}

	return MV_OK;
}

u32 ddr3_tip_get_init_freq(void)
{
	enum hws_ddr_freq freq;

	ddr3_tip_a38x_get_init_freq(0, &freq);

	return freq;
}

static int ddr3_tip_a38x_set_divider(u8 dev_num, u32 if_id,
				     enum hws_ddr_freq frequency)
{
	u32 divider = 0;
	u32 sar_val;

	if (if_id != 0) {
		DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
				      ("A38x does not support interface 0x%x\n",
				       if_id));
		return MV_BAD_PARAM;
	}

	/* get VCO freq index */
	sar_val = (reg_read(REG_DEVICE_SAR1_ADDR) >>
		   RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
		RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;
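	/*
	 * Derive the DDR clock divider as VCO frequency / target frequency;
	 * for example, assuming both tables are in MHz, a 1600 MHz VCO and an
	 * 800 MHz target frequency yield a divider of 2.
	 */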
	divider = a38x_vco_freq_per_sar[sar_val] / freq_val[frequency];

	/* Set Sync mode */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x20220, 0x0,
		      0x1000));
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe42f4, 0x0,
		      0x200));

	/* cpupll_clkdiv_reset_mask */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264, 0x1f,
		      0xff));

	/* cpupll_clkdiv_reload_smooth */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260,
		      (0x2 << 8), (0xff << 8)));

	/* cpupll_clkdiv_relax_en */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260,
		      (0x2 << 24), (0xff << 24)));

	/* write the divider */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4268,
		      (divider << 8), (0x3f << 8)));

	/* set cpupll_clkdiv_reload_ratio */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264,
		      (1 << 8), (1 << 8)));

	/* unset cpupll_clkdiv_reload_ratio */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264, 0,
		      (1 << 8)));

	/* clear cpupll_clkdiv_reload_force */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260, 0,
		      (0xff << 8)));

	/* clear cpupll_clkdiv_relax_en */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4260, 0,
		      (0xff << 24)));

	/* clear cpupll_clkdiv_reset_mask */
	CHECK_STATUS(ddr3_tip_a38x_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0xe4264, 0,
		      0xff));

	/* Dunit training clock + 1:1 mode */
	if ((frequency == DDR_FREQ_LOW_FREQ) || (freq_val[frequency] <= 400)) {
		CHECK_STATUS(ddr3_tip_a38x_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x18488,
			      (1 << 16), (1 << 16)));
		CHECK_STATUS(ddr3_tip_a38x_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1524,
			      (0 << 15), (1 << 15)));
	} else {
		CHECK_STATUS(ddr3_tip_a38x_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x18488,
			      0, (1 << 16)));
		CHECK_STATUS(ddr3_tip_a38x_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1524,
			      (1 << 15), (1 << 15)));
	}

	return MV_OK;
}

/*
 * external read from memory
 */
int ddr3_tip_ext_read(u32 dev_num, u32 if_id, u32 reg_addr,
		      u32 num_of_bursts, u32 *data)
{
	u32 burst_num;

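	/*
	 * Copy back num_of_bursts * 8 32-bit words from the given address;
	 * each burst is assumed to correspond to eight 32-bit words (one
	 * DDR3 BL8 burst on a 64-bit interface).
	 */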
	for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++)
		data[burst_num] = readl(reg_addr + 4 * burst_num);

	return MV_OK;
}

/*
 * external write to memory
 */
int ddr3_tip_ext_write(u32 dev_num, u32 if_id, u32 reg_addr,
		       u32 num_of_bursts, u32 *data)
{
	u32 burst_num;

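	/* Mirror of ddr3_tip_ext_read(): write num_of_bursts * 8 32-bit words */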
	for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++)
		writel(data[burst_num], reg_addr + 4 * burst_num);

	return MV_OK;
}

int ddr3_silicon_pre_init(void)
{
	return ddr3_silicon_init();
}

int ddr3_post_run_alg(void)
{
	return MV_OK;
}

int ddr3_silicon_post_init(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* Set half bus width */
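	/*
	 * i.e. clear bit 15 (mask 0x8000) of the SDRAM configuration register
	 * when the topology reports a 16-bit DRAM bus.
	 */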
	if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask)) {
		CHECK_STATUS(ddr3_tip_if_write
			     (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
			      REG_SDRAM_CONFIG_ADDR, 0x0, 0x8000));
	}

	return MV_OK;
}

int ddr3_tip_a38x_get_device_info(u8 dev_num, struct ddr3_device_info *info_ptr)
{
	info_ptr->device_id = 0x6800;
	info_ptr->ck_delay = ck_delay;

	return MV_OK;
}