xref: /rk3399_rockchip-uboot/drivers/ddr/fsl/options.c (revision 89f5eaa1ee9a3307e05458aa4f3b2155ab0a7144)
1 /*
2  * Copyright 2008, 2010-2014 Freescale Semiconductor, Inc.
3  *
4  * SPDX-License-Identifier:	GPL-2.0+
5  */
6 
7 #include <common.h>
8 #include <hwconfig.h>
9 #include <fsl_ddr_sdram.h>
10 
11 #include <fsl_ddr.h>
12 #if defined(CONFIG_FSL_LSCH2) || defined(CONFIG_FSL_LSCH3)
13 #include <asm/arch/clock.h>
14 #endif
15 
/*
 * Use our own stack-based buffer before relocation to allow accessing longer
 * hwconfig strings that might be in the environment before we've relocated.
 * This is pretty fragile both in its use of stack and in whether the buffer
 * is big enough. However, we will get a warning from getenv_f for the latter.
 */
22 
23 /* Board-specific functions defined in each board's ddr.c */
24 extern void fsl_ddr_board_options(memctl_options_t *popts,
25 		dimm_params_t *pdimm,
26 		unsigned int ctrl_num);
27 
/*
 * Per-chip-select ODT (on-die termination) settings.  One instance
 * describes a single chip select; the tables below provide one entry
 * per chip select (cs0..cs3) for each DIMM population.
 */
struct dynamic_odt {
	unsigned int odt_rd_cfg;	/* controller ODT assertion on reads (FSL_DDR_ODT_*) */
	unsigned int odt_wr_cfg;	/* controller ODT assertion on writes (FSL_DDR_ODT_*) */
	unsigned int odt_rtt_norm;	/* DRAM RTT_NOM termination value */
	unsigned int odt_rtt_wr;	/* DRAM dynamic RTT_WR termination value */
};
34 
#ifdef CONFIG_SYS_FSL_DDR4
/*
 * ODT tables for DDR4, selected by the rank population detected in
 * populate_memctl_options().
 *
 * Table naming:
 *   single_X - one DIMM slot per controller
 *   dual_XY  - two DIMM slots per controller (X = slot 0, Y = slot 1)
 * where X/Y is Q (quad-rank), D (dual-rank), S (single-rank) or
 * 0 (empty slot).
 */
/* Quad rank is not verified yet due availability.
 * Replacing 20 OHM with 34 OHM since DDR4 doesn't have 20 OHM option
 */
/* One slot, quad-rank DIMM */
static __maybe_unused const struct dynamic_odt single_Q[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR4_RTT_34_OHM,	/* unverified */
		DDR4_RTT_120_OHM
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_120_OHM
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,	/* tied high */
		DDR4_RTT_OFF,
		DDR4_RTT_120_OHM
	}
};

/* One slot, dual-rank DIMM */
static __maybe_unused const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

/* One slot, single-rank DIMM */
static __maybe_unused const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
};

/* Two slots, dual-rank DIMM in each */
static __maybe_unused const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	}
};

/* Two slots, dual-rank in slot 0, single-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_DS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0}
};
/* Two slots, single-rank in slot 0, dual-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_SD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	}
};

/* Two slots, single-rank DIMM in each */
static __maybe_unused const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0}
};

/* Two slots, dual-rank in slot 0, slot 1 empty */
static __maybe_unused const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

/* Two slots, slot 0 empty, dual-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_0D[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_OFF
	}
};

/* Two slots, single-rank in slot 0, slot 1 empty */
static __maybe_unused const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}

};

/* Two slots, slot 0 empty, single-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_0S[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0}

};

/* Fallback when the rank population is not recognized */
static __maybe_unused const struct dynamic_odt odt_unknown[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	}
};
#elif defined(CONFIG_SYS_FSL_DDR3)
/*
 * ODT tables for DDR3.  Same naming scheme as the DDR4 tables:
 * single_X / dual_XY with X/Y = Q (quad-rank), D (dual-rank),
 * S (single-rank) or 0 (empty slot); one entry per chip select.
 */
/* One slot, quad-rank DIMM */
static __maybe_unused const struct dynamic_odt single_Q[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,	/* tied high */
		DDR3_RTT_OFF,
		DDR3_RTT_120_OHM
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,	/* tied high */
		DDR3_RTT_OFF,
		DDR3_RTT_120_OHM
	}
};

/* One slot, dual-rank DIMM */
static __maybe_unused const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR3_RTT_OFF,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

/* One slot, single-rank DIMM */
static __maybe_unused const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
};

/* Two slots, dual-rank DIMM in each */
static __maybe_unused const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_30_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_30_OHM,
		DDR3_RTT_OFF
	}
};

/* Two slots, dual-rank in slot 0, single-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_DS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_30_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0}
};
/* Two slots, single-rank in slot 0, dual-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_SD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_20_OHM,
		DDR3_RTT_OFF
	}
};

/* Two slots, single-rank DIMM in each */
static __maybe_unused const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_30_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_30_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0}
};

/* Two slots, dual-rank in slot 0, slot 1 empty */
static __maybe_unused const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR3_RTT_OFF,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

/* Two slots, slot 0 empty, dual-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_0D[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR3_RTT_OFF,
		DDR3_RTT_OFF
	}
};

/* Two slots, single-rank in slot 0, slot 1 empty */
static __maybe_unused const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}

};

/* Two slots, slot 0 empty, single-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_0S[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0}

};

/* Fallback when the rank population is not recognized */
static __maybe_unused const struct dynamic_odt odt_unknown[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	}
};
#else	/* CONFIG_SYS_FSL_DDR3 */
/*
 * ODT tables used when neither DDR4 nor DDR3 is configured (DDR2 RTT
 * values).  Same naming scheme as above: single_X / dual_XY with
 * X/Y = Q (quad-rank), D (dual-rank), S (single-rank) or 0 (empty
 * slot); one entry per chip select.
 */
/* One slot, quad-rank DIMM (not supported here: all entries zero) */
static __maybe_unused const struct dynamic_odt single_Q[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

/* One slot, dual-rank DIMM */
static __maybe_unused const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

/* One slot, single-rank DIMM */
static __maybe_unused const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
};

/* Two slots, dual-rank DIMM in each */
static __maybe_unused const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};

/* Two slots, dual-rank in slot 0, single-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_DS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0}
};

/* Two slots, single-rank in slot 0, dual-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_SD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};

/* Two slots, single-rank DIMM in each */
static __maybe_unused const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0}
};

/* Two slots, dual-rank in slot 0, slot 1 empty */
static __maybe_unused const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

/* Two slots, slot 0 empty, dual-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_0D[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};

/* Two slots, single-rank in slot 0, slot 1 empty */
static __maybe_unused const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}

};

/* Two slots, slot 0 empty, single-rank in slot 1 */
static __maybe_unused const struct dynamic_odt dual_0S[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0}

};

/* Fallback when the rank population is not recognized */
static __maybe_unused const struct dynamic_odt odt_unknown[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};
#endif
710 
/*
 * Automatically select bank interleaving mode based on DIMMs
 * in this order: cs0_cs1_cs2_cs3, cs0_cs1, null.
 * This function only deals with one or two slots per controller.
 */
716 static inline unsigned int auto_bank_intlv(dimm_params_t *pdimm)
717 {
718 #if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
719 	if (pdimm[0].n_ranks == 4)
720 		return FSL_DDR_CS0_CS1_CS2_CS3;
721 	else if (pdimm[0].n_ranks == 2)
722 		return FSL_DDR_CS0_CS1;
723 #elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
724 #ifdef CONFIG_FSL_DDR_FIRST_SLOT_QUAD_CAPABLE
725 	if (pdimm[0].n_ranks == 4)
726 		return FSL_DDR_CS0_CS1_CS2_CS3;
727 #endif
728 	if (pdimm[0].n_ranks == 2) {
729 		if (pdimm[1].n_ranks == 2)
730 			return FSL_DDR_CS0_CS1_CS2_CS3;
731 		else
732 			return FSL_DDR_CS0_CS1;
733 	}
734 #endif
735 	return 0;
736 }
737 
738 unsigned int populate_memctl_options(const common_timing_params_t *common_dimm,
739 			memctl_options_t *popts,
740 			dimm_params_t *pdimm,
741 			unsigned int ctrl_num)
742 {
743 	unsigned int i;
744 	char buffer[HWCONFIG_BUFFER_SIZE];
745 	char *buf = NULL;
746 #if defined(CONFIG_SYS_FSL_DDR3) || \
747 	defined(CONFIG_SYS_FSL_DDR2) || \
748 	defined(CONFIG_SYS_FSL_DDR4)
749 	const struct dynamic_odt *pdodt = odt_unknown;
750 #endif
751 	ulong ddr_freq;
752 
753 	/*
754 	 * Extract hwconfig from environment since we have not properly setup
755 	 * the environment but need it for ddr config params
756 	 */
757 	if (getenv_f("hwconfig", buffer, sizeof(buffer)) > 0)
758 		buf = buffer;
759 
760 #if defined(CONFIG_SYS_FSL_DDR3) || \
761 	defined(CONFIG_SYS_FSL_DDR2) || \
762 	defined(CONFIG_SYS_FSL_DDR4)
763 	/* Chip select options. */
764 #if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
765 	switch (pdimm[0].n_ranks) {
766 	case 1:
767 		pdodt = single_S;
768 		break;
769 	case 2:
770 		pdodt = single_D;
771 		break;
772 	case 4:
773 		pdodt = single_Q;
774 		break;
775 	}
776 #elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
777 	switch (pdimm[0].n_ranks) {
778 #ifdef CONFIG_FSL_DDR_FIRST_SLOT_QUAD_CAPABLE
779 	case 4:
780 		pdodt = single_Q;
781 		if (pdimm[1].n_ranks)
782 			printf("Error: Quad- and Dual-rank DIMMs cannot be used together\n");
783 		break;
784 #endif
785 	case 2:
786 		switch (pdimm[1].n_ranks) {
787 		case 2:
788 			pdodt = dual_DD;
789 			break;
790 		case 1:
791 			pdodt = dual_DS;
792 			break;
793 		case 0:
794 			pdodt = dual_D0;
795 			break;
796 		}
797 		break;
798 	case 1:
799 		switch (pdimm[1].n_ranks) {
800 		case 2:
801 			pdodt = dual_SD;
802 			break;
803 		case 1:
804 			pdodt = dual_SS;
805 			break;
806 		case 0:
807 			pdodt = dual_S0;
808 			break;
809 		}
810 		break;
811 	case 0:
812 		switch (pdimm[1].n_ranks) {
813 		case 2:
814 			pdodt = dual_0D;
815 			break;
816 		case 1:
817 			pdodt = dual_0S;
818 			break;
819 		}
820 		break;
821 	}
822 #endif	/* CONFIG_DIMM_SLOTS_PER_CTLR */
823 #endif	/* CONFIG_SYS_FSL_DDR2, 3, 4 */
824 
825 	/* Pick chip-select local options. */
826 	for (i = 0; i < CONFIG_CHIP_SELECTS_PER_CTRL; i++) {
827 #if defined(CONFIG_SYS_FSL_DDR3) || \
828 	defined(CONFIG_SYS_FSL_DDR2) || \
829 	defined(CONFIG_SYS_FSL_DDR4)
830 		popts->cs_local_opts[i].odt_rd_cfg = pdodt[i].odt_rd_cfg;
831 		popts->cs_local_opts[i].odt_wr_cfg = pdodt[i].odt_wr_cfg;
832 		popts->cs_local_opts[i].odt_rtt_norm = pdodt[i].odt_rtt_norm;
833 		popts->cs_local_opts[i].odt_rtt_wr = pdodt[i].odt_rtt_wr;
834 #else
835 		popts->cs_local_opts[i].odt_rd_cfg = FSL_DDR_ODT_NEVER;
836 		popts->cs_local_opts[i].odt_wr_cfg = FSL_DDR_ODT_CS;
837 #endif
838 		popts->cs_local_opts[i].auto_precharge = 0;
839 	}
840 
841 	/* Pick interleaving mode. */
842 
843 	/*
844 	 * 0 = no interleaving
845 	 * 1 = interleaving between 2 controllers
846 	 */
847 	popts->memctl_interleaving = 0;
848 
849 	/*
850 	 * 0 = cacheline
851 	 * 1 = page
852 	 * 2 = (logical) bank
853 	 * 3 = superbank (only if CS interleaving is enabled)
854 	 */
855 	popts->memctl_interleaving_mode = 0;
856 
857 	/*
858 	 * 0: cacheline: bit 30 of the 36-bit physical addr selects the memctl
859 	 * 1: page:      bit to the left of the column bits selects the memctl
860 	 * 2: bank:      bit to the left of the bank bits selects the memctl
861 	 * 3: superbank: bit to the left of the chip select selects the memctl
862 	 *
863 	 * NOTE: ba_intlv (rank interleaving) is independent of memory
864 	 * controller interleaving; it is only within a memory controller.
865 	 * Must use superbank interleaving if rank interleaving is used and
866 	 * memory controller interleaving is enabled.
867 	 */
868 
869 	/*
870 	 * 0 = no
871 	 * 0x40 = CS0,CS1
872 	 * 0x20 = CS2,CS3
873 	 * 0x60 = CS0,CS1 + CS2,CS3
874 	 * 0x04 = CS0,CS1,CS2,CS3
875 	 */
876 	popts->ba_intlv_ctl = 0;
877 
878 	/* Memory Organization Parameters */
879 	popts->registered_dimm_en = common_dimm->all_dimms_registered;
880 
881 	/* Operational Mode Paramters */
882 
883 	/* Pick ECC modes */
884 	popts->ecc_mode = 0;		  /* 0 = disabled, 1 = enabled */
885 #ifdef CONFIG_DDR_ECC
886 	if (hwconfig_sub_f("fsl_ddr", "ecc", buf)) {
887 		if (hwconfig_subarg_cmp_f("fsl_ddr", "ecc", "on", buf))
888 			popts->ecc_mode = 1;
889 	} else
890 		popts->ecc_mode = 1;
891 #endif
892 	/* 1 = use memory controler to init data */
893 	popts->ecc_init_using_memctl = popts->ecc_mode ? 1 : 0;
894 
895 	/*
896 	 * Choose DQS config
897 	 * 0 for DDR1
898 	 * 1 for DDR2
899 	 */
900 #if defined(CONFIG_SYS_FSL_DDR1)
901 	popts->dqs_config = 0;
902 #elif defined(CONFIG_SYS_FSL_DDR2) || defined(CONFIG_SYS_FSL_DDR3)
903 	popts->dqs_config = 1;
904 #endif
905 
906 	/* Choose self-refresh during sleep. */
907 	popts->self_refresh_in_sleep = 1;
908 
909 	/* Choose dynamic power management mode. */
910 	popts->dynamic_power = 0;
911 
912 	/*
913 	 * check first dimm for primary sdram width
914 	 * presuming all dimms are similar
915 	 * 0 = 64-bit, 1 = 32-bit, 2 = 16-bit
916 	 */
917 #if defined(CONFIG_SYS_FSL_DDR1) || defined(CONFIG_SYS_FSL_DDR2)
918 	if (pdimm[0].n_ranks != 0) {
919 		if ((pdimm[0].data_width >= 64) && \
920 			(pdimm[0].data_width <= 72))
921 			popts->data_bus_width = 0;
922 		else if ((pdimm[0].data_width >= 32) && \
923 			(pdimm[0].data_width <= 40))
924 			popts->data_bus_width = 1;
925 		else {
926 			panic("Error: data width %u is invalid!\n",
927 				pdimm[0].data_width);
928 		}
929 	}
930 #else
931 	if (pdimm[0].n_ranks != 0) {
932 		if (pdimm[0].primary_sdram_width == 64)
933 			popts->data_bus_width = 0;
934 		else if (pdimm[0].primary_sdram_width == 32)
935 			popts->data_bus_width = 1;
936 		else if (pdimm[0].primary_sdram_width == 16)
937 			popts->data_bus_width = 2;
938 		else {
939 			panic("Error: primary sdram width %u is invalid!\n",
940 				pdimm[0].primary_sdram_width);
941 		}
942 	}
943 #endif
944 
945 	popts->x4_en = (pdimm[0].device_width == 4) ? 1 : 0;
946 
947 	/* Choose burst length. */
948 #if defined(CONFIG_SYS_FSL_DDR3) || defined(CONFIG_SYS_FSL_DDR4)
949 #if defined(CONFIG_E500MC)
950 	popts->otf_burst_chop_en = 0;	/* on-the-fly burst chop disable */
951 	popts->burst_length = DDR_BL8;	/* Fixed 8-beat burst len */
952 #else
953 	if ((popts->data_bus_width == 1) || (popts->data_bus_width == 2)) {
954 		/* 32-bit or 16-bit bus */
955 		popts->otf_burst_chop_en = 0;
956 		popts->burst_length = DDR_BL8;
957 	} else {
958 		popts->otf_burst_chop_en = 1;	/* on-the-fly burst chop */
959 		popts->burst_length = DDR_OTF;	/* on-the-fly BC4 and BL8 */
960 	}
961 #endif
962 #else
963 	popts->burst_length = DDR_BL4;	/* has to be 4 for DDR2 */
964 #endif
965 
966 	/* Choose ddr controller address mirror mode */
967 #if defined(CONFIG_SYS_FSL_DDR3) || defined(CONFIG_SYS_FSL_DDR4)
968 	for (i = 0; i < CONFIG_DIMM_SLOTS_PER_CTLR; i++) {
969 		if (pdimm[i].n_ranks) {
970 			popts->mirrored_dimm = pdimm[i].mirrored_dimm;
971 			break;
972 		}
973 	}
974 #endif
975 
976 	/* Global Timing Parameters. */
977 	debug("mclk_ps = %u ps\n", get_memory_clk_period_ps(ctrl_num));
978 
979 	/* Pick a caslat override. */
980 	popts->cas_latency_override = 0;
981 	popts->cas_latency_override_value = 3;
982 	if (popts->cas_latency_override) {
983 		debug("using caslat override value = %u\n",
984 		       popts->cas_latency_override_value);
985 	}
986 
987 	/* Decide whether to use the computed derated latency */
988 	popts->use_derated_caslat = 0;
989 
990 	/* Choose an additive latency. */
991 	popts->additive_latency_override = 0;
992 	popts->additive_latency_override_value = 3;
993 	if (popts->additive_latency_override) {
994 		debug("using additive latency override value = %u\n",
995 		       popts->additive_latency_override_value);
996 	}
997 
998 	/*
999 	 * 2T_EN setting
1000 	 *
1001 	 * Factors to consider for 2T_EN:
1002 	 *	- number of DIMMs installed
1003 	 *	- number of components, number of active ranks
1004 	 *	- how much time you want to spend playing around
1005 	 */
1006 	popts->twot_en = 0;
1007 	popts->threet_en = 0;
1008 
1009 	/* for RDIMM and DDR4 UDIMM/discrete memory, address parity enable */
1010 	if (popts->registered_dimm_en)
1011 		popts->ap_en = 1; /* 0 = disable,  1 = enable */
1012 	else
1013 		popts->ap_en = 0; /* disabled for DDR4 UDIMM/discrete default */
1014 
1015 	if (hwconfig_sub_f("fsl_ddr", "parity", buf)) {
1016 		if (hwconfig_subarg_cmp_f("fsl_ddr", "parity", "on", buf)) {
1017 			if (popts->registered_dimm_en ||
1018 			    (CONFIG_FSL_SDRAM_TYPE == SDRAM_TYPE_DDR4))
1019 				popts->ap_en = 1;
1020 		}
1021 	}
1022 
1023 	/*
1024 	 * BSTTOPRE precharge interval
1025 	 *
1026 	 * Set this to 0 for global auto precharge
1027 	 * The value of 0x100 has been used for DDR1, DDR2, DDR3.
1028 	 * It is not wrong. Any value should be OK. The performance depends on
1029 	 * applications. There is no one good value for all. One way to set
1030 	 * is to use 1/4 of refint value.
1031 	 */
1032 	popts->bstopre = picos_to_mclk(ctrl_num, common_dimm->refresh_rate_ps)
1033 			 >> 2;
1034 
1035 	/*
1036 	 * Window for four activates -- tFAW
1037 	 *
1038 	 * FIXME: UM: applies only to DDR2/DDR3 with eight logical banks only
1039 	 * FIXME: varies depending upon number of column addresses or data
1040 	 * FIXME: width, was considering looking at pdimm->primary_sdram_width
1041 	 */
1042 #if defined(CONFIG_SYS_FSL_DDR1)
1043 	popts->tfaw_window_four_activates_ps = mclk_to_picos(ctrl_num, 1);
1044 
1045 #elif defined(CONFIG_SYS_FSL_DDR2)
1046 	/*
1047 	 * x4/x8;  some datasheets have 35000
1048 	 * x16 wide columns only?  Use 50000?
1049 	 */
1050 	popts->tfaw_window_four_activates_ps = 37500;
1051 
1052 #else
1053 	popts->tfaw_window_four_activates_ps = pdimm[0].tfaw_ps;
1054 #endif
1055 	popts->zq_en = 0;
1056 	popts->wrlvl_en = 0;
1057 #if defined(CONFIG_SYS_FSL_DDR3) || defined(CONFIG_SYS_FSL_DDR4)
1058 	/*
1059 	 * due to ddr3 dimm is fly-by topology
1060 	 * we suggest to enable write leveling to
1061 	 * meet the tQDSS under different loading.
1062 	 */
1063 	popts->wrlvl_en = 1;
1064 	popts->zq_en = 1;
1065 	popts->wrlvl_override = 0;
1066 #endif
1067 
1068 	/*
1069 	 * Check interleaving configuration from environment.
1070 	 * Please refer to doc/README.fsl-ddr for the detail.
1071 	 *
1072 	 * If memory controller interleaving is enabled, then the data
1073 	 * bus widths must be programmed identically for all memory controllers.
1074 	 *
1075 	 * Attempt to set all controllers to the same chip select
1076 	 * interleaving mode. It will do a best effort to get the
1077 	 * requested ranks interleaved together such that the result
1078 	 * should be a subset of the requested configuration.
1079 	 *
1080 	 * if CONFIG_SYS_FSL_DDR_INTLV_256B is defined, mandatory interleaving
1081 	 * with 256 Byte is enabled.
1082 	 */
1083 #if (CONFIG_SYS_NUM_DDR_CTLRS > 1)
1084 	if (!hwconfig_sub_f("fsl_ddr", "ctlr_intlv", buf))
1085 #ifdef CONFIG_SYS_FSL_DDR_INTLV_256B
1086 		;
1087 #else
1088 		goto done;
1089 #endif
1090 	if (pdimm[0].n_ranks == 0) {
1091 		printf("There is no rank on CS0 for controller %d.\n", ctrl_num);
1092 		popts->memctl_interleaving = 0;
1093 		goto done;
1094 	}
1095 	popts->memctl_interleaving = 1;
1096 #ifdef CONFIG_SYS_FSL_DDR_INTLV_256B
1097 	popts->memctl_interleaving_mode = FSL_DDR_256B_INTERLEAVING;
1098 	popts->memctl_interleaving = 1;
1099 	debug("256 Byte interleaving\n");
1100 #else
1101 	/*
1102 	 * test null first. if CONFIG_HWCONFIG is not defined
1103 	 * hwconfig_arg_cmp returns non-zero
1104 	 */
1105 	if (hwconfig_subarg_cmp_f("fsl_ddr", "ctlr_intlv",
1106 				    "null", buf)) {
1107 		popts->memctl_interleaving = 0;
1108 		debug("memory controller interleaving disabled.\n");
1109 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1110 					"ctlr_intlv",
1111 					"cacheline", buf)) {
1112 		popts->memctl_interleaving_mode =
1113 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1114 			0 : FSL_DDR_CACHE_LINE_INTERLEAVING;
1115 		popts->memctl_interleaving =
1116 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1117 			0 : 1;
1118 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1119 					"ctlr_intlv",
1120 					"page", buf)) {
1121 		popts->memctl_interleaving_mode =
1122 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1123 			0 : FSL_DDR_PAGE_INTERLEAVING;
1124 		popts->memctl_interleaving =
1125 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1126 			0 : 1;
1127 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1128 					"ctlr_intlv",
1129 					"bank", buf)) {
1130 		popts->memctl_interleaving_mode =
1131 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1132 			0 : FSL_DDR_BANK_INTERLEAVING;
1133 		popts->memctl_interleaving =
1134 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1135 			0 : 1;
1136 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1137 					"ctlr_intlv",
1138 					"superbank", buf)) {
1139 		popts->memctl_interleaving_mode =
1140 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1141 			0 : FSL_DDR_SUPERBANK_INTERLEAVING;
1142 		popts->memctl_interleaving =
1143 			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
1144 			0 : 1;
1145 #if (CONFIG_SYS_NUM_DDR_CTLRS == 3)
1146 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1147 					"ctlr_intlv",
1148 					"3way_1KB", buf)) {
1149 		popts->memctl_interleaving_mode =
1150 			FSL_DDR_3WAY_1KB_INTERLEAVING;
1151 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1152 					"ctlr_intlv",
1153 					"3way_4KB", buf)) {
1154 		popts->memctl_interleaving_mode =
1155 			FSL_DDR_3WAY_4KB_INTERLEAVING;
1156 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1157 					"ctlr_intlv",
1158 					"3way_8KB", buf)) {
1159 		popts->memctl_interleaving_mode =
1160 			FSL_DDR_3WAY_8KB_INTERLEAVING;
1161 #elif (CONFIG_SYS_NUM_DDR_CTLRS == 4)
1162 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1163 					"ctlr_intlv",
1164 					"4way_1KB", buf)) {
1165 		popts->memctl_interleaving_mode =
1166 			FSL_DDR_4WAY_1KB_INTERLEAVING;
1167 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1168 					"ctlr_intlv",
1169 					"4way_4KB", buf)) {
1170 		popts->memctl_interleaving_mode =
1171 			FSL_DDR_4WAY_4KB_INTERLEAVING;
1172 	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
1173 					"ctlr_intlv",
1174 					"4way_8KB", buf)) {
1175 		popts->memctl_interleaving_mode =
1176 			FSL_DDR_4WAY_8KB_INTERLEAVING;
1177 #endif
1178 	} else {
1179 		popts->memctl_interleaving = 0;
1180 		printf("hwconfig has unrecognized parameter for ctlr_intlv.\n");
1181 	}
1182 #endif	/* CONFIG_SYS_FSL_DDR_INTLV_256B */
1183 done:
1184 #endif /* CONFIG_SYS_NUM_DDR_CTLRS > 1 */
1185 	if ((hwconfig_sub_f("fsl_ddr", "bank_intlv", buf)) &&
1186 		(CONFIG_CHIP_SELECTS_PER_CTRL > 1)) {
1187 		/* test null first. if CONFIG_HWCONFIG is not defined,
1188 		 * hwconfig_subarg_cmp_f returns non-zero */
1189 		if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
1190 					    "null", buf))
1191 			debug("bank interleaving disabled.\n");
1192 		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
1193 						 "cs0_cs1", buf))
1194 			popts->ba_intlv_ctl = FSL_DDR_CS0_CS1;
1195 		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
1196 						 "cs2_cs3", buf))
1197 			popts->ba_intlv_ctl = FSL_DDR_CS2_CS3;
1198 		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
1199 						 "cs0_cs1_and_cs2_cs3", buf))
1200 			popts->ba_intlv_ctl = FSL_DDR_CS0_CS1_AND_CS2_CS3;
1201 		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
1202 						 "cs0_cs1_cs2_cs3", buf))
1203 			popts->ba_intlv_ctl = FSL_DDR_CS0_CS1_CS2_CS3;
1204 		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
1205 						"auto", buf))
1206 			popts->ba_intlv_ctl = auto_bank_intlv(pdimm);
1207 		else
1208 			printf("hwconfig has unrecognized parameter for bank_intlv.\n");
1209 		switch (popts->ba_intlv_ctl & FSL_DDR_CS0_CS1_CS2_CS3) {
1210 		case FSL_DDR_CS0_CS1_CS2_CS3:
1211 #if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
1212 			if (pdimm[0].n_ranks < 4) {
1213 				popts->ba_intlv_ctl = 0;
1214 				printf("Not enough bank(chip-select) for "
1215 					"CS0+CS1+CS2+CS3 on controller %d, "
1216 					"interleaving disabled!\n", ctrl_num);
1217 			}
1218 #elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
1219 #ifdef CONFIG_FSL_DDR_FIRST_SLOT_QUAD_CAPABLE
1220 			if (pdimm[0].n_ranks == 4)
1221 				break;
1222 #endif
1223 			if ((pdimm[0].n_ranks < 2) && (pdimm[1].n_ranks < 2)) {
1224 				popts->ba_intlv_ctl = 0;
1225 				printf("Not enough bank(chip-select) for "
1226 					"CS0+CS1+CS2+CS3 on controller %d, "
1227 					"interleaving disabled!\n", ctrl_num);
1228 			}
1229 			if (pdimm[0].capacity != pdimm[1].capacity) {
1230 				popts->ba_intlv_ctl = 0;
1231 				printf("Not identical DIMM size for "
1232 					"CS0+CS1+CS2+CS3 on controller %d, "
1233 					"interleaving disabled!\n", ctrl_num);
1234 			}
1235 #endif
1236 			break;
1237 		case FSL_DDR_CS0_CS1:
1238 			if (pdimm[0].n_ranks < 2) {
1239 				popts->ba_intlv_ctl = 0;
1240 				printf("Not enough bank(chip-select) for "
1241 					"CS0+CS1 on controller %d, "
1242 					"interleaving disabled!\n", ctrl_num);
1243 			}
1244 			break;
1245 		case FSL_DDR_CS2_CS3:
1246 #if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
1247 			if (pdimm[0].n_ranks < 4) {
1248 				popts->ba_intlv_ctl = 0;
1249 				printf("Not enough bank(chip-select) for CS2+CS3 "
1250 					"on controller %d, interleaving disabled!\n", ctrl_num);
1251 			}
1252 #elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
1253 			if (pdimm[1].n_ranks < 2) {
1254 				popts->ba_intlv_ctl = 0;
1255 				printf("Not enough bank(chip-select) for CS2+CS3 "
1256 					"on controller %d, interleaving disabled!\n", ctrl_num);
1257 			}
1258 #endif
1259 			break;
1260 		case FSL_DDR_CS0_CS1_AND_CS2_CS3:
1261 #if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
1262 			if (pdimm[0].n_ranks < 4) {
1263 				popts->ba_intlv_ctl = 0;
1264 				printf("Not enough bank(CS) for CS0+CS1 and "
1265 					"CS2+CS3 on controller %d, "
1266 					"interleaving disabled!\n", ctrl_num);
1267 			}
1268 #elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
1269 			if ((pdimm[0].n_ranks < 2) || (pdimm[1].n_ranks < 2)) {
1270 				popts->ba_intlv_ctl = 0;
1271 				printf("Not enough bank(CS) for CS0+CS1 and "
1272 					"CS2+CS3 on controller %d, "
1273 					"interleaving disabled!\n", ctrl_num);
1274 			}
1275 #endif
1276 			break;
1277 		default:
1278 			popts->ba_intlv_ctl = 0;
1279 			break;
1280 		}
1281 	}
1282 
1283 	if (hwconfig_sub_f("fsl_ddr", "addr_hash", buf)) {
1284 		if (hwconfig_subarg_cmp_f("fsl_ddr", "addr_hash", "null", buf))
1285 			popts->addr_hash = 0;
1286 		else if (hwconfig_subarg_cmp_f("fsl_ddr", "addr_hash",
1287 					       "true", buf))
1288 			popts->addr_hash = 1;
1289 	}
1290 
1291 	if (pdimm[0].n_ranks == 4)
1292 		popts->quad_rank_present = 1;
1293 
1294 	ddr_freq = get_ddr_freq(ctrl_num) / 1000000;
1295 	if (popts->registered_dimm_en) {
1296 		popts->rcw_override = 1;
1297 		popts->rcw_1 = 0x000a5a00;
1298 		if (ddr_freq <= 800)
1299 			popts->rcw_2 = 0x00000000;
1300 		else if (ddr_freq <= 1066)
1301 			popts->rcw_2 = 0x00100000;
1302 		else if (ddr_freq <= 1333)
1303 			popts->rcw_2 = 0x00200000;
1304 		else
1305 			popts->rcw_2 = 0x00300000;
1306 	}
1307 
1308 	fsl_ddr_board_options(popts, pdimm, ctrl_num);
1309 
1310 	return 0;
1311 }
1312 
1313 void check_interleaving_options(fsl_ddr_info_t *pinfo)
1314 {
1315 	int i, j, k, check_n_ranks, intlv_invalid = 0;
1316 	unsigned int check_intlv, check_n_row_addr, check_n_col_addr;
1317 	unsigned long long check_rank_density;
1318 	struct dimm_params_s *dimm;
1319 	int first_ctrl = pinfo->first_ctrl;
1320 	int last_ctrl = first_ctrl + pinfo->num_ctrls - 1;
1321 
1322 	/*
1323 	 * Check if all controllers are configured for memory
1324 	 * controller interleaving. Identical dimms are recommended. At least
1325 	 * the size, row and col address should be checked.
1326 	 */
1327 	j = 0;
1328 	check_n_ranks = pinfo->dimm_params[first_ctrl][0].n_ranks;
1329 	check_rank_density = pinfo->dimm_params[first_ctrl][0].rank_density;
1330 	check_n_row_addr =  pinfo->dimm_params[first_ctrl][0].n_row_addr;
1331 	check_n_col_addr = pinfo->dimm_params[first_ctrl][0].n_col_addr;
1332 	check_intlv = pinfo->memctl_opts[first_ctrl].memctl_interleaving_mode;
1333 	for (i = first_ctrl; i <= last_ctrl; i++) {
1334 		dimm = &pinfo->dimm_params[i][0];
1335 		if (!pinfo->memctl_opts[i].memctl_interleaving) {
1336 			continue;
1337 		} else if (((check_rank_density != dimm->rank_density) ||
1338 		     (check_n_ranks != dimm->n_ranks) ||
1339 		     (check_n_row_addr != dimm->n_row_addr) ||
1340 		     (check_n_col_addr != dimm->n_col_addr) ||
1341 		     (check_intlv !=
1342 			pinfo->memctl_opts[i].memctl_interleaving_mode))){
1343 			intlv_invalid = 1;
1344 			break;
1345 		} else {
1346 			j++;
1347 		}
1348 
1349 	}
1350 	if (intlv_invalid) {
1351 		for (i = first_ctrl; i <= last_ctrl; i++)
1352 			pinfo->memctl_opts[i].memctl_interleaving = 0;
1353 		printf("Not all DIMMs are identical. "
1354 			"Memory controller interleaving disabled.\n");
1355 	} else {
1356 		switch (check_intlv) {
1357 		case FSL_DDR_256B_INTERLEAVING:
1358 		case FSL_DDR_CACHE_LINE_INTERLEAVING:
1359 		case FSL_DDR_PAGE_INTERLEAVING:
1360 		case FSL_DDR_BANK_INTERLEAVING:
1361 		case FSL_DDR_SUPERBANK_INTERLEAVING:
1362 #if (3 == CONFIG_SYS_NUM_DDR_CTLRS)
1363 				k = 2;
1364 #else
1365 				k = CONFIG_SYS_NUM_DDR_CTLRS;
1366 #endif
1367 			break;
1368 		case FSL_DDR_3WAY_1KB_INTERLEAVING:
1369 		case FSL_DDR_3WAY_4KB_INTERLEAVING:
1370 		case FSL_DDR_3WAY_8KB_INTERLEAVING:
1371 		case FSL_DDR_4WAY_1KB_INTERLEAVING:
1372 		case FSL_DDR_4WAY_4KB_INTERLEAVING:
1373 		case FSL_DDR_4WAY_8KB_INTERLEAVING:
1374 		default:
1375 			k = CONFIG_SYS_NUM_DDR_CTLRS;
1376 			break;
1377 		}
1378 		debug("%d of %d controllers are interleaving.\n", j, k);
1379 		if (j && (j != k)) {
1380 			for (i = first_ctrl; i <= last_ctrl; i++)
1381 				pinfo->memctl_opts[i].memctl_interleaving = 0;
1382 			if ((last_ctrl - first_ctrl) > 1)
1383 				puts("Not all controllers have compatible interleaving mode. All disabled.\n");
1384 		}
1385 	}
1386 	debug("Checking interleaving options completed\n");
1387 }
1388 
/*
 * Decide whether DDR configuration comes from SPD EEPROMs.
 *
 * Returns 1 (use SPD) unless hwconfig explicitly selects fixed
 * parameters via "fsl_ddr:sdram=fixed", or SPD support is compiled out.
 */
int fsl_use_spd(void)
{
	int spd_enabled = 0;

#ifdef CONFIG_DDR_SPD
	char hwcfg[HWCONFIG_BUFFER_SIZE];
	char *env = NULL;

	/*
	 * Extract hwconfig from environment since we have not properly setup
	 * the environment but need it for ddr config params
	 */
	if (getenv_f("hwconfig", hwcfg, sizeof(hwcfg)) > 0)
		env = hwcfg;

	if (!hwconfig_sub_f("fsl_ddr", "sdram", env)) {
		/* hwconfig disabled or "sdram" absent: default to SPD */
		spd_enabled = 1;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr", "sdram", "spd", env)) {
		/*
		 * Test "spd" before "fixed": without CONFIG_HWCONFIG the
		 * stub comparison matches anything, and SPD must win then.
		 */
		spd_enabled = 1;
	} else {
		/* only an explicit "fixed" disables SPD */
		spd_enabled = !hwconfig_subarg_cmp_f("fsl_ddr", "sdram",
						     "fixed", env);
	}
#endif

	return spd_enabled;
}
1419