xref: /rk3399_ARM-atf/drivers/st/ddr/stm32mp1_ddr.c (revision 3f9c97842e5780e0e21f8eb36844c8154635c8c4)
1 /*
2  * Copyright (C) 2018-2019, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
5  */
6 
7 #include <errno.h>
8 #include <stddef.h>
9 
10 #include <platform_def.h>
11 
12 #include <arch.h>
13 #include <arch_helpers.h>
14 #include <common/debug.h>
15 #include <drivers/delay_timer.h>
16 #include <drivers/st/stm32mp_pmic.h>
17 #include <drivers/st/stm32mp1_clk.h>
18 #include <drivers/st/stm32mp1_ddr.h>
19 #include <drivers/st/stm32mp1_ddr_regs.h>
20 #include <drivers/st/stm32mp1_pwr.h>
21 #include <drivers/st/stm32mp1_ram.h>
22 #include <drivers/st/stm32mp1_rcc.h>
23 #include <dt-bindings/clock/stm32mp1-clks.h>
24 #include <lib/mmio.h>
25 #include <plat/common/platform.h>
26 
/*
 * Descriptor binding one hardware register to its settings source:
 * 'offset' locates the register relative to the controller/PHY base
 * address, 'par_offset' locates the matching 32-bit value inside the
 * parameter structure handed to set_reg() (INVALID_OFFSET when the
 * register is driver-managed and has no parameter).
 */
struct reg_desc {
	const char *name;	/* Register name, used in traces */
	uint16_t offset;	/* Offset for base address */
	uint8_t par_offset;	/* Offset for parameter array */
};
32 
/* Marks registers that have no entry in the parameter structures */
#define INVALID_OFFSET	0xFFU

/*
 * System counter ticks per microsecond.
 * NOTE(review): not referenced in this chunk — presumably used elsewhere
 * in the file; confirm before removing.
 */
#define TIMESLOT_1US	(plat_get_syscnt_freq2() / 1000000U)
36 
/*
 * Build a reg_desc entry for a controller register: the register name
 * 'x' must be a field of both struct stm32mp1_ddrctl and the parameter
 * structure 'y', so both offsets can be derived with offsetof().
 */
#define DDRCTL_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

/* Same as DDRCTL_REG, but for PHY registers (struct stm32mp1_ddrphy) */
#define DDRPHY_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = offsetof(struct y, x)		\
	}
50 
#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
/* Controller static configuration registers (from stm32mp1_ddrctrl_reg) */
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};
79 
#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
/* Controller timing registers (from stm32mp1_ddrctrl_timing) */
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};
95 
#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
/* Controller address-mapping registers (from stm32mp1_ddrctrl_map) */
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};
108 
#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
/* Controller performance/QoS registers, ports 0 and 1 (stm32mp1_ddrctrl_perf) */
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};
129 
#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
/* PHY static configuration registers (from stm32mp1_ddrphy_reg) */
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};
144 
#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
/* PHY timing and mode registers (from stm32mp1_ddrphy_timing) */
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};
158 
#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
/* PHY calibration registers, per byte lane DX0..DX3 (stm32mp1_ddrphy_cal) */
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};
174 
/*
 * Entry for a dynamic controller register: managed directly by the
 * driver, so it has no parameter (par_offset = INVALID_OFFSET) and
 * set_reg() must never be asked to program it.
 */
#define DDR_REG_DYN(x)						\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = INVALID_OFFSET \
	}

/* Controller dynamic registers (driver-managed, dump-only) */
static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};
192 
/* Same as DDR_REG_DYN, but for PHY dynamic registers */
#define DDRPHY_REG_DYN(x)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = INVALID_OFFSET			\
	}

/* PHY dynamic registers (driver-managed, dump-only) */
static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};
204 
/* Register families; values index the ddr_registers[] table below */
enum reg_type {
	REG_REG,	/* Controller static configuration */
	REG_TIMING,	/* Controller timing */
	REG_PERF,	/* Controller performance/QoS */
	REG_MAP,	/* Controller address mapping */
	REGPHY_REG,	/* PHY static configuration */
	REGPHY_TIMING,	/* PHY timing and mode registers */
	REGPHY_CAL,	/* PHY calibration */
/*
 * Dynamic registers => managed in driver or not changed,
 * can be dumped in interactive mode.
 */
	REG_DYN,	/* Controller dynamic registers */
	REGPHY_DYN,	/* PHY dynamic registers */
	REG_TYPE_NB	/* Number of register families */
};

/* Which IP block a register family belongs to */
enum base_type {
	DDR_BASE,	/* DDR controller (uMCTL2) */
	DDRPHY_BASE,	/* DDR PHY (PUBL) */
	NONE_BASE	/* No associated base address */
};
227 
/* Description of one register family: its descriptors and target IP */
struct ddr_reg_info {
	const char *name;		/* Family name, for traces */
	const struct reg_desc *desc;	/* Descriptor array */
	uint8_t size;			/* Number of entries in desc[] */
	enum base_type base;		/* Controller or PHY base address */
};
234 
/*
 * Master table of all register families, indexed by enum reg_type;
 * set_reg() walks the selected family's descriptors.
 */
static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = ARRAY_SIZE(ddr_reg),
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = ARRAY_SIZE(ddr_timing),
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = ARRAY_SIZE(ddr_perf),
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = ARRAY_SIZE(ddr_map),
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = ARRAY_SIZE(ddrphy_reg),
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = ARRAY_SIZE(ddrphy_timing),
		.base = DDRPHY_BASE
	},
	[REGPHY_CAL] = {
		.name = "cal",
		.desc = ddrphy_cal,
		.size = ARRAY_SIZE(ddrphy_cal),
		.base = DDRPHY_BASE
	},
	[REG_DYN] = {
		.name = "dyn",
		.desc = ddr_dyn,
		.size = ARRAY_SIZE(ddr_dyn),
		.base = DDR_BASE
	},
	[REGPHY_DYN] = {
		.name = "dyn",
		.desc = ddrphy_dyn,
		.size = ARRAY_SIZE(ddrphy_dyn),
		.base = DDRPHY_BASE
	},
};
291 
292 static uintptr_t get_base_addr(const struct ddr_info *priv, enum base_type base)
293 {
294 	if (base == DDRPHY_BASE) {
295 		return (uintptr_t)priv->phy;
296 	} else {
297 		return (uintptr_t)priv->ctl;
298 	}
299 }
300 
301 static void set_reg(const struct ddr_info *priv,
302 		    enum reg_type type,
303 		    const void *param)
304 {
305 	unsigned int i;
306 	unsigned int value;
307 	enum base_type base = ddr_registers[type].base;
308 	uintptr_t base_addr = get_base_addr(priv, base);
309 	const struct reg_desc *desc = ddr_registers[type].desc;
310 
311 	VERBOSE("init %s\n", ddr_registers[type].name);
312 	for (i = 0; i < ddr_registers[type].size; i++) {
313 		uintptr_t ptr = base_addr + desc[i].offset;
314 
315 		if (desc[i].par_offset == INVALID_OFFSET) {
316 			ERROR("invalid parameter offset for %s", desc[i].name);
317 			panic();
318 		} else {
319 			value = *((uint32_t *)((uintptr_t)param +
320 					       desc[i].par_offset));
321 			mmio_write_32(ptr, value);
322 		}
323 	}
324 }
325 
326 static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
327 {
328 	uint32_t pgsr;
329 	int error = 0;
330 	unsigned long start;
331 	unsigned long time0, time;
332 
333 	start = get_timer(0);
334 	time0 = start;
335 
336 	do {
337 		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);
338 		time = get_timer(start);
339 		if (time != time0) {
340 			VERBOSE("  > [0x%lx] pgsr = 0x%x &\n",
341 				(uintptr_t)&phy->pgsr, pgsr);
342 			VERBOSE("    [0x%lx] pir = 0x%x (time=%lx)\n",
343 				(uintptr_t)&phy->pir,
344 				mmio_read_32((uintptr_t)&phy->pir),
345 				time);
346 		}
347 
348 		time0 = time;
349 		if (time > plat_get_syscnt_freq2()) {
350 			panic();
351 		}
352 		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
353 			VERBOSE("DQS Gate Trainig Error\n");
354 			error++;
355 		}
356 		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
357 			VERBOSE("DQS Gate Trainig Intermittent Error\n");
358 			error++;
359 		}
360 		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
361 			VERBOSE("DQS Drift Error\n");
362 			error++;
363 		}
364 		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
365 			VERBOSE("Read Valid Training Error\n");
366 			error++;
367 		}
368 		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
369 			VERBOSE("Read Valid Training Intermittent Error\n");
370 			error++;
371 		}
372 	} while ((pgsr & DDRPHYC_PGSR_IDONE) == 0U && error == 0);
373 	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
374 		(uintptr_t)&phy->pgsr, pgsr);
375 }
376 
377 static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
378 {
379 	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;
380 
381 	mmio_write_32((uintptr_t)&phy->pir, pir_init);
382 	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
383 		(uintptr_t)&phy->pir, pir_init,
384 		mmio_read_32((uintptr_t)&phy->pir));
385 
386 	/* Need to wait 10 configuration clock before start polling */
387 	udelay(10);
388 
389 	/* Wait DRAM initialization and Gate Training Evaluation complete */
390 	stm32mp1_ddrphy_idone_wait(phy);
391 }
392 
/*
 * Start quasi dynamic register update: clear SWCTL.sw_done to open a
 * programming window; paired with stm32mp1_wait_sw_done_ack().
 */
static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	mmio_clrbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl,  mmio_read_32((uintptr_t)&ctl->swctl));
}
400 
/*
 * Finish a quasi-dynamic register update: set SWCTL.sw_done to close the
 * programming window, then poll SWSTAT until the controller acknowledges
 * (sw_done_ack). Panics if the acknowledge never arrives within the
 * timeout budget.
 */
static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	unsigned long start;
	uint32_t swstat;

	mmio_setbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));

	start = get_timer(0);
	do {
		swstat = mmio_read_32((uintptr_t)&ctl->swstat);
		VERBOSE("[0x%lx] swstat = 0x%x ",
			(uintptr_t)&ctl->swstat, swstat);
		/* NOTE(review): get_timer() appears to return unsigned long
		 * but is printed with %x — width mismatch to confirm. */
		VERBOSE("timer in ms 0x%x = start 0x%lx\r",
			get_timer(0), start);
		/* NOTE(review): timeout compares elapsed time against the
		 * counter frequency — assumes matching units; confirm. */
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}
	} while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);

	VERBOSE("[0x%lx] swstat = 0x%x\n",
		(uintptr_t)&ctl->swstat, swstat);
}
426 
/*
 * Poll STAT until the controller reaches the requested operating mode.
 * (The previous comment here was a copy-paste from the sw_done helper.)
 * When waiting for self-refresh, also require STAT.selfref_type to show
 * a software-initiated entry; when waiting for normal mode, an automatic
 * self-refresh state is accepted as well. Panics on timeout.
 */
static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
{
	unsigned long start;
	uint32_t stat;
	uint32_t operating_mode;
	uint32_t selref_type;
	int break_loop = 0;

	start = get_timer(0);
	for ( ; ; ) {
		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%lx] stat = 0x%x\n",
			(uintptr_t)&priv->ctl->stat, stat);
		VERBOSE("timer in ms 0x%x = start 0x%lx\r",
			get_timer(0), start);
		/* NOTE(review): timeout compares elapsed time against the
		 * counter frequency — assumes matching units; confirm. */
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => checking also STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: handle also automatic self refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%lx] stat = 0x%x\n",
		(uintptr_t)&priv->ctl->stat, stat);
}
476 
/*
 * Mode Register Writes (MRW or MRS).
 *
 * @priv: driver context
 * @addr: mode register index (e.g. 1 for MR1, 2 for MR2)
 * @data: value to write into the selected mode register
 *
 * Follows the uMCTL2 three-step MR access protocol; busy-waits without
 * timeout on MRSTAT.mr_wr_busy before and after the transaction.
 */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
	       DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}
531 
532 /* Switch DDR3 from DLL-on to DLL-off */
533 static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
534 {
535 	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
536 	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
537 	uint32_t dbgcam;
538 
539 	VERBOSE("mr1: 0x%x\n", mr1);
540 	VERBOSE("mr2: 0x%x\n", mr2);
541 
542 	/*
543 	 * 1. Set the DBG1.dis_hif = 1.
544 	 *    This prevents further reads/writes being received on the HIF.
545 	 */
546 	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
547 	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
548 		(uintptr_t)&priv->ctl->dbg1,
549 		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
550 
551 	/*
552 	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
553 	 *    DBGCAM.wr_data_pipeline_empty = 1,
554 	 *    DBGCAM.rd_data_pipeline_empty = 1,
555 	 *    DBGCAM.dbg_wr_q_depth = 0 ,
556 	 *    DBGCAM.dbg_lpr_q_depth = 0, and
557 	 *    DBGCAM.dbg_hpr_q_depth = 0.
558 	 */
559 	do {
560 		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
561 		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
562 			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
563 	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
564 		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
565 		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));
566 
567 	/*
568 	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
569 	 *    to disable RTT_NOM:
570 	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
571 	 *    b. DDR4: Write to MR1[10:8]
572 	 */
573 	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
574 	stm32mp1_mode_register_write(priv, 1, mr1);
575 
576 	/*
577 	 * 4. For DDR4 only: Perform an MRS command
578 	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
579 	 *    to disable RTT_PARK
580 	 */
581 
582 	/*
583 	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
584 	 *    to write to MR2[10:9], to disable RTT_WR
585 	 *    (and therefore disable dynamic ODT).
586 	 *    This applies for both DDR3 and DDR4.
587 	 */
588 	mr2 &= ~GENMASK(10, 9);
589 	stm32mp1_mode_register_write(priv, 2, mr2);
590 
591 	/*
592 	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
593 	 *    to disable the DLL. The timing of this MRS is automatically
594 	 *    handled by the uMCTL2.
595 	 *    a. DDR3: Write to MR1[0]
596 	 *    b. DDR4: Write to MR1[0]
597 	 */
598 	mr1 |= BIT(0);
599 	stm32mp1_mode_register_write(priv, 1, mr1);
600 
601 	/*
602 	 * 7. Put the SDRAM into self-refresh mode by setting
603 	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
604 	 *    the DDRC has entered self-refresh.
605 	 */
606 	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
607 			DDRCTRL_PWRCTL_SELFREF_SW);
608 	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
609 		(uintptr_t)&priv->ctl->pwrctl,
610 		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));
611 
612 	/*
613 	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
614 	 *    DWC_ddr_umctl2 core is in self-refresh mode.
615 	 *    Ensure transition to self-refresh was due to software
616 	 *    by checking that STAT.selfref_type[1:0]=2.
617 	 */
618 	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);
619 
620 	/*
621 	 * 9. Set the MSTR.dll_off_mode = 1.
622 	 *    warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
623 	 */
624 	stm32mp1_start_sw_done(priv->ctl);
625 
626 	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
627 	VERBOSE("[0x%lx] mstr = 0x%x\n",
628 		(uintptr_t)&priv->ctl->mstr,
629 		mmio_read_32((uintptr_t)&priv->ctl->mstr));
630 
631 	stm32mp1_wait_sw_done_ack(priv->ctl);
632 
633 	/* 10. Change the clock frequency to the desired value. */
634 
635 	/*
636 	 * 11. Update any registers which may be required to change for the new
637 	 *     frequency. This includes static and dynamic registers.
638 	 *     This includes both uMCTL2 registers and PHY registers.
639 	 */
640 
641 	/* Change Bypass Mode Frequency Range */
642 	if (stm32mp_clk_get_rate(DDRPHYC) < 100000000U) {
643 		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
644 				DDRPHYC_DLLGCR_BPS200);
645 	} else {
646 		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
647 				DDRPHYC_DLLGCR_BPS200);
648 	}
649 
650 	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);
651 
652 	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
653 			DDRPHYC_DXNDLLCR_DLLDIS);
654 	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
655 			DDRPHYC_DXNDLLCR_DLLDIS);
656 	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
657 			DDRPHYC_DXNDLLCR_DLLDIS);
658 	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
659 			DDRPHYC_DXNDLLCR_DLLDIS);
660 
661 	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
662 	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
663 			DDRCTRL_PWRCTL_SELFREF_SW);
664 	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);
665 
666 	/*
667 	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
668 	 *     at this point.
669 	 */
670 
671 	/*
672 	 * 14. Perform MRS commands as required to re-program timing registers
673 	 *     in the SDRAM for the new frequency
674 	 *     (in particular, CL, CWL and WR may need to be changed).
675 	 */
676 
677 	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
678 	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
679 	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
680 		(uintptr_t)&priv->ctl->dbg1,
681 		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
682 }
683 
/*
 * Disable auto-refresh, power-down and DFI init-complete ahead of DQS
 * training (init sequence step 8), inside one quasi-dynamic programming
 * window; paired with stm32mp1_refresh_restore().
 */
static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Quasi-dynamic register update*/
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}
695 
/*
 * Undo stm32mp1_refresh_disable() after training: restore auto-refresh
 * and power-down only if the caller's saved RFSHCTL3/PWRCTL values had
 * them enabled, and re-enable DFI init-complete; all inside one
 * quasi-dynamic programming window.
 *
 * @rfshctl3: RFSHCTL3 value to restore (as configured before training)
 * @pwrctl:   PWRCTL value to restore (as configured before training)
 */
static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp1_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}
712 
713 static int board_ddr_power_init(enum ddr_type ddr_type)
714 {
715 	if (dt_check_pmic()) {
716 		return pmic_ddr_power_init(ddr_type);
717 	}
718 
719 	return 0;
720 }
721 
/*
 * Full DDR bring-up sequence: power the rails (via PMIC if present),
 * reset and clock the controller and PHY, program all register families
 * from 'config', run PHY DRAM initialization and DQS training, then
 * enable both AXI ports. Panics on any unrecoverable error.
 *
 * @priv:   driver context (controller, PHY and RCC base addresses)
 * @config: full register/parameter set for the target DDR device
 */
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	/* Only DDR3 and LPDDR2 are supported; anything else is fatal */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %d kHz\n", config->info.speed);
	VERBOSE("size  = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *     nota: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/*  2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 *  4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *     Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 *  5. Indicate to PUBL that controller performs SDRAM initialization
	 *     by setting PIR.INIT and PIR CTLDINIT and poll PGSR.IDONE
	 *     DRAM init is done by PHY, init0.skip_dram.init = 1
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 *  6. SET DFIMISC.dfi_init_complete_en to 1
	 *  Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 *  7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *     by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 *  8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfiinit_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 *  9. Program PUBL PGCR to enable refresh during training
	 *     and rank to train
	 *     not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 * to run
	 * Warning : RVTRN is not supported by this PUBL
	 */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back registers in step 8 to the original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_0,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_0 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_0,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_1,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_1 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_1,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_1));
}
931