/*
 * Copyright (C) 2018-2021, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <errno.h>
#include <stddef.h>

#include <platform_def.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <drivers/clk.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp_pmic.h>
#include <drivers/st/stm32mp1_ddr.h>
#include <drivers/st/stm32mp1_ddr_regs.h>
#include <drivers/st/stm32mp1_pwr.h>
#include <drivers/st/stm32mp1_ram.h>
#include <lib/mmio.h>
#include <plat/common/platform.h>

struct reg_desc {
	const char *name;
	uint16_t offset;	/* Offset from base address */
	uint8_t par_offset;	/* Offset within parameter structure */
};

#define INVALID_OFFSET	0xFFU

#define TIMEOUT_US_1S	1000000U

#define DDRCTL_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

#define DDRPHY_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = offsetof(struct y, x)		\
	}
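
/*
 * Each DDRCTL_REG()/DDRPHY_REG() entry ties one controller or PHY register
 * (its offset within the memory-mapped register structure) to the field of
 * the same name in the parameter structure filled from the device tree.
 * set_reg() below walks these tables to program a whole register group in
 * one loop.
 */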

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

#define DDR_REG_DYN(x)						\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = INVALID_OFFSET			\
	}

static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

#define DDRPHY_REG_DYN(x)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = INVALID_OFFSET			\
	}

static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};
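
/*
 * Dynamic registers carry INVALID_OFFSET as parameter offset: they have no
 * counterpart in the device-tree parameter structures and are managed
 * directly by the driver; set_reg() panics if asked to program one of these
 * groups.
 */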

enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	/*
	 * Dynamic registers => managed in driver or not changed,
	 * can be dumped in interactive mode.
	 */
	REG_DYN,
	REGPHY_DYN,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	uint8_t size;
	enum base_type base;
};

static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = ARRAY_SIZE(ddr_reg),
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = ARRAY_SIZE(ddr_timing),
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = ARRAY_SIZE(ddr_perf),
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = ARRAY_SIZE(ddr_map),
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = ARRAY_SIZE(ddrphy_reg),
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = ARRAY_SIZE(ddrphy_timing),
		.base = DDRPHY_BASE
	},
	[REGPHY_CAL] = {
		.name = "cal",
		.desc = ddrphy_cal,
		.size = ARRAY_SIZE(ddrphy_cal),
		.base = DDRPHY_BASE
	},
	[REG_DYN] = {
		.name = "dyn",
		.desc = ddr_dyn,
		.size = ARRAY_SIZE(ddr_dyn),
		.base = DDR_BASE
	},
	[REGPHY_DYN] = {
		.name = "dyn",
		.desc = ddrphy_dyn,
		.size = ARRAY_SIZE(ddrphy_dyn),
		.base = DDRPHY_BASE
	},
};

static uintptr_t get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE) {
		return (uintptr_t)priv->phy;
	} else {
		return (uintptr_t)priv->ctl;
	}
}

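/*
 * Program one register group: for each entry of the group's reg_desc table,
 * read the 32-bit value at par_offset in @param and write it to the
 * corresponding controller or PHY register.
 */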
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int value;
	enum base_type base = ddr_registers[type].base;
	uintptr_t base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	VERBOSE("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		uintptr_t ptr = base_addr + desc[i].offset;

		if (desc[i].par_offset == INVALID_OFFSET) {
			ERROR("invalid parameter offset for %s\n",
			      desc[i].name);
			panic();
		} else {
			value = *((uint32_t *)((uintptr_t)param +
					       desc[i].par_offset));
			mmio_write_32(ptr, value);
		}
	}
}

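/*
 * Poll the PHY PGSR register until initialization is done (IDONE) or a
 * training error is flagged; panic if IDONE is not reported within one
 * second.
 */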
static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	uint32_t pgsr;
	int error = 0;
	uint64_t timeout = timeout_init_us(TIMEOUT_US_1S);

	do {
		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);

		VERBOSE("  > [0x%lx] pgsr = 0x%x\n",
			(uintptr_t)&phy->pgsr, pgsr);

		if (timeout_elapsed(timeout)) {
			panic();
		}

		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
			VERBOSE("DQS Gate Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
			VERBOSE("DQS Gate Training Intermittent Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
			VERBOSE("DQS Drift Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
			VERBOSE("Read Valid Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
			VERBOSE("Read Valid Training Intermittent Error\n");
			error++;
		}
	} while (((pgsr & DDRPHYC_PGSR_IDONE) == 0U) && (error == 0));
	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
		(uintptr_t)&phy->pgsr, pgsr);
}

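/* Kick off a PHY PUBL sequence: set PIR.INIT plus the requested step bits. */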
static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
{
	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;

	mmio_write_32((uintptr_t)&phy->pir, pir_init);
	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
		(uintptr_t)&phy->pir, pir_init,
		mmio_read_32((uintptr_t)&phy->pir));

	/* Wait 10 configuration clocks before starting to poll */
	udelay(10);

	/* Wait for DRAM initialization and Gate Training Evaluation to complete */
	stm32mp1_ddrphy_idone_wait(phy);
}

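/*
 * Quasi-dynamic uMCTL2 registers may only be updated under the SWCTL/SWSTAT
 * handshake: clear SWCTL.sw_done, update the registers, set SWCTL.sw_done
 * again, then poll SWSTAT.sw_done_ack. The two helpers below implement the
 * two halves of that handshake.
 */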
/* Start quasi-dynamic register update */
static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	mmio_clrbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));
}

/* Wait for quasi-dynamic register update to be acknowledged */
static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	uint64_t timeout;
	uint32_t swstat;

	mmio_setbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));

	timeout = timeout_init_us(TIMEOUT_US_1S);
	do {
		swstat = mmio_read_32((uintptr_t)&ctl->swstat);
		VERBOSE("[0x%lx] swstat = 0x%x ",
			(uintptr_t)&ctl->swstat, swstat);
		if (timeout_elapsed(timeout)) {
			panic();
		}
	} while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);

	VERBOSE("[0x%lx] swstat = 0x%x\n",
		(uintptr_t)&ctl->swstat, swstat);
}

/* Wait for the controller to reach the requested operating mode */
static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
{
	uint64_t timeout;
	uint32_t stat;
	int break_loop = 0;

	timeout = timeout_init_us(TIMEOUT_US_1S);
	for ( ; ; ) {
		uint32_t operating_mode;
		uint32_t selref_type;

		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%lx] stat = 0x%x\n",
			(uintptr_t)&priv->ctl->stat, stat);
		if (timeout_elapsed(timeout)) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => also check STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: also accept automatic self-refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%lx] stat = 0x%x\n",
		(uintptr_t)&priv->ctl->stat, stat);
}

/* Mode Register Writes (MRW or MRS) */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}

/* Switch DDR3 from DLL-on to DLL-off */
static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
{
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while (((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) !=
		  DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ||
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) != 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0] == 11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure the transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0] = 2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set MSTR.dll_off_mode = 1.
	 *    Warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers.
	 *     This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
}

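/*
 * The DQS training sequence runs with controller auto-refresh and power-down
 * disabled; these two helpers drop and later restore that state around the
 * training (see steps 8 and 12 in stm32mp1_ddr_init()).
 */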
static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp1_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

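/* Configure the DDR supplies through the PMIC when one is described in DT */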
static int board_ddr_power_init(enum ddr_type ddr_type)
{
	if (dt_pmic_status() > 0) {
		return pmic_ddr_power_init(ddr_type);
	}

	return 0;
}

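/*
 * Full DDR controller and PHY initialization sequence.
 *
 * A minimal usage sketch, assuming the caller (as stm32mp1_ram.c does) maps
 * the controller, PHY and RCC bases and fills the config structure from the
 * device tree; the base-accessor names below are illustrative:
 *
 *	struct ddr_info priv = {
 *		.ctl = (struct stm32mp1_ddrctl *)stm32mp_ddrctrl_base(),
 *		.phy = (struct stm32mp1_ddrphy *)stm32mp_ddrphyc_base(),
 *		.rcc = stm32mp_rcc_base(),
 *	};
 *	struct stm32mp1_ddr_config config;	// values parsed from DT
 *
 *	stm32mp1_ddr_init(&priv, &config);
 */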
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size  = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *    Note: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2 Start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3 Deassert resets */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable, via the DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4 Wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5 Initialize the ddr_umctl2 registers */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3: don't set DLL-off mode during init */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by the PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/* 2. Deassert reset signals core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. Start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* DDR3: don't set DLL-off mode during init */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to the PUBL that the controller performs SDRAM
	 *    initialization by setting PIR.INIT and PIR.CTLDINIT,
	 *    and poll PGSR.IDONE.
	 *    DRAM init is done by the PHY, INIT0.skip_dram_init = 1
	 */

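	/*
	 * The PIR step bits below follow the PUBL databook naming (stated
	 * here as an aid, not from this file): DLL soft reset (DLLSRST),
	 * DLL lock wait (DLLLOCK), ZQ calibration (ZCAL), ITM reset
	 * (ITMSRST), PHY-driven DRAM init (DRAMINIT) and init-complete pin
	 * configuration (ICPC); DRAMRST additionally drives the DDR3 reset
	 * pin.
	 */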
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. Set DFIMISC.dfi_init_complete_en to 1
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operating mode
	 *    by monitoring the STAT.operating_mode signal
	 */

	/* Wait for uMCTL2 to be ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL-off mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable auto-refresh and power-down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and the rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. Configure the PUBL PIR register to specify which training steps
	 *     to run
	 *     Warning: RVTRN is not supported by this PUBL.
	 */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. Monitor PUBL PGSR.IDONE to poll completion of the training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. Restore the registers changed in step 8 to their original
	 *     values, if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_0,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_0 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_0,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_1,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_1 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_1,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_1));
}