/*
 * Copyright (C) 2018-2019, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <stddef.h>

#include <platform_def.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp_pmic.h>
#include <drivers/st/stm32mp1_clk.h>
#include <drivers/st/stm32mp1_ddr.h>
#include <drivers/st/stm32mp1_ddr_regs.h>
#include <drivers/st/stm32mp1_pwr.h>
#include <drivers/st/stm32mp1_ram.h>
#include <drivers/st/stm32mp1_rcc.h>
#include <dt-bindings/clock/stm32mp1-clks.h>
#include <lib/mmio.h>
#include <plat/common/platform.h>

struct reg_desc {
	const char *name;
	uint16_t offset;	/* Offset from base address */
	uint8_t par_offset;	/* Offset in the parameter structure */
};

#define INVALID_OFFSET	0xFFU

#define TIMESLOT_1US	(plat_get_syscnt_freq2() / 1000000U)

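/*
 * DDRCTL_REG() and DDRPHY_REG() build one reg_desc entry: 'offset' locates
 * the register inside the controller or PHY register map, while
 * 'par_offset' locates the matching field inside the parameter structure
 * 'y' (filled from the device tree settings), so that set_reg() can copy
 * any group of parameters generically.
 */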
#define DDRCTL_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

#define DDRPHY_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

#define DDR_REG_DYN(x)						\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = INVALID_OFFSET			\
	}

static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

#define DDRPHY_REG_DYN(x)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = INVALID_OFFSET			\
	}

static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};

enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
/*
 * Dynamic registers => managed in driver or not changed,
 * can be dumped in interactive mode.
 */
	REG_DYN,
	REGPHY_DYN,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	uint8_t size;
	enum base_type base;
};

static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		"static", ddr_reg, ARRAY_SIZE(ddr_reg), DDR_BASE
	},
	[REG_TIMING] = {
		"timing", ddr_timing, ARRAY_SIZE(ddr_timing), DDR_BASE
	},
	[REG_PERF] = {
		"perf", ddr_perf, ARRAY_SIZE(ddr_perf), DDR_BASE
	},
	[REG_MAP] = {
		"map", ddr_map, ARRAY_SIZE(ddr_map), DDR_BASE
	},
	[REGPHY_REG] = {
		"static", ddrphy_reg, ARRAY_SIZE(ddrphy_reg), DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		"timing", ddrphy_timing, ARRAY_SIZE(ddrphy_timing), DDRPHY_BASE
	},
	[REGPHY_CAL] = {
		"cal", ddrphy_cal, ARRAY_SIZE(ddrphy_cal), DDRPHY_BASE
	},
	[REG_DYN] = {
		"dyn", ddr_dyn, ARRAY_SIZE(ddr_dyn), DDR_BASE
	},
	[REGPHY_DYN] = {
		"dyn", ddrphy_dyn, ARRAY_SIZE(ddrphy_dyn), DDRPHY_BASE
	},
};

static uint32_t get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE) {
		return (uint32_t)priv->phy;
	} else {
		return (uint32_t)priv->ctl;
	}
}

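/*
 * Copy one group of configuration parameters into the corresponding
 * controller or PHY registers. Panics if the group contains a dynamic
 * register (par_offset == INVALID_OFFSET), since those have no matching
 * parameter field.
 */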
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int *ptr, value;
	enum base_type base = ddr_registers[type].base;
	uint32_t base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	VERBOSE("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		ptr = (unsigned int *)(base_addr + desc[i].offset);
		if (desc[i].par_offset == INVALID_OFFSET) {
			ERROR("invalid parameter offset for %s\n",
			      desc[i].name);
			panic();
		} else {
			value = *((uint32_t *)((uint32_t)param +
					       desc[i].par_offset));
			mmio_write_32((uint32_t)ptr, value);
		}
	}
}

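/*
 * Poll the PHY PGSR register until initialization is done (IDONE) or a
 * training error is flagged. Panics if IDONE is not reached in time; the
 * bound of plat_get_syscnt_freq2() get_timer() units appears intended as
 * a one-second timeout.
 */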
static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	uint32_t pgsr;
	int error = 0;
	unsigned long start;
	unsigned long time0, time;

	start = get_timer(0);
	time0 = start;

	do {
		pgsr = mmio_read_32((uint32_t)&phy->pgsr);
		time = get_timer(start);
		if (time != time0) {
			VERBOSE("  > [0x%x] pgsr = 0x%x\n",
				(uint32_t)&phy->pgsr, pgsr);
			VERBOSE("    [0x%x] pir = 0x%x (time=%x)\n",
				(uint32_t)&phy->pir,
				mmio_read_32((uint32_t)&phy->pir),
				(uint32_t)time);
		}

		time0 = time;
		if (time > plat_get_syscnt_freq2()) {
			panic();
		}
		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
			VERBOSE("DQS Gate Training Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
			VERBOSE("DQS Gate Training Intermittent Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
			VERBOSE("DQS Drift Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
			VERBOSE("Read Valid Training Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
			VERBOSE("Read Valid Training Intermittent Error\n");
			error++;
		}
	} while ((pgsr & DDRPHYC_PGSR_IDONE) == 0U && error == 0);
	VERBOSE("\n[0x%x] pgsr = 0x%x\n",
		(uint32_t)&phy->pgsr, pgsr);
}

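/*
 * Run one PHY initialization step: write the requested PIR fields with
 * PIR.INIT set, wait for the PHY to latch the request, then poll
 * PGSR.IDONE for completion.
 */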
static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
{
	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;

	mmio_write_32((uint32_t)&phy->pir, pir_init);
	VERBOSE("[0x%x] pir = 0x%x -> 0x%x\n",
		(uint32_t)&phy->pir, pir_init,
		mmio_read_32((uint32_t)&phy->pir));

	/* Need to wait for 10 configuration clock cycles before polling */
	udelay(10);

	/* Wait for DRAM initialization and gate training evaluation to complete */
	stm32mp1_ddrphy_idone_wait(phy);
}

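/*
 * uMCTL2 quasi-dynamic registers may only be modified under the software
 * handshake: clear SWCTL.sw_done, program the registers, set SWCTL.sw_done
 * back to 1, then poll SWSTAT.sw_done_ack. The two helpers below implement
 * the two halves of this handshake.
 */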
/* Start quasi dynamic register update */
static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	mmio_clrbits_32((uint32_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%x] swctl = 0x%x\n",
		(uint32_t)&ctl->swctl, mmio_read_32((uint32_t)&ctl->swctl));
}

/* Complete quasi dynamic register update and wait for its acknowledgement */
static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	unsigned long start;
	uint32_t swstat;

	mmio_setbits_32((uint32_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%x] swctl = 0x%x\n",
		(uint32_t)&ctl->swctl, mmio_read_32((uint32_t)&ctl->swctl));

	start = get_timer(0);
	do {
		swstat = mmio_read_32((uint32_t)&ctl->swstat);
		VERBOSE("[0x%x] swstat = 0x%x ",
			(uint32_t)&ctl->swstat, swstat);
		VERBOSE("timer in ms 0x%lx = start 0x%lx\r",
			get_timer(0), start);
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}
	} while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);

	VERBOSE("[0x%x] swstat = 0x%x\n",
		(uint32_t)&ctl->swstat, swstat);
}

/* Wait for the controller to reach the requested operating mode */
static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
{
	unsigned long start;
	uint32_t stat;
	uint32_t operating_mode;
	uint32_t selref_type;
	int break_loop = 0;

	start = get_timer(0);
	for ( ; ; ) {
		stat = mmio_read_32((uint32_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%x] stat = 0x%x\n",
			(uint32_t)&priv->ctl->stat, stat);
		VERBOSE("timer in ms 0x%lx = start 0x%lx\r",
			get_timer(0), start);
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => also check STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: also accept automatic self-refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%x] stat = 0x%x\n",
		(uint32_t)&priv->ctl->stat, stat);
}

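/*
 * The sequence below follows the software-driven mode register access flow
 * of the uMCTL2: wait for MRSTAT.mr_wr_busy to clear, program MRCTRL0 and
 * MRCTRL1, then set the self-clearing MRCTRL0.mr_wr bit and wait for the
 * transaction to complete.
 */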
/* Mode Register Writes (MRW or MRS) */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uint32_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uint32_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%x] mrctrl0 = 0x%x (0x%x)\n",
		(uint32_t)&priv->ctl->mrctrl0,
		mmio_read_32((uint32_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uint32_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%x] mrctrl1 = 0x%x\n",
		(uint32_t)&priv->ctl->mrctrl1,
		mmio_read_32((uint32_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uint32_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uint32_t)&priv->ctl->mrstat) &
	       DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%x] mrctrl0 = 0x%x\n",
		(uint32_t)&priv->ctl->mrctrl0, mrctrl0);
}

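/*
 * The numbered steps below follow the uMCTL2 DLL-on to DLL-off frequency
 * switching sequence; steps that require no action on this platform are
 * kept as comments to preserve the numbering.
 */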
/* Switch DDR3 from DLL-on to DLL-off */
static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
{
	uint32_t mr1 = mmio_read_32((uint32_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uint32_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uint32_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%x] dbg1 = 0x%x\n",
		(uint32_t)&priv->ctl->dbg1,
		mmio_read_32((uint32_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 *    Loop until both pipelines are empty and the queues are drained.
	 */
	do {
		dbgcam = mmio_read_32((uint32_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%x] dbgcam = 0x%x\n",
			(uint32_t)&priv->ctl->dbgcam, dbgcam);
	} while (((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) !=
		  DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ||
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) != 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uint32_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%x] pwrctl = 0x%x\n",
		(uint32_t)&priv->ctl->pwrctl,
		mmio_read_32((uint32_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    Warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uint32_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%x] mstr = 0x%x\n",
		(uint32_t)&priv->ctl->mstr,
		mmio_read_32((uint32_t)&priv->ctl->mstr));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers.
	 *     This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (stm32mp1_clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uint32_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uint32_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	mmio_setbits_32((uint32_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uint32_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uint32_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uint32_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uint32_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uint32_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uint32_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%x] dbg1 = 0x%x\n",
		(uint32_t)&priv->ctl->dbg1,
		mmio_read_32((uint32_t)&priv->ctl->dbg1));
}

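/*
 * Freeze refresh-related controller state while the PHY runs DQS training:
 * disable auto-refresh and power-down, and clear
 * DFIMISC.dfi_init_complete_en, using the quasi-dynamic register handshake.
 */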
static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uint32_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uint32_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uint32_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

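/*
 * Undo stm32mp1_refresh_disable(): restore the auto-refresh and power-down
 * settings saved before training and set DFIMISC.dfi_init_complete_en back
 * to 1.
 */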
static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp1_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uint32_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uint32_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uint32_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

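/*
 * Configure the DDR power supplies through the PMIC when the device tree
 * describes one; boards without a PMIC need no action here.
 */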
static int board_ddr_power_init(enum ddr_type ddr_type)
{
	if (dt_check_pmic()) {
		return pmic_ddr_power_init(ddr_type);
	}

	return 0;
}

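/*
 * Main DDR initialization entry point: power the DDR rails, reset and
 * clock the controller and PHY, program the uMCTL2 and PUBL registers from
 * the configuration settings, run DRAM initialization and DQS training,
 * then enable the two uMCTL2 AXI ports.
 */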
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u MHz\n", config->info.speed);
	VERBOSE("size  = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *    Note: check DFIMISC.dfi_init_complete_en = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/*
	 * 1.4. wait 128 cycles to permit initialization of end logic.
	 * For PCLK = 133 MHz, 1 us is enough; 2 us allows for lower
	 * frequencies.
	 */
	udelay(2);

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uint32_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%x] dfimisc = 0x%x\n",
		(uint32_t)&priv->ctl->dfimisc,
		mmio_read_32((uint32_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3: do not set DLL-off for the init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uint32_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%x] mstr = 0x%x\n",
			(uint32_t)&priv->ctl->mstr,
			mmio_read_32((uint32_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by the PHY (PUBL) */
	mmio_clrsetbits_32((uint32_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%x] init0 = 0x%x\n",
		(uint32_t)&priv->ctl->init0,
		mmio_read_32((uint32_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/*  2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* DDR3: do not set DLL-off for the init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uint32_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%x] mr1 = 0x%x\n",
			(uint32_t)&priv->phy->mr1,
			mmio_read_32((uint32_t)&priv->phy->mr1));
	}

	/*
	 *  4. Monitor PHY init status by polling PUBL register PGSR.IDONE.
	 *     Perform DDR PHY DRAM initialization and gate training
	 *     evaluation.
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 *  5. Indicate to PUBL that the controller performs SDRAM
	 *     initialization by setting PIR.INIT and PIR.CTLDINIT,
	 *     and poll PGSR.IDONE.
	 *     DRAM init is done by the PHY, INIT0.skip_dram_init = 1.
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 *  6. Set DFIMISC.dfi_init_complete_en to 1.
	 *     Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uint32_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%x] dfimisc = 0x%x\n",
		(uint32_t)&priv->ctl->dfimisc,
		mmio_read_32((uint32_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 *  7. Wait for DWC_ddr_umctl2 to move to normal operating mode
	 *     by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL-off mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 *  8. Disable auto refresh and power down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 *  9. Program PUBL PGCR to enable refresh during training
	 *     and rank to train.
	 *     Not done here => keep the programmed value in PGCR.
	 */

	/*
	 * 10. Configure PUBL PIR register to specify which training step
	 *     to run.
	 *     Warning: RVTRN is not supported by this PUBL.
	 */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. Monitor PUBL PGSR.IDONE to poll completion of the training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. Restore the registers changed in step 8 to their original
	 *     values, if desired.
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uint32_t)&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%x] pctrl_0 = 0x%x\n",
		(uint32_t)&priv->ctl->pctrl_0,
		mmio_read_32((uint32_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uint32_t)&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%x] pctrl_1 = 0x%x\n",
		(uint32_t)&priv->ctl->pctrl_1,
		mmio_read_32((uint32_t)&priv->ctl->pctrl_1));
}