xref: /rk3399_ARM-atf/drivers/st/ddr/stm32mp1_ddr.c (revision 5def13eb01ebac5656031bdc388a215d012fdaf8)
1 /*
2  * Copyright (C) 2018-2022, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
5  */
6 
7 #include <errno.h>
8 #include <stddef.h>
9 
10 #include <arch.h>
11 #include <arch_helpers.h>
12 #include <common/debug.h>
13 #include <drivers/clk.h>
14 #include <drivers/delay_timer.h>
15 #include <drivers/st/stm32mp1_ddr.h>
16 #include <drivers/st/stm32mp1_ddr_regs.h>
17 #include <drivers/st/stm32mp1_pwr.h>
18 #include <drivers/st/stm32mp1_ram.h>
19 #include <drivers/st/stm32mp_pmic.h>
20 #include <lib/mmio.h>
21 #include <plat/common/platform.h>
22 
23 #include <platform_def.h>
24 
/*
 * Generic descriptor of one DDR controller/PHY register: maps the
 * register name to its offset inside the memory-mapped peripheral and
 * to the offset of its value inside the matching parameter structure
 * (filled from the device tree).
 */
struct reg_desc {
	const char *name;
	uint16_t offset;	/* Offset for base address */
	uint8_t par_offset;	/* Offset for parameter array */
};

/* Marks a register that has no slot in the parameter structure */
#define INVALID_OFFSET	0xFFU

/* Common timeout for all DDRCTRL/DDRPHYC status polling loops */
#define TIMEOUT_US_1S	1000000U

/* Describe DDRCTRL register x whose parameter lives in struct y */
#define DDRCTL_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

/* Describe DDRPHYC register x whose parameter lives in struct y */
#define DDRPHY_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = offsetof(struct y, x)		\
	}
48 
/* Static DDRCTRL configuration registers, programmed from c_reg */
#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

/* DDRCTRL timing registers, programmed from c_timing */
#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

/* DDRCTRL address-mapping registers, programmed from c_map */
#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

/* DDRCTRL scheduling/QoS/port registers, programmed from c_perf */
#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};
127 
/* Static DDRPHYC configuration registers, programmed from p_reg */
#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

/* DDRPHYC timing and SDRAM mode registers, programmed from p_timing */
#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};
156 
/*
 * Describe a dynamic DDRCTRL register: handled by the driver itself,
 * so it has no slot in the parameter structures (INVALID_OFFSET).
 */
#define DDR_REG_DYN(x)						\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = INVALID_OFFSET \
	}

/* DDRCTRL registers managed directly by the driver (dump only) */
static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

/* Same as DDR_REG_DYN but for a DDRPHYC register */
#define DDRPHY_REG_DYN(x)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = INVALID_OFFSET			\
	}

/* DDRPHYC registers managed directly by the driver (dump only) */
static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};
186 
/* Register groups; each value indexes ddr_registers[] below */
enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
/*
 * Dynamic registers => managed in driver or not changed,
 * can be dumped in interactive mode.
 */
	REG_DYN,
	REGPHY_DYN,
	REG_TYPE_NB
};

/* Which memory-mapped IP a register group belongs to */
enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

/* One register group: display name, descriptor table and target IP */
struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	uint8_t size;		/* Number of entries in desc[] */
	enum base_type base;	/* Base address the offsets apply to */
};
215 
/* Top-level table tying each reg_type to its descriptor array */
static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = ARRAY_SIZE(ddr_reg),
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = ARRAY_SIZE(ddr_timing),
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = ARRAY_SIZE(ddr_perf),
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = ARRAY_SIZE(ddr_map),
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = ARRAY_SIZE(ddrphy_reg),
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = ARRAY_SIZE(ddrphy_timing),
		.base = DDRPHY_BASE
	},
	[REG_DYN] = {
		.name = "dyn",
		.desc = ddr_dyn,
		.size = ARRAY_SIZE(ddr_dyn),
		.base = DDR_BASE
	},
	[REGPHY_DYN] = {
		.name = "dyn",
		.desc = ddrphy_dyn,
		.size = ARRAY_SIZE(ddrphy_dyn),
		.base = DDRPHY_BASE
	},
};
266 
267 static uintptr_t get_base_addr(const struct ddr_info *priv, enum base_type base)
268 {
269 	if (base == DDRPHY_BASE) {
270 		return (uintptr_t)priv->phy;
271 	} else {
272 		return (uintptr_t)priv->ctl;
273 	}
274 }
275 
276 static void set_reg(const struct ddr_info *priv,
277 		    enum reg_type type,
278 		    const void *param)
279 {
280 	unsigned int i;
281 	unsigned int value;
282 	enum base_type base = ddr_registers[type].base;
283 	uintptr_t base_addr = get_base_addr(priv, base);
284 	const struct reg_desc *desc = ddr_registers[type].desc;
285 
286 	VERBOSE("init %s\n", ddr_registers[type].name);
287 	for (i = 0; i < ddr_registers[type].size; i++) {
288 		uintptr_t ptr = base_addr + desc[i].offset;
289 
290 		if (desc[i].par_offset == INVALID_OFFSET) {
291 			ERROR("invalid parameter offset for %s", desc[i].name);
292 			panic();
293 		} else {
294 			value = *((uint32_t *)((uintptr_t)param +
295 					       desc[i].par_offset));
296 			mmio_write_32(ptr, value);
297 		}
298 	}
299 }
300 
301 static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
302 {
303 	uint32_t pgsr;
304 	int error = 0;
305 	uint64_t timeout = timeout_init_us(TIMEOUT_US_1S);
306 
307 	do {
308 		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);
309 
310 		VERBOSE("  > [0x%lx] pgsr = 0x%x &\n",
311 			(uintptr_t)&phy->pgsr, pgsr);
312 
313 		if (timeout_elapsed(timeout)) {
314 			panic();
315 		}
316 
317 		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
318 			VERBOSE("DQS Gate Trainig Error\n");
319 			error++;
320 		}
321 
322 		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
323 			VERBOSE("DQS Gate Trainig Intermittent Error\n");
324 			error++;
325 		}
326 
327 		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
328 			VERBOSE("DQS Drift Error\n");
329 			error++;
330 		}
331 
332 		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
333 			VERBOSE("Read Valid Training Error\n");
334 			error++;
335 		}
336 
337 		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
338 			VERBOSE("Read Valid Training Intermittent Error\n");
339 			error++;
340 		}
341 	} while (((pgsr & DDRPHYC_PGSR_IDONE) == 0U) && (error == 0));
342 	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
343 		(uintptr_t)&phy->pgsr, pgsr);
344 }
345 
346 static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
347 {
348 	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;
349 
350 	mmio_write_32((uintptr_t)&phy->pir, pir_init);
351 	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
352 		(uintptr_t)&phy->pir, pir_init,
353 		mmio_read_32((uintptr_t)&phy->pir));
354 
355 	/* Need to wait 10 configuration clock before start polling */
356 	udelay(10);
357 
358 	/* Wait DRAM initialization and Gate Training Evaluation complete */
359 	stm32mp1_ddrphy_idone_wait(phy);
360 }
361 
362 /* Start quasi dynamic register update */
363 static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
364 {
365 	mmio_clrbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
366 	VERBOSE("[0x%lx] swctl = 0x%x\n",
367 		(uintptr_t)&ctl->swctl,  mmio_read_32((uintptr_t)&ctl->swctl));
368 }
369 
370 /* Wait quasi dynamic register update */
371 static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
372 {
373 	uint64_t timeout;
374 	uint32_t swstat;
375 
376 	mmio_setbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
377 	VERBOSE("[0x%lx] swctl = 0x%x\n",
378 		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));
379 
380 	timeout = timeout_init_us(TIMEOUT_US_1S);
381 	do {
382 		swstat = mmio_read_32((uintptr_t)&ctl->swstat);
383 		VERBOSE("[0x%lx] swstat = 0x%x ",
384 			(uintptr_t)&ctl->swstat, swstat);
385 		if (timeout_elapsed(timeout)) {
386 			panic();
387 		}
388 	} while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);
389 
390 	VERBOSE("[0x%lx] swstat = 0x%x\n",
391 		(uintptr_t)&ctl->swstat, swstat);
392 }
393 
394 /* Wait quasi dynamic register update */
395 static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
396 {
397 	uint64_t timeout;
398 	uint32_t stat;
399 	int break_loop = 0;
400 
401 	timeout = timeout_init_us(TIMEOUT_US_1S);
402 	for ( ; ; ) {
403 		uint32_t operating_mode;
404 		uint32_t selref_type;
405 
406 		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
407 		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
408 		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
409 		VERBOSE("[0x%lx] stat = 0x%x\n",
410 			(uintptr_t)&priv->ctl->stat, stat);
411 		if (timeout_elapsed(timeout)) {
412 			panic();
413 		}
414 
415 		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
416 			/*
417 			 * Self-refresh due to software
418 			 * => checking also STAT.selfref_type.
419 			 */
420 			if ((operating_mode ==
421 			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
422 			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
423 				break_loop = 1;
424 			}
425 		} else if (operating_mode == mode) {
426 			break_loop = 1;
427 		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
428 			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
429 			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
430 			/* Normal mode: handle also automatic self refresh */
431 			break_loop = 1;
432 		}
433 
434 		if (break_loop == 1) {
435 			break;
436 		}
437 	}
438 
439 	VERBOSE("[0x%lx] stat = 0x%x\n",
440 		(uintptr_t)&priv->ctl->stat, stat);
441 }
442 
/* Mode Register Writes (MRW or MRS) */
/*
 * Issue a Mode Register write to the SDRAM through MRCTRL0/MRCTRL1,
 * following the busy-check / program / trigger sequence required by
 * the controller.
 *
 * @priv: driver handle
 * @addr: mode register index (e.g. 1 for MR1, 2 for MR2)
 * @data: value to program into the mode register
 */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	/* Wait for the controller to complete the MR transaction */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
	       DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}
497 
/* Switch DDR3 from DLL-on to DLL-off */
/*
 * Performs the numbered DLL-off switch sequence: quiesce the HIF,
 * reprogram SDRAM mode registers via MRS, enter software self-refresh,
 * set MSTR.dll_off_mode and disable the PHY DLLs, then exit
 * self-refresh and re-enable host traffic.
 */
static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
{
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0 ,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	/*
	 * NOTE(review): this loop continues while the pipelines read as
	 * empty and the queue depths as zero, which looks inverted
	 * relative to the comment above — confirm against the uMCTL2
	 * databook before changing it.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers.
	 *     This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	/* Disable the AC and all byte-lane DLLs while in DLL-off mode */
	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
}
649 
/*
 * Disable auto-refresh, power-down and DFI init-complete before PHY
 * training, inside a quasi-dynamic programming window. The values to
 * restore afterwards are passed to stm32mp1_refresh_restore().
 */
static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Quasi-dynamic register update*/
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}
661 
/*
 * Restore the refresh/power-down configuration changed by
 * stm32mp1_refresh_disable(), using the original rfshctl3 and pwrctl
 * values from the DT configuration, and re-enable DFI init-complete.
 */
static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Only re-enable auto-refresh if it was enabled originally */
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	/* Only re-enable power-down if it was enabled originally */
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}
678 
679 static int board_ddr_power_init(enum ddr_type ddr_type)
680 {
681 	if (dt_pmic_status() > 0) {
682 		return pmic_ddr_power_init(ddr_type);
683 	}
684 
685 	return 0;
686 }
687 
/*
 * Full DDR bring-up: power supplies, resets and clocks, uMCTL2
 * register programming, PHY/PUBL initialization, DRAM init and DQS
 * training, then AXI port enable. Panics on any failure, since boot
 * cannot continue without DDR.
 *
 * @priv: driver handle (controller, PHY and RCC base addresses)
 * @config: register values and info extracted from the device tree
 */
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	/* Select the PMIC power sequence matching the DDR technology */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size  = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *     nota: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/*  2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 *  4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *     Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 *  5. Indicate to PUBL that controller performs SDRAM initialization
	 *     by setting PIR.INIT and PIR CTLDINIT and poll PGSR.IDONE
	 *     DRAM init is done by PHY, init0.skip_dram.init = 1
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 *  6. SET DFIMISC.dfi_init_complete_en to 1
	 *  Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 *  7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *     by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 *  8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_au_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfiinit_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 *  9. Program PUBL PGCR to enable refresh during training
	 *     and rank to train
	 *     not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 * to run
	 * RVTRN is executed only on LPDDR2/LPDDR3
	 */
	pir = DDRPHYC_PIR_QSTRN;
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) == 0U) {
		pir |= DDRPHYC_PIR_RVTRN;
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back registers in step 8 to the original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_0,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_0 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_0,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_1,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_1 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_1,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_1));
}
903