xref: /rk3399_ARM-atf/drivers/st/ddr/stm32mp2_ddr.c (revision 681296444e508e722565c6713effd2cf346a4dcf)
1 /*
2  * Copyright (C) 2021-2026, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <errno.h>
8 
9 #include <common/debug.h>
10 #include <ddrphy_phyinit.h>
11 #include <drivers/delay_timer.h>
12 #include <drivers/st/stm32mp2_ddr_helpers.h>
13 #include <drivers/st/stm32mp2_ddr_regs.h>
14 #include <drivers/st/stm32mp_ddr.h>
15 #include <lib/mmio.h>
16 
17 #include <platform_def.h>
18 
/* Fractional PLL lock status bit in the DDR debug registers */
#define DDRDBG_FRAC_PLL_LOCK	U(0x10)

/*
 * Build one register descriptor:
 *  - offset     : byte offset of register 'x' in the DDRCTRL register map,
 *  - par_offset : byte offset of the matching field in parameter struct 'y',
 *  - qd         : whether the register is quasi-dynamic, i.e. needs the
 *                 quasi-dynamic update sequence before being written.
 */
#define DDRCTL_REG(x, y, z)					\
	{							\
		.offset = offsetof(struct stm32mp_ddrctl, x),	\
		.par_offset = offsetof(struct y, x),		\
		.qd = z						\
	}

/*
 * PARAMETERS: values retrieved from the device tree.
 *             Sizes and ordering must stay aligned with the DT binding:
 *             modification NOT ALLOWED !!!
 */
#define DDRCTL_REG_REG_SIZE	48	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	20	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	12	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
#define DDRCTL_REG_PERF_SIZE	21	/* st,ctl-perf */
#else /* !STM32MP_DDR_DUAL_AXI_PORT */
#define DDRCTL_REG_PERF_SIZE	14	/* st,ctl-perf */
#endif /* STM32MP_DDR_DUAL_AXI_PORT */

/* No PHY registers are handled through the generic descriptor tables */
#define DDRPHY_REG_REG_SIZE	0	/* st,phy-reg */
#define	DDRPHY_REG_TIMING_SIZE	0	/* st,phy-timing */
#define DDRCTL_REG_REG(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_reg, z)
/*
 * Descriptors for the "st,ctl-reg" DT property (static controller
 * registers). Entry order and count match the binding — do not modify.
 * The boolean marks quasi-dynamic registers.
 */
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr, true),
	DDRCTL_REG_REG(mrctrl0, false),
	DDRCTL_REG_REG(mrctrl1, false),
	DDRCTL_REG_REG(mrctrl2, false),
	DDRCTL_REG_REG(derateen, true),
	DDRCTL_REG_REG(derateint, false),
	DDRCTL_REG_REG(deratectl, false),
	DDRCTL_REG_REG(pwrctl, false),
	DDRCTL_REG_REG(pwrtmg, true),
	DDRCTL_REG_REG(hwlpctl, true),
	DDRCTL_REG_REG(rfshctl0, false),
	DDRCTL_REG_REG(rfshctl1, false),
	DDRCTL_REG_REG(rfshctl3, true),
	DDRCTL_REG_REG(crcparctl0, false),
	DDRCTL_REG_REG(crcparctl1, false),
	DDRCTL_REG_REG(init0, true),
	DDRCTL_REG_REG(init1, false),
	DDRCTL_REG_REG(init2, false),
	DDRCTL_REG_REG(init3, true),
	DDRCTL_REG_REG(init4, true),
	DDRCTL_REG_REG(init5, false),
	DDRCTL_REG_REG(init6, true),
	DDRCTL_REG_REG(init7, true),
	DDRCTL_REG_REG(dimmctl, false),
	DDRCTL_REG_REG(rankctl, true),
	DDRCTL_REG_REG(rankctl1, true),
	DDRCTL_REG_REG(zqctl0, true),
	DDRCTL_REG_REG(zqctl1, false),
	DDRCTL_REG_REG(zqctl2, false),
	DDRCTL_REG_REG(dfitmg0, true),
	DDRCTL_REG_REG(dfitmg1, true),
	DDRCTL_REG_REG(dfilpcfg0, false),
	DDRCTL_REG_REG(dfilpcfg1, false),
	DDRCTL_REG_REG(dfiupd0, true),
	DDRCTL_REG_REG(dfiupd1, false),
	DDRCTL_REG_REG(dfiupd2, false),
	DDRCTL_REG_REG(dfimisc, true),
	DDRCTL_REG_REG(dfitmg2, true),
	DDRCTL_REG_REG(dfitmg3, false),
	DDRCTL_REG_REG(dbictl, true),
	DDRCTL_REG_REG(dfiphymstr, false),
	DDRCTL_REG_REG(dbg0, false),
	DDRCTL_REG_REG(dbg1, false),
	DDRCTL_REG_REG(dbgcmd, false),
	DDRCTL_REG_REG(swctl, false), /* forced qd value */
	DDRCTL_REG_REG(swctlstatic, false),
	DDRCTL_REG_REG(poisoncfg, false),
	DDRCTL_REG_REG(pccfg, false),
};
96 
#define DDRCTL_REG_TIMING(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_timing, z)
/*
 * Descriptors for the "st,ctl-timing" DT property (controller timing
 * registers). Entry order and count match the binding — do not modify.
 */
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg, false),
	DDRCTL_REG_TIMING(rfshtmg1, false),
	DDRCTL_REG_TIMING(dramtmg0, true),
	DDRCTL_REG_TIMING(dramtmg1, true),
	DDRCTL_REG_TIMING(dramtmg2, true),
	DDRCTL_REG_TIMING(dramtmg3, true),
	DDRCTL_REG_TIMING(dramtmg4, true),
	DDRCTL_REG_TIMING(dramtmg5, true),
	DDRCTL_REG_TIMING(dramtmg6, true),
	DDRCTL_REG_TIMING(dramtmg7, true),
	DDRCTL_REG_TIMING(dramtmg8, true),
	DDRCTL_REG_TIMING(dramtmg9, true),
	DDRCTL_REG_TIMING(dramtmg10, true),
	DDRCTL_REG_TIMING(dramtmg11, true),
	DDRCTL_REG_TIMING(dramtmg12, true),
	DDRCTL_REG_TIMING(dramtmg13, true),
	DDRCTL_REG_TIMING(dramtmg14, true),
	DDRCTL_REG_TIMING(dramtmg15, true),
	DDRCTL_REG_TIMING(odtcfg, true),
	DDRCTL_REG_TIMING(odtmap, false),
};
120 
#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp2_ddrctrl_map, false)
/*
 * Descriptors for the "st,ctl-map" DT property (address mapping
 * registers, none quasi-dynamic). Order/count match the binding.
 */
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap0),
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap7),
	DDRCTL_REG_MAP(addrmap8),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};
136 
#define DDRCTL_REG_PERF(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_perf, z)
/*
 * Descriptors for the "st,ctl-perf" DT property (scheduler/QoS and AXI
 * port registers). Order/count match the binding; the second AXI port
 * entries exist only on dual-AXI-port platforms.
 */
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched, true),
	DDRCTL_REG_PERF(sched1, false),
	DDRCTL_REG_PERF(perfhpr1, true),
	DDRCTL_REG_PERF(perflpr1, true),
	DDRCTL_REG_PERF(perfwr1, true),
	DDRCTL_REG_PERF(sched3, false),
	DDRCTL_REG_PERF(sched4, false),
	DDRCTL_REG_PERF(pcfgr_0, false),
	DDRCTL_REG_PERF(pcfgw_0, false),
	DDRCTL_REG_PERF(pctrl_0, false),
	DDRCTL_REG_PERF(pcfgqos0_0, true),
	DDRCTL_REG_PERF(pcfgqos1_0, true),
	DDRCTL_REG_PERF(pcfgwqos0_0, true),
	DDRCTL_REG_PERF(pcfgwqos1_0, true),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1, false),
	DDRCTL_REG_PERF(pcfgw_1, false),
	DDRCTL_REG_PERF(pctrl_1, false),
	DDRCTL_REG_PERF(pcfgqos0_1, true),
	DDRCTL_REG_PERF(pcfgqos1_1, true),
	DDRCTL_REG_PERF(pcfgwqos0_1, true),
	DDRCTL_REG_PERF(pcfgwqos1_1, true),
#endif /* STM32MP_DDR_DUAL_AXI_PORT */
};
163 
/* PHY registers are programmed by ddrphy_phyinit, not via these tables */
static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {};

static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {};
167 
/*
 * REGISTERS ARRAY: used to parse device tree and interactive mode.
 * One entry per register category, pairing a descriptor table with the
 * base address (controller or PHY) the offsets apply to.
 */
static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] __unused = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = DDRCTL_REG_REG_SIZE,
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = DDRCTL_REG_TIMING_SIZE,
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = DDRCTL_REG_PERF_SIZE,
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = DDRCTL_REG_MAP_SIZE,
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = DDRPHY_REG_REG_SIZE,
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = DDRPHY_REG_TIMING_SIZE,
		.base = DDRPHY_BASE
	},
};
209 
/*
 * Full reset of the DDR subsystem (cold-boot path).
 *
 * First pass asserts the resets of the DDR interface, PHY APB, controller
 * APB and configuration interfaces while enabling their clocks; second
 * pass rewrites the same registers with the reset bits cleared (clocks
 * kept enabled). The udelay() calls pace the reset pulse — the exact
 * write/delay ordering is required by the hardware.
 */
static void ddr_reset(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	/* Assert resets, clocks enabled */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN |
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN |
		      RCC_DDRCAPBCFGR_DDRCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCFGR,
		      RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN | RCC_DDRCFGR_DDRCFGRST);

	udelay(DDR_DELAY_1US);

	/* Release APB/cfg resets; DDRRST itself stays asserted here */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN);
	mmio_write_32(priv->rcc + RCC_DDRCFGR, RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN);

	udelay(DDR_DELAY_1US);
}
235 
/*
 * Partial reset used on the standby/self-refresh exit path: resets the
 * DDR interface and APB bridges while keeping DRAM content (PHY companion
 * clock re-enabled, deep-low-power mode exited), so the controller can be
 * reprogrammed without losing data held in self-refresh.
 */
static void ddr_standby_reset(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	/* Assert resets with clocks enabled on ctrl port, itf, PHY/ctrl APB */
	mmio_write_32(priv->rcc + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN |
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN |
		      RCC_DDRCAPBCFGR_DDRCAPBRST);

	/* Exit PHY deep low-power mode and enable the PHY companion clock */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);
	mmio_setbits_32(priv->rcc + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);

	udelay(DDR_DELAY_1US);
}
255 
/*
 * Release the resets asserted by ddr_standby_reset() once the controller
 * has been reprogrammed: ctrl port clock re-enabled without reset, DDR
 * interface and PHY APB resets de-asserted, configuration clock enabled.
 */
static void ddr_standby_reset_release(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	mmio_write_32(priv->rcc + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRPHYCAPBCFGR, RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCFGR, RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN);

	udelay(DDR_DELAY_1US);
}
267 
/*
 * Program the DDR debug/sysconf side of the interface (cold-boot path):
 * disables the XPI/DDRC low-power interfaces, applies the PLL-bypass
 * PCLK setting taken from the PHY user input (config->uib.pllbypass),
 * enables the PHY companion clock, and keeps the DDR interface in reset.
 */
static void ddr_sysconf_configuration(struct stm32mp_ddr_priv *priv,
				      struct stm32mp_ddr_config *config)
{
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE,
		      DDRDBG_LP_DISABLE_LPI_XPI_DISABLE | DDRDBG_LP_DISABLE_LPI_DDRC_DISABLE);

	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_BYPASS_PCLKEN,
		      (uint32_t)config->uib.pllbypass);

	mmio_write_32(priv->rcc + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);

	udelay(DDR_DELAY_1US);
}
282 
/*
 * Set or clear dfimisc.dfi_init_complete_en under the quasi-dynamic
 * (Group 3) register update conditions.
 *
 * phy_init_done: true once the PHY has completed its initialization
 * (signal completion to the controller), false to make the controller
 * wait for PHY init.
 */
static void set_dfi_init_complete_en(struct stm32mp_ddrctl *ctl, bool phy_init_done)
{
	uintptr_t dfimisc_addr = (uintptr_t)&ctl->dfimisc;

	/* dfimisc.dfi_init_complete_en is a Group 3 quasi-dynamic field */
	stm32mp_ddr_set_qd3_update_conditions(ctl);

	udelay(DDR_DELAY_1US);

	if (!phy_init_done) {
		/* PHY not initialized yet: controller must wait for completion */
		mmio_clrbits_32(dfimisc_addr, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	} else {
		/* Indicates to controller that PHY has completed initialization */
		mmio_setbits_32(dfimisc_addr, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	}

	udelay(DDR_DELAY_1US);

	stm32mp_ddr_unset_qd3_update_conditions(ctl);
}
306 
/*
 * Disable DRAM auto-refresh and low-power entry before touching the PHY:
 * sets rfshctl3.dis_auto_refresh (and waits for the controller to
 * acknowledge the refresh-update), clears powerdown/self-refresh enables,
 * then clears dfi_init_complete_en so the controller waits for PHY init.
 */
static void disable_refresh(struct stm32mp_ddrctl *ctl)
{
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);

	stm32mp_ddr_wait_refresh_update_done_ack(ctl);

	udelay(DDR_DELAY_1US);

	mmio_clrbits_32((uintptr_t)&ctl->pwrctl,
			DDRCTRL_PWRCTL_POWERDOWN_EN | DDRCTRL_PWRCTL_SELFREF_EN);

	udelay(DDR_DELAY_1US);

	set_dfi_init_complete_en(ctl, false);
}
322 
/*
 * Restore the refresh and low-power settings saved from the DT config
 * after PHY initialization, undoing disable_refresh().
 *
 * rfshctl3/pwrctl carry the target register values: each bit is only
 * restored if the target value requests it, and SELFREF_SW (forced on
 * for LPDDR4 during init) is cleared when the target does not keep it.
 * Finally signals PHY init completion to the controller.
 */
static void restore_refresh(struct stm32mp_ddrctl *ctl, uint32_t rfshctl3, uint32_t pwrctl)
{
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		/* Target config wants auto-refresh: re-enable it */
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);

		stm32mp_ddr_wait_refresh_update_done_ack(ctl);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_SELFREF_SW) != 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_SW);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_SELFREF_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_EN);

		udelay(DDR_DELAY_1US);
	}

	set_dfi_init_complete_en(ctl, true);
}
353 
/*
 * Main DDR initialization entry point.
 *
 * priv:   driver private data (RCC/PWR/controller base addresses).
 * config: settings parsed from the device tree; config->self_refresh
 *         selects the standby-exit path (DRAM content preserved in
 *         self-refresh) versus the cold-boot path (full reset + training).
 *
 * Panics on unsupported DDR type, board power init failure, PHY init
 * failure or self-refresh exit failure. When DDR CID filtering is
 * enabled it is temporarily lifted around each PWR_CR11 access.
 */
void stm32mp2_ddr_init(struct stm32mp_ddr_priv *priv,
		       struct stm32mp_ddr_config *config)
{
	int ret;
	uint32_t ddr_retdis;
	enum ddr_type ddr_type;
	bool cid_filtering = is_ddr_cid_filtering_enabled();

	/* Derive the DDR technology from the mstr register value in DT */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ddr_type = STM32MP_DDR3;
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR4) != 0U) {
		ddr_type = STM32MP_DDR4;
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR4) != 0U) {
		ddr_type = STM32MP_LPDDR4;
	} else {
		ERROR("DDR type not supported\n");
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size  = 0x%zx\n", config->info.size);
	if (config->self_refresh) {
		VERBOSE("sel-refresh exit (zdata = 0x%x)\n", config->zdata);
	}

	/* Check DDR PHY pads retention */
	if (cid_filtering) {
		ddr_disable_cid_filtering();
	}
	ddr_retdis = mmio_read_32(priv->pwr + PWR_CR11) & PWR_CR11_DDRRETDIS;
	if (cid_filtering) {
		ddr_enable_cid_filtering();
	}

	/* Without pad retention the DRAM content is lost: fall back to cold boot */
	if (config->self_refresh) {
		if (ddr_retdis == PWR_CR11_DDRRETDIS) {
			VERBOSE("self-refresh aborted: no retention\n");
			config->self_refresh = false;
		}
	}

	if (config->self_refresh) {
		ddr_standby_reset(priv);

		VERBOSE("disable DDR PHY retention\n");
		if (cid_filtering) {
			ddr_disable_cid_filtering();
		}
		mmio_setbits_32(priv->pwr + PWR_CR11, PWR_CR11_DDRRETDIS);
		if (cid_filtering) {
			ddr_enable_cid_filtering();
		}

		udelay(DDR_DELAY_1US);

		mmio_clrbits_32(priv->rcc + RCC_DDRCAPBCFGR, RCC_DDRCAPBCFGR_DDRCAPBRST);

		udelay(DDR_DELAY_1US);

	} else {
		/* Cold boot: power the DRAM rails, then full reset + sysconf */
		if (stm32mp_board_ddr_power_init(ddr_type) != 0) {
			ERROR("DDR power init failed\n");
			panic();
		}

		ddr_reset(priv);

		ddr_sysconf_configuration(priv, config);
	}

#if STM32MP_LPDDR4_TYPE
	/*
	 * Enable PWRCTL.SELFREF_SW to ensure correct setting of PWRCTL.LPDDR4_SR_ALLOWED.
	 * Later disabled in restore_refresh().
	 */
	config->c_reg.pwrctl |= DDRCTRL_PWRCTL_SELFREF_SW;
#endif /* STM32MP_LPDDR4_TYPE */

	/* Program the controller registers from the DT-provided values */
	stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);

	if (!config->self_refresh) {
		VERBOSE("disable DDR PHY retention\n");
		if (cid_filtering) {
			ddr_disable_cid_filtering();
		}
		mmio_setbits_32(priv->pwr + PWR_CR11, PWR_CR11_DDRRETDIS);
		if (cid_filtering) {
			ddr_enable_cid_filtering();
		}

		udelay(DDR_DELAY_1US);

		/*  DDR core and PHY reset de-assert */
		mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);

		udelay(DDR_DELAY_1US);

		disable_refresh(priv->ctl);
	}

	if (config->self_refresh) {
		ddr_standby_reset_release(priv);

		/* Initialize DDR by skipping training and disabling result saving */
		ret = ddrphy_phyinit_sequence(config, true, false);

		if (ret == 0) {
			ret = ddrphy_phyinit_restore_sequence();
		}

		/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 0 */
		ddr_wait_lp3_mode(false);
	} else {
		/* Initialize DDR including training and result saving */
		ret = ddrphy_phyinit_sequence(config, false, true);
	}

	if (ret != 0) {
		ERROR("DDR PHY init: Error %d\n", ret);
		panic();
	}

	ddr_activate_controller(priv->ctl, false);

	if (config->self_refresh) {
		struct stm32mp_ddrctl *ctl = priv->ctl;

		/* SW self refresh exit requested */
		mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_SW);

		if (ddr_sr_exit_loop() != 0) {
			ERROR("DDR Standby exit error\n");
			panic();
		}

		/* Re-enable DFI low-power interface */
		mmio_clrbits_32((uintptr_t)&ctl->dfilpcfg0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);
	} else {
		restore_refresh(priv->ctl, config->c_reg.rfshctl3, config->c_reg.pwrctl);
	}

	/* Open the AXI port(s) so masters can access DRAM */
	stm32mp_ddr_enable_axi_port(priv->ctl);
}
501