xref: /rk3399_ARM-atf/drivers/st/ddr/stm32mp2_ddr_helpers.c (revision 77586339b491783e705a3e6c05ae9304b64f2f64)
1 /*
2  * Copyright (c) 2024-2026, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <errno.h>
8 
9 #include <arch_helpers.h>
10 #include <common/debug.h>
11 #include <drivers/delay_timer.h>
12 #include <drivers/st/stm32mp2_ddr.h>
13 #include <drivers/st/stm32mp2_ddr_helpers.h>
14 #include <drivers/st/stm32mp2_ddr_regs.h>
15 #include <drivers/st/stm32mp_ddr.h>
16 #include <lib/mmio.h>
17 
18 #include <platform_def.h>
19 
20 /* HW idle period (unit: Multiples of 32 DFI clock cycles) */
21 #define HW_IDLE_PERIOD			0x3U
22 
/* Self-Refresh mode currently programmed; updated by ddr_set_sr_mode() */
static enum stm32mp2_ddr_sr_mode saved_ddr_sr_mode;
/* RCC R104 semaphore mutex bit saved by ddr_disable_cid_filtering() */
static uint32_t saved_sem_mutex;
25 
26 #pragma weak stm32_ddrdbg_get_base
/* Weak default: platforms without a DDR debug block report a NULL base. */
uintptr_t stm32_ddrdbg_get_base(void)
{
	uintptr_t base = 0U;

	return base;
}
31 
/*
 * Prepare modification of quasi-dynamic registers (Groups 1 and 3):
 * freeze DQ via dbg1 first, then apply the Group 3 update conditions.
 */
static void set_qd1_qd3_update_conditions(struct stm32mp_ddrctl *ctl)
{
	/* Stop dequeuing transactions before touching quasi-dynamic registers */
	mmio_setbits_32((uintptr_t)&ctl->dbg1, DDRCTRL_DBG1_DIS_DQ);

	stm32mp_ddr_set_qd3_update_conditions(ctl);
}
38 
/*
 * Revert set_qd1_qd3_update_conditions() in reverse order:
 * clear the Group 3 update conditions first, then re-enable DQ.
 */
static void unset_qd1_qd3_update_conditions(struct stm32mp_ddrctl *ctl)
{
	stm32mp_ddr_unset_qd3_update_conditions(ctl);

	mmio_clrbits_32((uintptr_t)&ctl->dbg1, DDRCTRL_DBG1_DIS_DQ);
}
45 
wait_dfi_init_complete(struct stm32mp_ddrctl * ctl)46 static void wait_dfi_init_complete(struct stm32mp_ddrctl *ctl)
47 {
48 	uint64_t timeout;
49 	uint32_t dfistat;
50 
51 	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
52 	do {
53 		dfistat = mmio_read_32((uintptr_t)&ctl->dfistat);
54 		VERBOSE("[0x%lx] dfistat = 0x%x ", (uintptr_t)&ctl->dfistat, dfistat);
55 
56 		if (timeout_elapsed(timeout)) {
57 			panic();
58 		}
59 	} while ((dfistat & DDRCTRL_DFISTAT_DFI_INIT_COMPLETE) == 0U);
60 
61 	VERBOSE("[0x%lx] dfistat = 0x%x\n", (uintptr_t)&ctl->dfistat, dfistat);
62 }
63 
disable_dfi_low_power_interface(struct stm32mp_ddrctl * ctl)64 static void disable_dfi_low_power_interface(struct stm32mp_ddrctl *ctl)
65 {
66 	uint64_t timeout;
67 	uint32_t dfistat;
68 	uint32_t stat;
69 
70 	mmio_clrbits_32((uintptr_t)&ctl->dfilpcfg0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);
71 
72 	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
73 	do {
74 		dfistat = mmio_read_32((uintptr_t)&ctl->dfistat);
75 		stat = mmio_read_32((uintptr_t)&ctl->stat);
76 		VERBOSE("[0x%lx] dfistat = 0x%x ", (uintptr_t)&ctl->dfistat, dfistat);
77 		VERBOSE("[0x%lx] stat = 0x%x ", (uintptr_t)&ctl->stat, stat);
78 
79 		if (timeout_elapsed(timeout)) {
80 			panic();
81 		}
82 	} while (((dfistat & DDRCTRL_DFISTAT_DFI_LP_ACK) != 0U) ||
83 		 ((stat & DDRCTRL_STAT_OPERATING_MODE_MASK) == DDRCTRL_STAT_OPERATING_MODE_SR));
84 
85 	VERBOSE("[0x%lx] dfistat = 0x%x\n", (uintptr_t)&ctl->dfistat, dfistat);
86 	VERBOSE("[0x%lx] stat = 0x%x\n", (uintptr_t)&ctl->stat, stat);
87 }
88 
/*
 * Run the DFI frequency-change handshake between controller and PHY.
 * sr_entry selects the dfimisc.dfi_frequency value programmed before the
 * dfi_init_start pulse, and the polarity of dfi_init_complete_en afterwards
 * (cleared on self-refresh entry, set on exit).
 */
void ddr_activate_controller(struct stm32mp_ddrctl *ctl, bool sr_entry)
{
	/*
	 * Manage quasi-dynamic registers modification
	 * dfimisc.dfi_frequency : Group 1
	 * dfimisc.dfi_init_complete_en and dfimisc.dfi_init_start : Group 3
	 */
	set_qd1_qd3_update_conditions(ctl);

	if (sr_entry) {
		mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_FREQUENCY);
	} else {
		mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_FREQUENCY);
	}

	/* Pulse dfi_init_start to trigger the handshake */
	mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_START);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_START);

	wait_dfi_init_complete(ctl);

	udelay(DDR_DELAY_1US);

	if (sr_entry) {
		mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	} else {
		mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	}

	udelay(DDR_DELAY_1US);

	unset_qd1_qd3_update_conditions(ctl);
}
121 
122 #if STM32MP_LPDDR4_TYPE
/*
 * Disable the PHY DRAM drift compensation (LPDDR4 only).
 * APB access to the PHY internal CSRs is enabled on entry and disabled again
 * on exit (keeping only HCLKEN set).
 */
static void disable_phy_ddc(void)
{
	/* Enable APB access to internal CSR registers */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL, 0U);
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_UCCLKEN |
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);

	/* Disable DRAM drift compensation */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_INITENG0_P0_SEQ0BDISABLEFLAG6, 0xFFFFU);

	/* Disable APB access to internal CSR registers */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL,
		      DDRPHY_APBONLY0_MICROCONTMUXSEL_MICROCONTMUXSEL);
}
140 #endif /* STM32MP_LPDDR4_TYPE */
141 
ddr_wait_lp3_mode(bool sr_entry)142 void ddr_wait_lp3_mode(bool sr_entry)
143 {
144 	uint64_t timeout;
145 	bool repeat_loop = false;
146 
147 	/* Enable APB access to internal CSR registers */
148 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL, 0U);
149 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
150 		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_UCCLKEN |
151 		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
152 
153 	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
154 	do {
155 		uint16_t phyinlpx = mmio_read_32(stm32mp_ddrphyc_base() +
156 						 DDRPHY_INITENG0_P0_PHYINLPX);
157 
158 		if (timeout_elapsed(timeout)) {
159 			panic();
160 		}
161 
162 		if (sr_entry) {
163 			repeat_loop = (phyinlpx & DDRPHY_INITENG0_P0_PHYINLPX_PHYINLP3) == 0U;
164 		} else {
165 			repeat_loop = (phyinlpx & DDRPHY_INITENG0_P0_PHYINLPX_PHYINLP3) != 0U;
166 		}
167 	} while (repeat_loop);
168 
169 	/* Disable APB access to internal CSR registers */
170 #if STM32MP_DDR3_TYPE || STM32MP_DDR4_TYPE
171 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES, 0U);
172 #else /* STM32MP_LPDDR4_TYPE */
173 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
174 		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
175 #endif /* STM32MP_DDR3_TYPE || STM32MP_DDR4_TYPE */
176 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL,
177 		      DDRPHY_APBONLY0_MICROCONTMUXSEL_MICROCONTMUXSEL);
178 }
179 
sr_loop(bool is_entry)180 static int sr_loop(bool is_entry)
181 {
182 	uint32_t type;
183 	uint32_t state __maybe_unused;
184 	uint64_t timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
185 	bool repeat_loop = false;
186 
187 	/*
188 	 * Wait for DDRCTRL to be out of or back to "normal/mission mode".
189 	 * Consider also SRPD mode for LPDDR4 only.
190 	 */
191 	do {
192 		type = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_STAT) &
193 		       DDRCTRL_STAT_SELFREF_TYPE_MASK;
194 #if STM32MP_LPDDR4_TYPE
195 		state = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_STAT) &
196 		       DDRCTRL_STAT_SELFREF_STATE_MASK;
197 #endif /* STM32MP_LPDDR4_TYPE */
198 
199 		if (timeout_elapsed(timeout)) {
200 			return -ETIMEDOUT;
201 		}
202 
203 		if (is_entry) {
204 #if STM32MP_LPDDR4_TYPE
205 			repeat_loop = (type == 0x0U) || (state != DDRCTRL_STAT_SELFREF_STATE_SRPD);
206 #else /* !STM32MP_LPDDR4_TYPE */
207 			repeat_loop = (type == 0x0U);
208 #endif /* STM32MP_LPDDR4_TYPE */
209 		} else {
210 #if STM32MP_LPDDR4_TYPE
211 			repeat_loop = (type != 0x0U) || (state != 0x0U);
212 #else /* !STM32MP_LPDDR4_TYPE */
213 			repeat_loop = (type != 0x0U);
214 #endif /* STM32MP_LPDDR4_TYPE */
215 		}
216 	} while (repeat_loop);
217 
218 	return 0;
219 }
220 
/* Wait for self-refresh entry; 0 on success, -ETIMEDOUT otherwise. */
static int sr_entry_loop(void)
{
	return sr_loop(true);
}
225 
/* Wait for self-refresh exit; 0 on success, -ETIMEDOUT otherwise. */
int ddr_sr_exit_loop(void)
{
	return sr_loop(false);
}
230 
is_ddr_cid_filtering_enabled(void)231 bool is_ddr_cid_filtering_enabled(void)
232 {
233 	return (mmio_read_32(stm32mp_rcc_base() + RCC_R104CIDCFGR) & RCC_R104CIDCFGR_CFEN) ==
234 	       RCC_R104CIDCFGR_CFEN;
235 }
236 
/*
 * Re-enable RCC CID filtering on the DDR resource (R104), and re-take the
 * semaphore if it was held when ddr_disable_cid_filtering() released it.
 */
void ddr_enable_cid_filtering(void)
{
	mmio_setbits_32(stm32mp_rcc_base() + RCC_R104CIDCFGR, RCC_R104CIDCFGR_CFEN);
	/* Restore semaphore ownership saved by ddr_disable_cid_filtering() */
	if (saved_sem_mutex != 0U) {
		mmio_setbits_32(stm32mp_rcc_base() + RCC_R104SEMCR, RCC_R104SEMCR_SEM_MUTEX);
	}
}
244 
/*
 * Disable RCC CID filtering on the DDR resource (R104).
 * The semaphore mutex state is saved first so that
 * ddr_enable_cid_filtering() can restore it later.
 */
void ddr_disable_cid_filtering(void)
{
	/*
	 * Save the current mutex state to restore it later,
	 * since disabling CID filtering automatically releases the
	 * semaphore.
	 */
	saved_sem_mutex = mmio_read_32(stm32mp_rcc_base() + RCC_R104SEMCR) &
			  RCC_R104SEMCR_SEM_MUTEX;
	mmio_clrbits_32(stm32mp_rcc_base() + RCC_R104CIDCFGR, RCC_R104CIDCFGR_CFEN);
}
256 
/*
 * Configure the controller for SW Self-Refresh (SSR) mode: disable the
 * automatic low-power features so entry/exit is fully software driven.
 * Always returns 0.
 */
static int sr_ssr_set(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();

	/*
	 * Disable Clock disable with LP modes
	 * (used in RUN mode for LPDDR2 with specific timing).
	 */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE);

	/* Disable automatic Self-Refresh mode */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_EN);

	/* Block XPI and DDRC low-power interface requests */
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE,
		      DDRDBG_LP_DISABLE_LPI_XPI_DISABLE | DDRDBG_LP_DISABLE_LPI_DDRC_DISABLE);

	return 0;
}
275 
/*
 * SW Self-Refresh (SSR) entry sequence.
 * standby: when true, also disables PHY drift compensation (LPDDR4) and
 * enables DDR IO retention (temporarily dropping CID filtering to write PWR).
 * Returns 0 on success, -1 if self-refresh entry times out; panics if the
 * AXI port cannot be disabled.
 */
static int sr_ssr_entry(bool standby)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();
	uintptr_t rcc_base = stm32mp_rcc_base();

	if (stm32mp_ddr_disable_axi_port((struct stm32mp_ddrctl *)ddrctrl_base) != 0) {
		panic();
	}

#if STM32MP_LPDDR4_TYPE
	if (standby) {
		/* Disable DRAM drift compensation */
		disable_phy_ddc();
	}
#endif /* STM32MP_LPDDR4_TYPE */

	disable_dfi_low_power_interface((struct stm32mp_ddrctl *)ddrctrl_base);

	/* SW self refresh entry prequested */
	mmio_setbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);
#if STM32MP_LPDDR4_TYPE
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_STAY_IN_SELFREF);
#endif /* STM32MP_LPDDR4_TYPE */

	if (sr_entry_loop() != 0) {
		return -1;
	}

	/* Run the DFI frequency handshake for self-refresh entry */
	ddr_activate_controller((struct stm32mp_ddrctl *)ddrctrl_base, true);

	/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 1 */
	ddr_wait_lp3_mode(true);

	if (standby) {
		bool cid_filtering = is_ddr_cid_filtering_enabled();

		/* PWR write requires CID filtering to be lifted momentarily */
		if (cid_filtering) {
			ddr_disable_cid_filtering();
		}
		mmio_clrbits_32(stm32mp_pwr_base() + PWR_CR11, PWR_CR11_DDRRETDIS);
		if (cid_filtering) {
			ddr_enable_cid_filtering();
		}

		udelay(DDR_DELAY_1US);
	}

	/* Drop controller bus clock to LP, keep PHY clocked, enter PHY DLP */
	mmio_clrsetbits_32(rcc_base + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPLPEN,
			   RCC_DDRCPCFGR_DDRCPEN);
	mmio_setbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_setbits_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);

	return 0;
}
330 
/*
 * SW Self-Refresh (SSR) exit sequence: mirror of sr_ssr_entry().
 * Restores clocks, runs the DFI handshake, clears the SW self-refresh
 * request and re-enables the DFI LP interface and the AXI port.
 * Returns 0 on success, -1 if self-refresh exit times out.
 */
static int sr_ssr_exit(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();
	uintptr_t rcc_base = stm32mp_rcc_base();

	/* Restore controller/PHY clocks and leave PHY deep low-power */
	mmio_setbits_32(rcc_base + RCC_DDRCPCFGR,
			RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPEN);
	mmio_clrbits_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);
	mmio_setbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);

	udelay(DDR_DELAY_1US);

	/* Run the DFI frequency handshake for self-refresh exit */
	ddr_activate_controller((struct stm32mp_ddrctl *)ddrctrl_base, false);

	/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 0 */
	ddr_wait_lp3_mode(false);

	/* SW self refresh exit prequested */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);

	if (ddr_sr_exit_loop() != 0) {
		return -1;
	}

	/* Re-enable DFI low-power interface */
	mmio_setbits_32(ddrctrl_base + DDRCTRL_DFILPCFG0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);

	stm32mp_ddr_enable_axi_port((struct stm32mp_ddrctl *)ddrctrl_base);

	return 0;
}
362 
/*
 * Configure HW Self-Refresh (HSR) mode: select the HSR DDR clock mode in
 * RCC and enable hardware low-power control in the controller.
 * The HWLPCTL write is a quasi-dynamic (Group 2) register update and is
 * bracketed by SW self-refresh entry/exit and the sw_done protocol.
 * Always returns 0; panics if SW self-refresh entry fails.
 */
static int sr_hsr_set(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();

	mmio_clrsetbits_32(stm32mp_rcc_base() + RCC_DDRITFCFGR,
			   RCC_DDRITFCFGR_DDRCKMOD_MASK, RCC_DDRITFCFGR_DDRCKMOD_HSR);

	/*
	 * manage quasi-dynamic registers modification
	 * hwlpctl.hw_lp_en : Group 2
	 */
	if (stm32mp_ddr_sw_selfref_entry((struct stm32mp_ddrctl *)ddrctrl_base) != 0) {
		panic();
	}
	stm32mp_ddr_start_sw_done((struct stm32mp_ddrctl *)ddrctrl_base);

	/* Enable HW LP with exit-on-idle, using the fixed idle period */
	mmio_write_32(ddrctrl_base + DDRCTRL_HWLPCTL,
		      DDRCTRL_HWLPCTL_HW_LP_EN | DDRCTRL_HWLPCTL_HW_LP_EXIT_IDLE_EN |
		      (HW_IDLE_PERIOD << DDRCTRL_HWLPCTL_HW_LP_IDLE_X32_SHIFT));

	stm32mp_ddr_wait_sw_done_ack((struct stm32mp_ddrctl *)ddrctrl_base);
	stm32mp_ddr_sw_selfref_exit((struct stm32mp_ddrctl *)ddrctrl_base);

	return 0;
}
388 
/*
 * HW Self-Refresh entry: keep only the DDRC low-power clock enabled and
 * wait for the controller to report self-refresh.
 */
static int sr_hsr_entry(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPLPEN);

	return sr_entry_loop(); /* read_data should be equal to 0x223 */
}
395 
/*
 * HW Self-Refresh exit: re-enable the DDRC bus clock (LP clock kept).
 * Always returns 0; exit completion is handled by hardware.
 */
static int sr_hsr_exit(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPEN);

	return 0;
}
403 
/*
 * Configure Automatic Self-Refresh (ASR) mode: clear the low-power
 * interface disable flags so the hardware may request self-refresh.
 * Always returns 0.
 */
static int sr_asr_set(void)
{
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE, 0U);

	return 0;
}
410 
/* Automatic Self-Refresh entry: just wait for hardware to enter SR. */
static int sr_asr_entry(void)
{
	/*
	 * Automatically enter into self refresh when there is no ddr traffic
	 * for the delay programmed into SYSCONF_DDRC_AUTO_SR_DELAY register.
	 * Default value is 0x20 (unit: Multiples of 32 DFI clock cycles).
	 */
	return sr_entry_loop();
}
420 
/* Automatic Self-Refresh exit: wait for hardware to leave SR. */
static int sr_asr_exit(void)
{
	return ddr_sr_exit_loop();
}
425 
ddr_sr_entry(bool standby)426 int ddr_sr_entry(bool standby)
427 {
428 	int ret = -EINVAL;
429 
430 	switch (saved_ddr_sr_mode) {
431 	case DDR_SSR_MODE:
432 		ret = sr_ssr_entry(standby);
433 		break;
434 	case DDR_HSR_MODE:
435 		ret = sr_hsr_entry();
436 		break;
437 	case DDR_ASR_MODE:
438 		ret = sr_asr_entry();
439 		break;
440 	default:
441 		break;
442 	}
443 
444 	return ret;
445 }
446 
ddr_sr_exit(void)447 int ddr_sr_exit(void)
448 {
449 	int ret = -EINVAL;
450 
451 	switch (saved_ddr_sr_mode) {
452 	case DDR_SSR_MODE:
453 		ret = sr_ssr_exit();
454 		break;
455 	case DDR_HSR_MODE:
456 		ret = sr_hsr_exit();
457 		break;
458 	case DDR_ASR_MODE:
459 		ret = sr_asr_exit();
460 		break;
461 	default:
462 		break;
463 	}
464 
465 	return ret;
466 }
467 
ddr_read_sr_mode(void)468 enum stm32mp2_ddr_sr_mode ddr_read_sr_mode(void)
469 {
470 	uint32_t pwrctl = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_PWRCTL);
471 	enum stm32mp2_ddr_sr_mode mode = DDR_SR_MODE_INVALID;
472 
473 	switch (pwrctl & (DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE |
474 			  DDRCTRL_PWRCTL_SELFREF_EN)) {
475 	case 0U:
476 		mode = DDR_SSR_MODE;
477 		break;
478 	case DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE:
479 		mode = DDR_HSR_MODE;
480 		break;
481 	case DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE | DDRCTRL_PWRCTL_SELFREF_EN:
482 		mode = DDR_ASR_MODE;
483 		break;
484 	default:
485 		break;
486 	}
487 
488 	return mode;
489 }
490 
ddr_set_sr_mode(enum stm32mp2_ddr_sr_mode mode)491 void ddr_set_sr_mode(enum stm32mp2_ddr_sr_mode mode)
492 {
493 	int ret = -EINVAL;
494 
495 	if (mode == saved_ddr_sr_mode) {
496 		return;
497 	}
498 
499 	switch (mode) {
500 	case DDR_SSR_MODE:
501 		ret = sr_ssr_set();
502 		break;
503 	case DDR_HSR_MODE:
504 		ret = sr_hsr_set();
505 		break;
506 	case DDR_ASR_MODE:
507 		ret = sr_asr_set();
508 		break;
509 	default:
510 		break;
511 	}
512 
513 	if (ret != 0) {
514 		ERROR("Unknown Self Refresh mode\n");
515 		panic();
516 	}
517 
518 	saved_ddr_sr_mode = mode;
519 }
520 
/* Capture the hardware's current Self-Refresh mode for later restore. */
void ddr_save_sr_mode(void)
{
	saved_ddr_sr_mode = ddr_read_sr_mode();
}
525 
/* Re-apply the Self-Refresh mode captured by ddr_save_sr_mode(). */
void ddr_restore_sr_mode(void)
{
	ddr_set_sr_mode(saved_ddr_sr_mode);
}
530 
/* Enable the DDR controller bus clock and its low-power clock in RCC. */
void ddr_sub_system_clk_init(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN);
}
536 
/*
 * Fully power down the DDR sub-system: clear IO retention (dropping CID
 * filtering momentarily for the PWR write), assert the sub-system resets,
 * then gate the PHY clock and PLL2.
 */
void ddr_sub_system_clk_off(void)
{
	uintptr_t rcc_base = stm32mp_rcc_base();
	bool cid_filtering = is_ddr_cid_filtering_enabled();

	/* Clear DDR IO retention */
	if (cid_filtering) {
		ddr_disable_cid_filtering();
	}
	mmio_clrbits_32(stm32mp_pwr_base() + PWR_CR11, PWR_CR11_DDRRETDIS);
	if (cid_filtering) {
		ddr_enable_cid_filtering();
	}

	udelay(DDR_DELAY_1US);

	/* Reset DDR sub system */
	mmio_write_32(rcc_base + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPRST);
	mmio_write_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(rcc_base + RCC_DDRPHYCAPBCFGR, RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(rcc_base + RCC_DDRCAPBCFGR, RCC_DDRCAPBCFGR_DDRCAPBRST);

	/* Deactivate clocks and PLL2 */
	mmio_clrbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_clrbits_32(rcc_base + RCC_PLL2CFGR1, RCC_PLL2CFGR1_PLLEN);
}
563