xref: /rk3399_ARM-atf/plat/imx/imx8ulp/dram.c (revision caee2733ba4e7a09ea656b0be85f150a275cc57c)
1 /*
2  * Copyright 2021-2024 NXP
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <assert.h>
8 #include <stdbool.h>
9 
10 #include <arch_helpers.h>
11 #include <bl31/interrupt_mgmt.h>
12 #include <common/runtime_svc.h>
13 #include <lib/mmio.h>
14 #include <lib/spinlock.h>
15 #include <plat/common/platform.h>
16 
17 #include <platform_def.h>
18 
19 #include <dram.h>
20 #include <upower_api.h>
21 
22 #define PHY_FREQ_SEL_INDEX(x)		((x) << 16)
23 #define PHY_FREQ_MULTICAST_EN(x)	((x) << 8)
24 #define DENALI_PHY_1537			U(0x5804)
25 
26 #define IMX_DDRC_BASE			U(0x2E060000)
27 #define SAVED_DRAM_DATA_BASE		U(0x20055000)
28 #define DENALI_CTL_144			0x240
29 #define LPI_WAKEUP_EN_SHIFT		U(8)
30 #define IMX_LPAV_SIM_BASE		0x2DA50000
31 #define LPDDR_CTRL			0x14
32 #define LPDDR_AUTO_LP_MODE_DISABLE	BIT(24)
33 #define SOC_LP_CMD_SHIFT		U(15)
34 #define LPDDR_CTRL2			0x18
35 
36 #define DENALI_CTL_00			U(0x0)
37 #define DENALI_CTL_23			U(0x5c)
38 #define DFIBUS_FREQ_INIT_SHIFT		U(24)
39 #define TSREF2PHYMSTR_SHIFT		U(8)
40 #define TSREF2PHYMSTR_MASK		GENMASK(13, 8)
41 
42 #define DENALI_CTL_24			U(0x60)
43 #define DENALI_CTL_25			U(0x64)
44 
45 #define DENALI_CTL_93			U(0x174)
46 #define PWRUP_SREFRESH_EXIT		BIT(0)
47 
48 #define DENALI_CTL_127				U(0x1fc)
49 #define PHYMSTR_TRAIN_AFTER_INIT_COMPLETE	BIT(16)
50 
51 #define DENALI_CTL_147			U(0x24c)
52 #define DENALI_CTL_153			U(0x264)
53 #define PCPCS_PD_EN			BIT(8)
54 
55 #define DENALI_CTL_249			U(0x3E4)
56 #define DENALI_CTL_266			U(0x428)
57 
58 #define DENALI_PHY_1547			U(0x582c)
59 #define PHY_LP4_BOOT_DISABLE		BIT(8)
60 
61 #define DENALI_PHY_1559			U(0x585c)
62 #define DENALI_PHY_1590			U(0x58D8)
63 
64 #define DENALI_PI_00			U(0x2000)
65 #define DENALI_PI_04			U(0x2010)
66 #define DENALI_PI_52			U(0x20D0)
67 #define DENALI_PI_26			U(0x2068)
68 #define DENALI_PI_33			U(0x2084)
69 #define DENALI_PI_65			U(0x2104)
70 #define DENALI_PI_77			U(0x2134)
71 #define DENALI_PI_134			U(0x2218)
72 #define DENALI_PI_131			U(0x220C)
73 #define DENALI_PI_132			U(0x2210)
74 #define DENALI_PI_134			U(0x2218)
75 #define DENALI_PI_137			U(0x2224)
76 #define DENALI_PI_174			U(0x22B8)
77 #define DENALI_PI_175			U(0x22BC)
78 #define DENALI_PI_181			U(0x22D4)
79 #define DENALI_PI_182			U(0x22D8)
80 #define DENALI_PI_191			U(0x22FC)
81 #define DENALI_PI_192			U(0x2300)
82 #define DENALI_PI_212			U(0x2350)
83 #define DENALI_PI_214			U(0x2358)
84 #define DENALI_PI_217			U(0x2364)
85 
86 #define LPDDR3_TYPE	U(0x7)
87 #define LPDDR4_TYPE	U(0xB)
88 
89 extern void upower_wait_resp(void);
90 
/* One register/value pair describing a single DDRC/PI/PHY setting. */
struct dram_cfg_param {
	uint32_t reg;	/* absolute register address (passed directly to mmio_read_32/mmio_write_32) */
	uint32_t val;	/* value read from / to be programmed into 'reg' */
};
95 
/*
 * Layout of the DRAM timing data placed at SAVED_DRAM_DATA_BASE by an
 * earlier boot stage.
 * NOTE(review): this struct must match the producer's layout exactly --
 * confirm against the component that populates SAVED_DRAM_DATA_BASE
 * before changing field order or sizes.
 */
struct dram_timing_info {
	/* ddr controller config */
	struct dram_cfg_param *ctl_cfg;
	unsigned int ctl_cfg_num;
	/* pi config */
	struct dram_cfg_param *pi_cfg;
	unsigned int pi_cfg_num;
	/* phy freq1 config */
	struct dram_cfg_param *phy_f1_cfg;
	unsigned int phy_f1_cfg_num;
	/* phy freq2 config */
	struct dram_cfg_param *phy_f2_cfg;
	unsigned int phy_f2_cfg_num;
	/*
	 * initialized drate table: one data rate per frequency setpoint,
	 * 0 marks an unused slot (see dram_init()); entries below
	 * DDR_BYPASS_DRATE select PLL-bypass mode in lpddr4_dfs()
	 */
	unsigned int fsp_table[3];
};
112 
113 #define CTL_NUM		U(680)
114 #define PI_NUM		U(298)
115 #define PHY_NUM		U(1654)
116 #define PHY_DIFF_NUM	U(49)
/*
 * Raw register images captured on the first retention entry
 * (dram_enter_retention()) and replayed by ddr_init() on exit.
 */
struct dram_cfg {
	uint32_t ctl_cfg[CTL_NUM];	/* DENALI_CTL_* block (offset 0x0) */
	uint32_t pi_cfg[PI_NUM];	/* DENALI_PI_* block (offset 0x2000) */
	uint32_t phy_full[PHY_NUM];	/* full PHY image (offset 0x4000, multicast view) */
	uint32_t phy_diff[PHY_DIFF_NUM];	/* per-setpoint PHY diffs (LPDDR4 only) */
};
123 
/* Timing info laid down at SAVED_DRAM_DATA_BASE by an earlier boot stage */
struct dram_timing_info *info;
/* Register save area placed immediately after 'info' in the same region */
struct dram_cfg *dram_timing_cfg;

/* mark if dram cfg is already saved */
static bool dram_cfg_saved;
/* DRAM type field read from DENALI_CTL_00 (LPDDR3_TYPE or LPDDR4_TYPE) */
static uint32_t dram_class;
130 
131 /* PHY register index for frequency diff */
132 uint32_t freq_specific_reg_array[PHY_DIFF_NUM] = {
133 90, 92, 93, 96, 97, 100, 101, 102, 103, 104, 114,
134 346, 348, 349, 352, 353, 356, 357, 358, 359, 360,
135 370, 602, 604, 605, 608, 609, 612, 613, 614, 615,
136 616, 626, 858, 860, 861, 864, 865, 868, 869, 870,
137 871, 872, 882, 1063, 1319, 1566, 1624, 1625
138 };
139 
/* lock used for DDR DVFS */
spinlock_t dfs_lock;
/* number of secondary cores that have parked in waiting_dvfs() */
static volatile uint32_t core_count;
/* true while the primary core is executing the frequency switch */
static volatile bool in_progress;
/* number of valid entries in info->fsp_table, computed by dram_init() */
static int num_fsp;
145 
146 static void ddr_init(void)
147 {
148 	unsigned int i;
149 
150 	/* restore the ddr ctl config */
151 	for (i = 0U; i < CTL_NUM; i++) {
152 		mmio_write_32(IMX_DDRC_BASE + i * 4, dram_timing_cfg->ctl_cfg[i]);
153 	}
154 
155 	/* load the PI registers */
156 	for (i = 0U; i < PI_NUM; i++) {
157 		mmio_write_32(IMX_DDRC_BASE + 0x2000 + i * 4, dram_timing_cfg->pi_cfg[i]);
158 	}
159 
160 
161 	 /* restore all PHY registers for all the fsp. */
162 	mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x100);
163 	/* restore all the phy configs */
164 	for (i = 0U; i < PHY_NUM; i++) {
165 		/* skip the reserved registers space */
166 		if (i >= 121U && i <= 255U) {
167 			continue;
168 		}
169 		if (i >= 377U && i <= 511U) {
170 			continue;
171 		}
172 		if (i >= 633U && i <= 767U) {
173 			continue;
174 		}
175 		if (i >= 889U && i <= 1023U) {
176 			continue;
177 		}
178 		if (i >= 1065U && i <= 1279U) {
179 			continue;
180 		}
181 		if (i >= 1321U && i <= 1535U) {
182 			continue;
183 		}
184 		mmio_write_32(IMX_DDRC_BASE + 0x4000 + i * 4, dram_timing_cfg->phy_full[i]);
185 	}
186 
187 	if (dram_class == LPDDR4_TYPE) {
188 		/* restore only the diff. */
189 		mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x0);
190 		for (i = 0U; i < PHY_DIFF_NUM; i++) {
191 			mmio_write_32(IMX_DDRC_BASE + 0x4000 + freq_specific_reg_array[i] * 4,
192 				      dram_timing_cfg->phy_diff[i]);
193 		}
194 	}
195 
196 	/* Re-enable MULTICAST mode */
197 	mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, PHY_FREQ_MULTICAST_EN(1));
198 }
199 
/*
 * Prepare the LPDDR subsystem for retention: program the self-refresh /
 * clock-gating handshakes, and on the first call capture the complete
 * CTL/PI/PHY register state into the save area at SAVED_DRAM_DATA_BASE
 * so dram_exit_retention() can replay it.
 */
void dram_enter_retention(void)
{
	unsigned int i;

	/* 1. config the PCC_LPDDR4[SSADO] to 2b'11 for ACK domain 0/1's STOP */
	/* NOTE(review): only 0x2 is OR-ed into the field although the comment
	 * says 2b'11 -- confirm the intended SSADO value against the RM.
	 */
	mmio_setbits_32(IMX_PCC5_BASE + 0x108, 0x2 << 22);

	/*
	 * 2. Make sure the DENALI_CTL_144[LPI_WAKEUP_EN[5:0]] has the bit
	 * LPI_WAKEUP_EN[3] = 1b'1. This enables the option 'self-refresh
	 * long with mem and ctlr clk gating or self-refresh  power-down
	 * long with mem and ctlr clk gating'
	 */
	mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, BIT(3) << LPI_WAKEUP_EN_SHIFT);

	/*
	 * 3a. Config SIM_LPAV LPDDR_CTRL[LPDDR_AUTO_LP_MODE_DISABLE] to 1b'0(enable
	 * the logic to automatic handles low power entry/exit. This is the recommended
	 * option over handling through software.
	 * 3b. Config the SIM_LPAV LPDDR_CTRL[SOC_LP_CMD] to 6b'101001(encoding for
	 * self_refresh with both DDR controller and DRAM clock gate. This is mandatory
	 * since LPDDR logic will be power gated).
	 */
	mmio_clrbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL, LPDDR_AUTO_LP_MODE_DISABLE);
	mmio_clrsetbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL,
			   0x3f << SOC_LP_CMD_SHIFT, 0x29 << SOC_LP_CMD_SHIFT);

	/* Save DDR Controller & PHY config.
	 * Set PHY_FREQ_SEL_MULTICAST_EN=0 & PHY_FREQ_SEL_INDEX=1. Read and store all
	 * the PHY registers for F2 into phy_f1_cfg, then read/store the diff between
	 * F1 & F2 into phy_f2_cfg.
	 */
	if (!dram_cfg_saved) {
		info = (struct dram_timing_info *)SAVED_DRAM_DATA_BASE;
		dram_timing_cfg = (struct dram_cfg *)(SAVED_DRAM_DATA_BASE +
					sizeof(struct dram_timing_info));

		/* get the dram type (bits [11:8] of DENALI_CTL_00) */
		dram_class = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_00);
		dram_class = (dram_class >> 8) & 0xf;

		/* save the ctl registers */
		for (i = 0U; i < CTL_NUM; i++) {
			dram_timing_cfg->ctl_cfg[i] = mmio_read_32(IMX_DDRC_BASE + i * 4);
		}
		/* clear bit0 (START) in the saved copy so the controller
		 * comes back halted when the image is replayed
		 */
		dram_timing_cfg->ctl_cfg[0] = dram_timing_cfg->ctl_cfg[0] & 0xFFFFFFFE;

		/* save the PI registers */
		for (i = 0U; i < PI_NUM; i++) {
			dram_timing_cfg->pi_cfg[i] = mmio_read_32(IMX_DDRC_BASE + 0x2000 + i * 4);
		}
		/* likewise clear the PI START bit in the saved copy */
		dram_timing_cfg->pi_cfg[0] = dram_timing_cfg->pi_cfg[0] & 0xFFFFFFFE;

		/*
		 * Read and store all PHY registers. full array is a full
		 * copy for all the setpoint
		 */
		if (dram_class == LPDDR4_TYPE) {
			mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x10000);
			for (i = 0U; i < PHY_NUM; i++) {
				/* Make sure MULTICAST stays enabled in the saved image */
				if (i == 1537U) {
					dram_timing_cfg->phy_full[i] = 0x100;
				} else {
					dram_timing_cfg->phy_full[i] = mmio_read_32(IMX_DDRC_BASE + 0x4000 + i * 4);
				}
			}

			/*
			 * set PHY_FREQ_SEL_MULTICAST_EN=0 & PHY_FREQ_SEL_INDEX=0.
			 * Read and store only the diff.
			 */
			mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x0);
			/* save only the frequency based diff config to save memory */
			for (i = 0U; i < PHY_DIFF_NUM; i++) {
				dram_timing_cfg->phy_diff[i] = mmio_read_32(IMX_DDRC_BASE + 0x4000 +
									    freq_specific_reg_array[i] * 4);
			}
		} else {
			/* LPDDR3, only f1 need to save */
			for (i = 0U; i < info->phy_f1_cfg_num; i++) {
				info->phy_f1_cfg[i].val = mmio_read_32(info->phy_f1_cfg[i].reg);
			}
		}

		dram_cfg_saved = true;
	}
}
288 
289 void dram_exit_retention(void)
290 {
291 	uint32_t val;
292 
293 	/* 1. Config the LPAV PLL4 and DDR clock for the desired LPDDR operating frequency. */
294 	mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(30));
295 
296 	/* 2. Write PCC5.PCC_LPDDR4[SWRST] to 1b'1 to release LPDDR from reset. */
297 	mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(28));
298 
299 	/* 3. Reload the LPDDR CTL/PI/PHY register */
300 	ddr_init();
301 
302 	if (dram_class == LPDDR4_TYPE) {
303 		/* 4a. FIXME Set PHY_SET_DFI_INPUT_N parameters to 4'h1. LPDDR4 only */
304 		mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1559, 0x01010101);
305 
306 		/*
307 		 * 4b. CTL PWRUP_SREFRESH_EXIT=1'b0 for disabling self refresh exit
308 		 * from controller.
309 		 */
310 		/*
311 		 * 4c. PI_PWRUP_SELF_REF_EXIT=1, PI_MC_PWRUP_SELF_REF_EXIT=0 for enabling
312 		 * self refresh exit from PI
313 		 */
314 		/* 4c. PI_INT_LVL_EN=0 to skip Initialization trainings. */
315 		/*
316 		 * 4d. PI_WRLVL_EN_F0/1/2= PI_CALVL_EN_F0/1/2= PI_RDLVL_EN_F0/1/2=
317 		 * PI_RDLVL_GATE_EN_F0/1/2= PI_WDQLVL_EN_F0/1/2=0x2.
318 		 * Enable non initialization trainings.
319 		 */
320 		/* 4e. PI_PWRUP_SREFRESH_EXIT_CS=0xF */
321 		/* 4f. PI_DLL_RESET=0x1 */
322 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_137, 0x1);
323 		/* PI_PWRUP_SELF_REF_EXIT = 1 */
324 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_132, 0x01000000);
325 		/* PI_MC_PWRUP_SELF_REF_EXIT = 0 */
326 		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_132, BIT(16));
327 		/* PI_INT_LVL_EN = 0 */
328 		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_04, BIT(0));
329 		/* PI_WRLVL_EN_F0 = 3, PI_WRLVL_EN_F1 = 3 */
330 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_174, 0x03030000);
331 		/* PI_WRLVL_EN_F2 = 3 */
332 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_175, 0x03);
333 		/* PI_CALVL_EN_F0 = 3, PI_CALVL_EN_F1 = 3 */
334 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_191, 0x03030000);
335 		/* PI_CALVL_EN_F2 = 3 */
336 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_192, 0x03);
337 		/* PI_WDQLVL_EN_F0 = 3 */
338 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_212, 0x300);
339 		/* PI_WDQLVL_EN_F1 = 3 */
340 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_214, 0x03000000);
341 		/* PI_WDQLVL_EN_F2 = 3 */
342 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_217, 0x300);
343 		/* PI_EDLVL_EN_F0 = 3, PI_EDLVL_GATE_EN_F0 = 3 */
344 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_181, 0x03030000);
345 		/*
346 		 * PI_RDLVL_EN_F1 = 3, PI_RDLVL_GATE_EN_F1 = 3,
347 		 * PI_RDLVL_EN_F2 = 3, PI_RDLVL_GATE_EN_F2 = 3
348 		 */
349 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_182, 0x03030303);
350 		/* PI_PWRUP_SREFRESH_EXIT_CS = 0xF */
351 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_134, 0x000F0000);
352 	} else {
353 		/* PI_DLL_RESET=1 */
354 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_137, 0x1);
355 		/* PI_PWRUP_SELF_REF_EXIT=1 */
356 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_132, 0x01000000);
357 		/* PI_MC_PWRUP_SELF_REF_EXIT=0 */
358 		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_132, BIT(16));
359 		/* PI_INT_LVL_EN=0 */
360 		mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_04, BIT(0));
361 		/* PI_WRLVL_EN_F0=3 */
362 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_174, 0x00030000);
363 		/* PI_CALVL_EN_F0=3 */
364 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_191, 0x00030000);
365 		/* PI_RDLVL_EN_F0=3,PI_RDLVL_GATE_EN_F0=3 */
366 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_181, 0x03030000);
367 		/* PI_PWRUP_SREFRESH_EXIT_CS=0xF */
368 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_134, 0x000F0000);
369 	}
370 
371 	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, 0x00002D00);
372 
373 	/* Force in-order AXI read data */
374 	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, 0x1);
375 
376 	/*
377 	 * Disable special R/W group switches so that R/W group placement
378 	 * is always at END of R/W group.
379 	 */
380 	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_249, 0x0);
381 
382 	/* Reduce time for IO pad calibration */
383 	mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1590, 0x01000000);
384 
385 	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_25, 0x00020100);
386 
387 	/* PD disable */
388 	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_153, 0x04040000);
389 	/*
390 	 * 5. Disable automatic LP entry and PCPCS modes LP_AUTO_ENTRY_EN
391 	 * to 1b'0, PCPCS_PD_EN to 1b'0
392 	 */
393 
394 	upwr_xcp_set_ddr_retention(APD_DOMAIN, 0, NULL);
395 	upower_wait_resp();
396 
397 	if (dram_class == LPDDR4_TYPE) {
398 		/* 7. Write PI START parameter to 1'b1 */
399 		mmio_write_32(IMX_DDRC_BASE + DENALI_PI_00, 0x00000b01);
400 
401 		/* 8. Write CTL START parameter to 1'b1 */
402 		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_00, 0x00000b01);
403 	} else {
404 		/* 7. Write PI START parameter to 1'b1 */
405 		mmio_write_32(IMX_DDRC_BASE + DENALI_PI_00, 0x00000701);
406 
407 		/* 8. Write CTL START parameter to 1'b1 */
408 		mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_00, 0x00000701);
409 	}
410 
411 	/* 9. DENALI_CTL_266:  Wait for INT_STATUS_INIT=0x2 */
412 	do {
413 		val = (mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_266) >> 8) & 0xFF;
414 	} while (val != 0x2);
415 
416 	/*
417 	 * 10. Run SW trainings by setting PI_CALVL_REQ,PI_WRLVL_REQ,PI_RDLVL_GATE_REQ,
418 	 * PI_RDLVL_REQ,PI_WDQLVL_REQ(NA for LPDDR3) in same order.
419 	 */
420 	if (dram_class == LPDDR4_TYPE) {
421 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_52, 0x10000); /* CALVL */
422 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_26, 0x100); /* WRLVL */
423 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x10000); /* RDGATE */
424 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x100); /* RDQLVL */
425 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_65, 0x10000); /* WDQLVL */
426 
427 		/* 11. Wait for trainings to get complete by polling PI_INT_STATUS */
428 		while ((mmio_read_32(IMX_DDRC_BASE + DENALI_PI_77) & 0x07E00000) != 0x07E00000) {
429 			;
430 		}
431 	} else {
432 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_52, 0x10000); /* CALVL */
433 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_26, 0x100); /* WRLVL */
434 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x10000); /* RDGATE */
435 		mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x100); /* RDQLVL */
436 		while ((mmio_read_32(IMX_DDRC_BASE + DENALI_PI_77) & 0x05E00000) != 0x05E00000) {
437 			;
438 		}
439 	}
440 }
441 
442 #define LPDDR_DONE       (0x1<<4)
443 #define SOC_FREQ_CHG_ACK (0x1<<6)
444 #define SOC_FREQ_CHG_REQ (0x1<<7)
445 #define LPI_WAKEUP_EN    (0x4<<8)
446 #define SOC_FREQ_REQ     (0x1<<11)
447 
448 #define LPDDR_EN_CLKGATE (0x1<<17)
449 
450 static void set_cgc2_ddrclk(uint8_t src, uint8_t div)
451 {
452 
453 	/* Wait until the reg is unlocked for writing */
454 	while (mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(31))
455 		;
456 
457 	mmio_write_32(IMX_CGC2_BASE + 0x40, (src << 28) | (div << 21));
458 	/* Wait for the clock switching done */
459 	while (!(mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(27)))
460 		;
461 }
462 static void set_ddr_clk(uint32_t ddr_freq)
463 {
464 	/* Disable DDR clock */
465 	mmio_clrbits_32(IMX_PCC5_BASE + 0x108, BIT(30));
466 	switch (ddr_freq) {
467 	/* boot frequency ? */
468 	case 48:
469 		set_cgc2_ddrclk(2, 0);
470 		break;
471 	/* default bypass frequency for fsp 1 */
472 	case 192:
473 		set_cgc2_ddrclk(0, 1);
474 		break;
475 	case 384:
476 		set_cgc2_ddrclk(0, 0);
477 		break;
478 	case 264:
479 		set_cgc2_ddrclk(4, 3);
480 		break;
481 	case 528:
482 		set_cgc2_ddrclk(4, 1);
483 		break;
484 	default:
485 		break;
486 	}
487 	/* Enable DDR clock */
488 	mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(30));
489 
490 	/* Wait until the reg is unlocked for writing */
491 	while (mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(31)) {
492 		;
493 	}
494 }
495 
496 #define AVD_SIM_LPDDR_CTRL	(IMX_LPAV_SIM_BASE + 0x14)
497 #define AVD_SIM_LPDDR_CTRL2	(IMX_LPAV_SIM_BASE + 0x18)
498 #define MAX_FSP_NUM	U(3)
499 #define DDR_DFS_GET_FSP_COUNT	0x10
500 #define DDR_BYPASS_DRATE	U(400)
501 
/* Normally, we only switch frequency between 1(bypass) and 2(highest) */
/*
 * Execute one DDR frequency change through the SIM_LPAV hardware handshake:
 * put the DRAM into long self-refresh with clock gating, answer the SoC
 * frequency-change request by reprogramming the DDR clock, then restore the
 * controller settings and check the handshake status nibble.
 * Returns 0 on success, -1 on an invalid index or a non-zero status.
 */
int lpddr4_dfs(uint32_t freq_index)
{
	uint32_t lpddr_ctrl, lpddr_ctrl2;
	uint32_t ddr_ctl_144;

	/*
	 * Valid index: 0 to 2
	 * index 0: boot frequency
	 * index 1: bypass frequency
	 * index 2: highest frequency
	 */
	if (freq_index > 2U) {
		return -1;
	}

	/* Enable LPI_WAKEUP_EN (original CTL_144 value is restored below) */
	ddr_ctl_144 = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_144);
	mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, LPI_WAKEUP_EN);

	/* put DRAM into long self-refresh & clock gating */
	lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL);
	lpddr_ctrl = (lpddr_ctrl & ~((0x3f << 15) | (0x3 << 9))) | (0x28 << 15) | (freq_index << 9);
	mmio_write_32(AVD_SIM_LPDDR_CTRL, lpddr_ctrl);

	/* Gating the clock (original CTRL2 value is restored below) */
	lpddr_ctrl2 = mmio_read_32(AVD_SIM_LPDDR_CTRL2);
	mmio_setbits_32(AVD_SIM_LPDDR_CTRL2, LPDDR_EN_CLKGATE);

	/* Request frequency change */
	mmio_setbits_32(AVD_SIM_LPDDR_CTRL, SOC_FREQ_REQ);

	do {
		lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL);
		if (lpddr_ctrl & SOC_FREQ_CHG_REQ) {
			/* Bypass mode */
			if (info->fsp_table[freq_index] < DDR_BYPASS_DRATE) {
				/* Change to PLL bypass mode */
				mmio_write_32(IMX_LPAV_SIM_BASE, 0x1);
				/* change the ddr clock source & frequency */
				set_ddr_clk(info->fsp_table[freq_index]);
			} else {
				/* Change to PLL unbypass mode */
				mmio_write_32(IMX_LPAV_SIM_BASE, 0x0);
				/* change the ddr clock source & frequency (DDR clk = drate / 2) */
				set_ddr_clk(info->fsp_table[freq_index] >> 1);
			}

			/* ack the request so the hardware can proceed */
			mmio_clrsetbits_32(AVD_SIM_LPDDR_CTRL, SOC_FREQ_CHG_REQ, SOC_FREQ_CHG_ACK);
			continue;
		}
	} while ((lpddr_ctrl & LPDDR_DONE) != 0); /* several try? */
	/*
	 * NOTE(review): the loop above keeps iterating only while LPDDR_DONE
	 * is set, i.e. it exits as soon as DONE is clear and no request is
	 * pending -- confirm this polarity is intentional; a "wait for done"
	 * reading of the comment would suggest '== 0'.
	 */

	/* restore the original setting */
	mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, ddr_ctl_144);
	mmio_write_32(AVD_SIM_LPDDR_CTRL2, lpddr_ctrl2);

	/* Check the DFS result (low status nibble must be 0) */
	lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL) & 0xF;
	if (lpddr_ctrl != 0U) {
		/* Must be something wrong, return failure */
		return -1;
	}

	/* DFS done successfully */
	return 0;
}
569 
570 /* for the non-primary core, waiting for DFS done */
571 static uint64_t waiting_dvfs(uint32_t id, uint32_t flags,
572 		void *handle, void *cookie)
573 {
574 	uint32_t irq;
575 
576 	irq = plat_ic_acknowledge_interrupt();
577 	if (irq < 1022U) {
578 		plat_ic_end_of_interrupt(irq);
579 	}
580 
581 	/* set the WFE done status */
582 	spin_lock(&dfs_lock);
583 	core_count++;
584 	dsb();
585 	spin_unlock(&dfs_lock);
586 
587 	while (in_progress) {
588 		wfe();
589 	}
590 
591 	return 0;
592 }
593 
594 int dram_dvfs_handler(uint32_t smc_fid, void *handle,
595 		u_register_t x1, u_register_t x2, u_register_t x3)
596 {
597 	unsigned int fsp_index = x1;
598 	uint32_t online_cpus = x2 - 1;
599 	uint64_t mpidr = read_mpidr_el1();
600 	unsigned int cpu_id = MPIDR_AFFLVL0_VAL(mpidr);
601 
602 	/* Get the number of FSPs */
603 	if (x1 == DDR_DFS_GET_FSP_COUNT) {
604 		SMC_RET2(handle, num_fsp, info->fsp_table[1]);
605 	}
606 
607 	/* start lpddr frequency scaling */
608 	in_progress = true;
609 	dsb();
610 
611 	/* notify other core wait for scaling done */
612 	for (unsigned int i = 0; i < PLATFORM_CORE_COUNT; i++)
613 		/* Skip raise SGI for current CPU */
614 		if (i != cpu_id) {
615 			plat_ic_raise_el3_sgi(0x8, i);
616 		}
617 
618 	/* Make sure all the cpu in WFE */
619 	while (online_cpus != core_count) {
620 		;
621 	}
622 
623 	/* Flush the L1/L2 cache */
624 	dcsw_op_all(DCCSW);
625 
626 	lpddr4_dfs(fsp_index);
627 
628 	in_progress = false;
629 	core_count = 0;
630 	dsb();
631 	sev();
632 	isb();
633 
634 	SMC_RET1(handle, 0);
635 }
636 
637 void dram_init(void)
638 {
639 	uint32_t flags = 0;
640 	uint32_t rc;
641 	unsigned int i;
642 
643 	/* Register the EL3 handler for DDR DVFS */
644 	set_interrupt_rm_flag(flags, NON_SECURE);
645 	rc = register_interrupt_type_handler(INTR_TYPE_EL3, waiting_dvfs, flags);
646 	if (rc) {
647 		panic();
648 	}
649 
650 	info = (struct dram_timing_info *)SAVED_DRAM_DATA_BASE;
651 
652 	/* Get the num of the supported Fsp */
653 	for (i = 0; i < MAX_FSP_NUM; i++) {
654 		if (!info->fsp_table[i]) {
655 			break;
656 		}
657 	}
658 
659 	num_fsp = (i > MAX_FSP_NUM) ? MAX_FSP_NUM : i;
660 }
661