xref: /rk3399_ARM-atf/plat/rockchip/rk3399/drivers/dram/suspend.c (revision 61f72a34250d063da67f4fc2b0eb8c3fda3376be)
1 /*
2  * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <arch_helpers.h>
8 #include <debug.h>
9 #include <dram.h>
10 #include <plat_private.h>
11 #include <platform_def.h>
12 #include <pmu.h>
13 #include <pmu_bits.h>
14 #include <pmu_regs.h>
15 #include <rk3399_def.h>
16 #include <secure.h>
17 #include <soc.h>
18 #include <suspend.h>
19 
/* PMUGRF scratch registers (offsets within PMUGRF) used across suspend */
#define PMUGRF_OS_REG0			0x300
#define PMUGRF_OS_REG1			0x304
#define PMUGRF_OS_REG2			0x308
#define PMUGRF_OS_REG3			0x30c

/*
 * Soft-reset fields in CRU_SOFTRST_CON(4): bit 8 (ctrl) / bit 9 (phy),
 * shifted by 4 per channel.  The matching bit in the upper half-word is
 * the write-enable mask required by the CRU's write-masked registers.
 */
#define CRU_SFTRST_DDR_CTRL(ch, n)	((0x1 << (8 + 16 + (ch) * 4)) | \
					 ((n) << (8 + (ch) * 4)))
#define CRU_SFTRST_DDR_PHY(ch, n)	((0x1 << (9 + 16 + (ch) * 4)) | \
					 ((n) << (9 + (ch) * 4)))

/* Encode/decode the DPLL divider fields within the saved PLL_CON words */
#define FBDIV_ENC(n)			((n) << 16)
#define FBDIV_DEC(n)			(((n) >> 16) & 0xfff)
#define POSTDIV2_ENC(n)			((n) << 12)
#define POSTDIV2_DEC(n)			(((n) >> 12) & 0x7)
#define POSTDIV1_ENC(n)			((n) << 8)
#define POSTDIV1_DEC(n)			(((n) >> 8) & 0x7)
#define REFDIV_ENC(n)			(n)
#define REFDIV_DEC(n)			((n) & 0x3f)

/* PMU CRU */
#define PMUCRU_RSTNHOLD_CON0		0x120
#define PMUCRU_RSTNHOLD_CON1		0x124

/* GPIO preset-hold bits with their write-enable mask (WMSK_BIT) */
#define PRESET_GPIO0_HOLD(n)		(((n) << 7) | WMSK_BIT(7))
#define PRESET_GPIO1_HOLD(n)		(((n) << 8) | WMSK_BIT(8))

#define SYS_COUNTER_FREQ_IN_MHZ		(SYS_COUNTER_FREQ_IN_TICKS / 1000000)

/*
 * DPLL register contents and the DDR clock mux (CRU_CLKSEL_CON6) are saved
 * here by dmc_suspend() so dmc_resume(), which runs from PMUSRAM while DRAM
 * is still unavailable, can restore them.
 */
__pmusramdata uint32_t dpll_data[PLL_CON_COUNT];
__pmusramdata uint32_t cru_clksel_con6;
50 
51 /*
52  * Copy @num registers from @src to @dst
53  */
54 static __pmusramfunc void sram_regcpy(uintptr_t dst, uintptr_t src,
55 		uint32_t num)
56 {
57 	while (num--) {
58 		mmio_write_32(dst, mmio_read_32(src));
59 		dst += sizeof(uint32_t);
60 		src += sizeof(uint32_t);
61 	}
62 }
63 
/*
 * Copy @num consecutive 32-bit registers from @src to @dst.
 * This intentionally duplicates sram_regcpy(): PMUSRAM functions
 * cannot be called from code running in DRAM.
 */
static void dram_regcpy(uintptr_t dst, uintptr_t src, uint32_t num)
{
	for (; num != 0; num--) {
		mmio_write_32(dst, mmio_read_32(src));
		dst += sizeof(uint32_t);
		src += sizeof(uint32_t);
	}
}
77 
78 static __pmusramfunc uint32_t sram_get_timer_value(void)
79 {
80 	/*
81 	 * Generic delay timer implementation expects the timer to be a down
82 	 * counter. We apply bitwise NOT operator to the tick values returned
83 	 * by read_cntpct_el0() to simulate the down counter.
84 	 */
85 	return (uint32_t)(~read_cntpct_el0());
86 }
87 
88 static __pmusramfunc void sram_udelay(uint32_t usec)
89 {
90 	uint32_t start, cnt, delta, total_ticks;
91 
92 	/* counter is decreasing */
93 	start = sram_get_timer_value();
94 	total_ticks = usec * SYS_COUNTER_FREQ_IN_MHZ;
95 	do {
96 		cnt = sram_get_timer_value();
97 		if (cnt > start) {
98 			delta = UINT32_MAX - cnt;
99 			delta += start;
100 		} else
101 			delta = start - cnt;
102 	} while (delta <= total_ticks);
103 }
104 
105 static __pmusramfunc void configure_sgrf(void)
106 {
107 	/*
108 	 * SGRF_DDR_RGN_DPLL_CLK and SGRF_DDR_RGN_RTC_CLK:
109 	 * IC ECO bug, need to set this register.
110 	 *
111 	 * SGRF_DDR_RGN_BYPS:
112 	 * After the PD_CENTER suspend/resume, the DDR region
113 	 * related registers in the SGRF will be reset, we
114 	 * need to re-initialize them.
115 	 */
116 	mmio_write_32(SGRF_BASE + SGRF_DDRRGN_CON0_16(16),
117 		      SGRF_DDR_RGN_DPLL_CLK |
118 		      SGRF_DDR_RGN_RTC_CLK |
119 		      SGRF_DDR_RGN_BYPS);
120 }
121 
122 static __pmusramfunc void rkclk_ddr_reset(uint32_t channel, uint32_t ctl,
123 		uint32_t phy)
124 {
125 	channel &= 0x1;
126 	ctl &= 0x1;
127 	phy &= 0x1;
128 	mmio_write_32(CRU_BASE + CRU_SOFTRST_CON(4),
129 		      CRU_SFTRST_DDR_CTRL(channel, ctl) |
130 		      CRU_SFTRST_DDR_PHY(channel, phy));
131 }
132 
133 static __pmusramfunc void phy_pctrl_reset(uint32_t ch)
134 {
135 	rkclk_ddr_reset(ch, 1, 1);
136 	sram_udelay(10);
137 	rkclk_ddr_reset(ch, 1, 0);
138 	sram_udelay(10);
139 	rkclk_ddr_reset(ch, 0, 0);
140 	sram_udelay(10);
141 }
142 
143 static __pmusramfunc void set_cs_training_index(uint32_t ch, uint32_t rank)
144 {
145 	uint32_t byte;
146 
147 	/* PHY_8/136/264/392 phy_per_cs_training_index_X 1bit offset_24 */
148 	for (byte = 0; byte < 4; byte++)
149 		mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 24,
150 				   rank << 24);
151 }
152 
153 static __pmusramfunc void select_per_cs_training_index(uint32_t ch,
154 		uint32_t rank)
155 {
156 	/* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
157 	if ((mmio_read_32(PHY_REG(ch, 84)) >> 16) & 1)
158 		set_cs_training_index(ch, rank);
159 }
160 
161 static __pmusramfunc void override_write_leveling_value(uint32_t ch)
162 {
163 	uint32_t byte;
164 
165 	for (byte = 0; byte < 4; byte++) {
166 		/*
167 		 * PHY_8/136/264/392
168 		 * phy_per_cs_training_multicast_en_X 1bit offset_16
169 		 */
170 		mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 16,
171 				   1 << 16);
172 		mmio_clrsetbits_32(PHY_REG(ch, 63 + (128 * byte)),
173 				   0xffff << 16,
174 				   0x200 << 16);
175 	}
176 
177 	/* CTL_200 ctrlupd_req 1bit offset_8 */
178 	mmio_clrsetbits_32(CTL_REG(ch, 200), 0x1 << 8, 0x1 << 8);
179 }
180 
/*
 * Run the PI/PHY hardware training steps requested in @training_flag for
 * channel @ch.  PI_FULL_TRAINING is expanded to the set of steps the DRAM
 * type supports.  Each step is requested per rank via the PI request
 * registers and then polled through PI_174 (interrupt status, read from
 * bit 8 up) together with the PHY observation registers.
 *
 * Returns 0 on success, -1 as soon as the PI status or a PHY observation
 * register reports an error.  NOTE(review): the polling loops have no
 * timeout; on resume a hang here is ultimately broken by the watchdog
 * armed in dmc_resume() — confirm that remains the intended backstop.
 */
static __pmusramfunc int data_training(uint32_t ch,
		struct rk3399_sdram_params *sdram_params,
		uint32_t training_flag)
{
	uint32_t obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	uint32_t rank = sdram_params->ch[ch].rank;
	uint32_t rank_mask;
	uint32_t i, tmp;

	/*
	 * LPDDR4 uses index pairs per rank (0x5 = CS0, 0xf = CS0+CS1);
	 * other types use one index per rank.
	 */
	if (sdram_params->dramtype == LPDDR4)
		rank_mask = (rank == 1) ? 0x5 : 0xf;
	else
		rank_mask = (rank == 1) ? 0x1 : 0x3;

	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22: enable for training */
	mmio_setbits_32(PHY_REG(ch, 927), (1 << 22));

	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->dramtype == LPDDR4) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING |
					PI_WDQ_LEVELING;
		} else if (sdram_params->dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* ca training(LPDDR4,LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/* PI_100 PI_CALVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 100), 0x3 << 8, 0x2 << 8);

			/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 92),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 532));
				obs_1 = mmio_read_32(PHY_REG(ch, 660));
				obs_2 = mmio_read_32(PHY_REG(ch, 788));
				if (((obs_0 >> 30) & 0x3) ||
				    ((obs_1 >> 30) & 0x3) ||
				    ((obs_2 >> 30) & 0x3))
					obs_err = 1;
				/* done + level-done set, error bit clear */
				if ((((tmp >> 11) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 5) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 5) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 100), 0x3 << 8);
	}

	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_60 PI_WRLVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 60), 0x3 << 8, 0x2 << 8);
			/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 59),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));

			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs, if error maybe can not
				 * get leveling done PHY_40/168/296/424
				 * phy_wrlvl_status_obs_X:0:13
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 40));
				obs_1 = mmio_read_32(PHY_REG(ch, 168));
				obs_2 = mmio_read_32(PHY_REG(ch, 296));
				obs_3 = mmio_read_32(PHY_REG(ch, 424));
				if (((obs_0 >> 12) & 0x1) ||
				    ((obs_1 >> 12) & 0x1) ||
				    ((obs_2 >> 12) & 0x1) ||
				    ((obs_3 >> 12) & 0x1))
					obs_err = 1;
				if ((((tmp >> 10) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 4) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 4) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}

			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		/* pin the trained delays to a fixed value for all lanes */
		override_write_leveling_value(ch);
		mmio_clrbits_32(PI_REG(ch, 60), 0x3 << 8);
	}

	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 24,
					   0x2 << 24);
			/*
			 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
			 * PI_RDLVL_CS:RW:24:2
			 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));

			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_43/171/299/427
				 *     PHY_GTLVL_STATUS_OBS_x:16:8
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 43));
				obs_1 = mmio_read_32(PHY_REG(ch, 171));
				obs_2 = mmio_read_32(PHY_REG(ch, 299));
				obs_3 = mmio_read_32(PHY_REG(ch, 427));
				if (((obs_0 >> (16 + 6)) & 0x3) ||
				    ((obs_1 >> (16 + 6)) & 0x3) ||
				    ((obs_2 >> (16 + 6)) & 0x3) ||
				    ((obs_3 >> (16 + 6)) & 0x3))
					obs_err = 1;
				if ((((tmp >> 9) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 3) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 3) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 24);
	}

	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 16,
					   0x2 << 16);
			/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 8) | (0x3 << 24),
					   (0x1 << 8) | (i << 24));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * make sure status obs not report error bit
				 * PHY_46/174/302/430
				 *     phy_rdlvl_status_obs_X:16:8
				 */
				if ((((tmp >> 8) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 2) & 0x1) == 0x0))
					break;
				else if (((tmp >> 2) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 16);
	}

	/* wdq leveling(LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/*
			 * disable PI_WDQLVL_VREF_EN before wdq leveling?
			 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
			 */
			mmio_clrbits_32(PI_REG(ch, 181), 0x1 << 8);
			/* PI_124 PI_WDQLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 124), 0x3 << 16,
					   0x2 << 16);
			/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 121),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
				if ((((tmp >> 12) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 6) & 0x1) == 0x0))
					break;
				else if (((tmp >> 6) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 124), 0x3 << 16);
	}

	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22: restore after training */
	mmio_clrbits_32(PHY_REG(ch, 927), (1 << 22));

	return 0;
}
425 
426 static __pmusramfunc void set_ddrconfig(
427 		struct rk3399_sdram_params *sdram_params,
428 		unsigned char channel, uint32_t ddrconfig)
429 {
430 	/* only need to set ddrconfig */
431 	struct rk3399_sdram_channel *ch = &sdram_params->ch[channel];
432 	unsigned int cs0_cap = 0;
433 	unsigned int cs1_cap = 0;
434 
435 	cs0_cap = (1 << (ch->cs0_row + ch->col + ch->bk + ch->bw - 20));
436 	if (ch->rank > 1)
437 		cs1_cap = cs0_cap >> (ch->cs0_row - ch->cs1_row);
438 	if (ch->row_3_4) {
439 		cs0_cap = cs0_cap * 3 / 4;
440 		cs1_cap = cs1_cap * 3 / 4;
441 	}
442 
443 	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICECONF,
444 		      ddrconfig | (ddrconfig << 6));
445 	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICESIZE,
446 		      ((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8));
447 }
448 
449 static __pmusramfunc void dram_all_config(
450 		struct rk3399_sdram_params *sdram_params)
451 {
452 	unsigned int i;
453 
454 	for (i = 0; i < 2; i++) {
455 		struct rk3399_sdram_channel *info = &sdram_params->ch[i];
456 		struct rk3399_msch_timings *noc = &info->noc_timings;
457 
458 		if (sdram_params->ch[i].col == 0)
459 			continue;
460 
461 		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGA0,
462 			      noc->ddrtiminga0.d32);
463 		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGB0,
464 			      noc->ddrtimingb0.d32);
465 		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGC0,
466 			      noc->ddrtimingc0.d32);
467 		mmio_write_32(MSCH_BASE(i) + MSCH_DEVTODEV0,
468 			      noc->devtodev0.d32);
469 		mmio_write_32(MSCH_BASE(i) + MSCH_DDRMODE, noc->ddrmode.d32);
470 
471 		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
472 		if (sdram_params->ch[i].rank == 1)
473 			mmio_setbits_32(CTL_REG(i, 276), 1 << 17);
474 	}
475 
476 	DDR_STRIDE(sdram_params->stride);
477 
478 	/* reboot hold register set */
479 	mmio_write_32(PMUCRU_BASE + CRU_PMU_RSTHOLD_CON(1),
480 		      CRU_PMU_SGRF_RST_RLS |
481 		      PRESET_GPIO0_HOLD(1) |
482 		      PRESET_GPIO1_HOLD(1));
483 	mmio_clrsetbits_32(CRU_BASE + CRU_GLB_RST_CON, 0x3, 0x3);
484 }
485 
/*
 * Reprogram the DDR controller (CTL), PI and PHY register files for
 * channel @ch from the copies saved by dmc_suspend(), then start the
 * controller and wait for the PHY PLLs/DLLs to lock.  The programming
 * order below is deliberate and must not be changed.
 */
static __pmusramfunc void pctl_cfg(uint32_t ch,
		struct rk3399_sdram_params *sdram_params)
{
	const uint32_t *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const uint32_t *params_pi = sdram_params->pi_regs.denali_pi;
	const struct rk3399_ddr_publ_regs *phy_regs = &sdram_params->phy_regs;
	uint32_t tmp, tmp1, tmp2, i;

	/*
	 * Workaround controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 * (hence CTL_REG 1..N first, CTL_REG 0 afterwards).
	 */
	sram_regcpy(CTL_REG(ch, 1), (uintptr_t)&params_ctl[1],
		    CTL_REG_NUM - 1);
	mmio_write_32(CTL_REG(ch, 0), params_ctl[0]);
	sram_regcpy(PI_REG(ch, 0), (uintptr_t)&params_pi[0],
		    PI_REG_NUM);

	/* restore PHY_910..912 ahead of the rest of the PHY block */
	sram_regcpy(PHY_REG(ch, 910), (uintptr_t)&phy_regs->phy896[910 - 896],
		    3);

	/* exit self-refresh on power-up */
	mmio_clrsetbits_32(CTL_REG(ch, 68), PWRUP_SREFRESH_EXIT,
				PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	mmio_clrsetbits_32(PHY_REG(ch, 957), 0x3 << 24, 1 << 24);
	/* ensure all register writes land before starting PI/CTL */
	dmbst();

	mmio_setbits_32(PI_REG(ch, 0), START);
	mmio_setbits_32(CTL_REG(ch, 0), START);

	/* wait lock (PHY_920/921/922 lock status bits) */
	while (1) {
		tmp = mmio_read_32(PHY_REG(ch, 920));
		tmp1 = mmio_read_32(PHY_REG(ch, 921));
		tmp2 = mmio_read_32(PHY_REG(ch, 922));
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		     (((tmp1 >> 16) & 0x1) == 0x1) &&
		     (((tmp1 >> 0) & 0x1) == 0x1) &&
		     (((tmp2 >> 0) & 0x1) == 0x1))
			break;
		/* if PLL bypass,don't need wait lock */
		if (mmio_read_32(PHY_REG(ch, 911)) & 0x1)
			break;
	}

	/* restore the remaining PHY blocks now that the PLLs are locked */
	sram_regcpy(PHY_REG(ch, 896), (uintptr_t)&phy_regs->phy896[0], 63);

	/* one saved copy of PHY_0..90 is replicated to all four data slices */
	for (i = 0; i < 4; i++)
		sram_regcpy(PHY_REG(ch, 128 * i),
			    (uintptr_t)&phy_regs->phy0[0], 91);

	for (i = 0; i < 3; i++)
		sram_regcpy(PHY_REG(ch, 512 + 128 * i),
				(uintptr_t)&phy_regs->phy512[i][0], 38);
}
542 
/*
 * Toggle the DDR frequency index (CTL_111 bit 16) via the CIC handshake
 * and retrain both channels at the new index.  Returns 0 on success,
 * -1 if training fails on any channel.
 */
static __pmusramfunc int dram_switch_to_next_index(
		struct rk3399_sdram_params *sdram_params)
{
	uint32_t ch, ch_count;
	/* next frequency index: flip the current one read from CTL_111 */
	uint32_t fn = ((mmio_read_32(CTL_REG(0, 111)) >> 16) + 1) & 0x1;

	/* request the index switch; upper half-word is the write mask */
	mmio_write_32(CIC_BASE + CIC_CTRL0,
		      (((0x3 << 4) | (1 << 2) | 1) << 16) |
		      (fn << 4) | (1 << 2) | 1);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 2)))
		;

	/* acknowledge and wait for the switch to complete */
	mmio_write_32(CIC_BASE + CIC_CTRL0, 0x20002);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 0)))
		;

	ch_count = sdram_params->num_channels;

	/* LPDDR4 f2 cann't do training, all training will fail */
	for (ch = 0; ch < ch_count; ch++) {
		/* point the PHY at frequency set @fn before retraining */
		mmio_clrsetbits_32(PHY_REG(ch, 896), (0x3 << 8) | 1,
				   fn << 8);

		/* data_training failed */
		if (data_training(ch, sdram_params, PI_FULL_TRAINING))
			return -1;
	}

	return 0;
}
573 
574 /*
575  * Needs to be done for both channels at once in case of a shared reset signal
576  * between channels.
577  */
578 static __pmusramfunc int pctl_start(uint32_t channel_mask,
579 		struct rk3399_sdram_params *sdram_params)
580 {
581 	uint32_t count;
582 	uint32_t byte;
583 
584 	mmio_setbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
585 	mmio_setbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
586 
587 	/* need de-access IO retention before controller START */
588 	if (channel_mask & (1 << 0))
589 		mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 19));
590 	if (channel_mask & (1 << 1))
591 		mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 23));
592 
593 	/* PHY_DLL_RST_EN */
594 	if (channel_mask & (1 << 0))
595 		mmio_clrsetbits_32(PHY_REG(0, 957), 0x3 << 24,
596 				   0x2 << 24);
597 	if (channel_mask & (1 << 1))
598 		mmio_clrsetbits_32(PHY_REG(1, 957), 0x3 << 24,
599 				   0x2 << 24);
600 
601 	/* check ERROR bit */
602 	if (channel_mask & (1 << 0)) {
603 		count = 0;
604 		while (!(mmio_read_32(CTL_REG(0, 203)) & (1 << 3))) {
605 			/* CKE is low, loop 10ms */
606 			if (count > 100)
607 				return -1;
608 
609 			sram_udelay(100);
610 			count++;
611 		}
612 
613 		mmio_clrbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
614 
615 		/* Restore the PHY_RX_CAL_DQS value */
616 		for (byte = 0; byte < 4; byte++)
617 			mmio_clrsetbits_32(PHY_REG(0, 57 + 128 * byte),
618 					   0xfff << 16,
619 					   sdram_params->rx_cal_dqs[0][byte]);
620 	}
621 	if (channel_mask & (1 << 1)) {
622 		count = 0;
623 		while (!(mmio_read_32(CTL_REG(1, 203)) & (1 << 3))) {
624 			/* CKE is low, loop 10ms */
625 			if (count > 100)
626 				return -1;
627 
628 			sram_udelay(100);
629 			count++;
630 		}
631 
632 		mmio_clrbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
633 
634 		/* Restore the PHY_RX_CAL_DQS value */
635 		for (byte = 0; byte < 4; byte++)
636 			mmio_clrsetbits_32(PHY_REG(1, 57 + 128 * byte),
637 					   0xfff << 16,
638 					   sdram_params->rx_cal_dqs[1][byte]);
639 	}
640 
641 	return 0;
642 }
643 
644 __pmusramfunc static void pmusram_restore_pll(int pll_id, uint32_t *src)
645 {
646 	mmio_write_32((CRU_BASE + CRU_PLL_CON(pll_id, 3)), PLL_SLOW_MODE);
647 
648 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 0), src[0] | REG_SOC_WMSK);
649 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 1), src[1] | REG_SOC_WMSK);
650 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 2), src[2]);
651 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 4), src[4] | REG_SOC_WMSK);
652 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 5), src[5] | REG_SOC_WMSK);
653 
654 	mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 3), src[3] | REG_SOC_WMSK);
655 
656 	while ((mmio_read_32(CRU_BASE + CRU_PLL_CON(pll_id, 2)) &
657 		(1 << 31)) == 0x0)
658 		;
659 }
660 
/*
 * Arm WDT0 so a hang anywhere in the PMUSRAM resume path (e.g. the
 * timeout-less training/lock loops) ends in a global reset instead of a
 * silent wedge.  Register offsets follow the DesignWare WDT layout used
 * by the code below: +0 control, +4 timeout range (TORR), +0xc counter
 * restart (CRR).
 */
__pmusramfunc static void pmusram_enable_watchdog(void)
{
	/* Make the watchdog use the first global reset. */
	mmio_write_32(CRU_BASE + CRU_GLB_RST_CON, 1 << 1);

	/*
	 * This gives the system ~8 seconds before reset. The pclk for the
	 * watchdog is 4MHz on reset. The value of 0x9 in WDT_TORR means that
	 * the watchdog will wait for 0x1ffffff cycles before resetting.
	 */
	mmio_write_32(WDT0_BASE + 4, 0x9);

	/* Enable the watchdog */
	mmio_setbits_32(WDT0_BASE, 0x1);

	/* Magic reset the watchdog timer value for WDT_CRR. */
	mmio_write_32(WDT0_BASE + 0xc, 0x76);

	/* route the watchdog reset through the secure GRF (see secure.c) */
	secure_watchdog_ungate();

	/* The watchdog is in PD_ALIVE, so deidle it. */
	mmio_clrbits_32(PMU_BASE + PMU_BUS_CLR, PMU_CLR_ALIVE);
}
684 
/*
 * Snapshot everything dmc_resume() needs into sdram_config and the
 * __pmusramdata globals: the DPLL/clock-mux settings, the full CTL/PI
 * register files, the PHY register blocks, and the per-lane RX DQS
 * calibration values.  Runs from DRAM before suspend, so it may use the
 * DRAM-resident dram_regcpy().
 */
void dmc_suspend(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	struct rk3399_ddr_publ_regs *phy_regs;
	uint32_t *params_ctl;
	uint32_t *params_pi;
	uint32_t refdiv, postdiv2, postdiv1, fbdiv;
	uint32_t ch, byte, i;

	phy_regs = &sdram_params->phy_regs;
	params_ctl = sdram_params->pctl_regs.denali_ctl;
	params_pi = sdram_params->pi_regs.denali_pi;

	/* save dpll register and ddr clock register value to pmusram */
	cru_clksel_con6 = mmio_read_32(CRU_BASE + CRU_CLKSEL_CON6);
	for (i = 0; i < PLL_CON_COUNT; i++)
		dpll_data[i] = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, i));

	/* decode the divider fields to compute the running DDR frequency */
	fbdiv = dpll_data[0] & 0xfff;
	postdiv2 = POSTDIV2_DEC(dpll_data[1]);
	postdiv1 = POSTDIV1_DEC(dpll_data[1]);
	refdiv = REFDIV_DEC(dpll_data[1]);

	/* 24 MHz crystal: freq = 24 * fbdiv / (refdiv * postdiv1 * postdiv2) */
	sdram_params->ddr_freq = ((fbdiv * 24) /
				(refdiv * postdiv1 * postdiv2)) * MHz;

	INFO("sdram_params->ddr_freq = %d\n", sdram_params->ddr_freq);
	/* ODT is considered enabled if PHY_5 bits 16..18 are non-zero */
	sdram_params->odt = (((mmio_read_32(PHY_REG(0, 5)) >> 16) &
			       0x7) != 0) ? 1 : 0;

	/* copy the registers CTL PI and PHY */
	dram_regcpy((uintptr_t)&params_ctl[0], CTL_REG(0, 0), CTL_REG_NUM);

	/* mask DENALI_CTL_00_DATA.START, only copy here, will trigger later */
	params_ctl[0] &= ~(0x1 << 0);

	dram_regcpy((uintptr_t)&params_pi[0], PI_REG(0, 0),
		    PI_REG_NUM);

	/* mask DENALI_PI_00_DATA.START, only copy here, will trigger later*/
	params_pi[0] &= ~(0x1 << 0);

	/* PHY slice block 0 (91 regs); pctl_cfg() replicates it to all four */
	dram_regcpy((uintptr_t)&phy_regs->phy0[0],
			    PHY_REG(0, 0), 91);

	for (i = 0; i < 3; i++)
		dram_regcpy((uintptr_t)&phy_regs->phy512[i][0],
			    PHY_REG(0, 512 + 128 * i), 38);

	dram_regcpy((uintptr_t)&phy_regs->phy896[0], PHY_REG(0, 896), 63);

	/* save per-lane RX DQS calibration (PHY_57 + 128*byte, bits 16..27) */
	for (ch = 0; ch < sdram_params->num_channels; ch++) {
		for (byte = 0; byte < 4; byte++)
			sdram_params->rx_cal_dqs[ch][byte] = (0xfff << 16) &
				mmio_read_32(PHY_REG(ch, 57 + byte * 128));
	}

	/* set DENALI_PHY_957_DATA.PHY_DLL_RST_EN = 0x1 */
	phy_regs->phy896[957 - 896] &= ~(0x3 << 24);
	phy_regs->phy896[957 - 896] |= 1 << 24;
	/* pre-set PHY_896: sw reset asserted, frequency-set field cleared */
	phy_regs->phy896[0] |= 1;
	phy_regs->phy896[0] &= ~(0x3 << 8);
}
748 
/*
 * Bring DRAM back after a system suspend.  Runs entirely from PMUSRAM
 * (DRAM contents are preserved in self-refresh but unreachable until this
 * completes).  The sequence is: arm the watchdog, restore clocks/DPLL,
 * re-init the SGRF DDR region, reconfigure the controller/PHY from the
 * saved state, retrain, and finally switch the frequency index.
 * Any controller-start or training failure retries the whole sequence;
 * a persistent failure is ended by the watchdog armed at the top.
 */
__pmusramfunc void dmc_resume(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	uint32_t channel_mask = 0;
	uint32_t channel;

	pmusram_enable_watchdog();
	pmu_sgrf_rst_hld_release();
	restore_pmu_rsthold();
	sram_secure_timer_init();

	/*
	 * we switch ddr clock to abpll when suspend,
	 * we set back to dpll here
	 */
	mmio_write_32(CRU_BASE + CRU_CLKSEL_CON6,
			cru_clksel_con6 | REG_SOC_WMSK);
	pmusram_restore_pll(DPLL_ID, dpll_data);

	configure_sgrf();

retry:
	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		phy_pctrl_reset(channel);
		pctl_cfg(channel, sdram_params);
	}

	/* a populated channel has col != 0 */
	for (channel = 0; channel < 2; channel++) {
		if (sdram_params->ch[channel].col)
			channel_mask |= 1 << channel;
	}

	if (pctl_start(channel_mask, sdram_params) < 0)
		goto retry;

	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		/* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
		if (sdram_params->dramtype == LPDDR3)
			sram_udelay(10);

		/* If traning fail, retry to do it again. */
		if (data_training(channel, sdram_params, PI_FULL_TRAINING))
			goto retry;

		set_ddrconfig(sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}

	dram_all_config(sdram_params);

	/* Switch to index 1 and prepare for DDR frequency switch. */
	dram_switch_to_next_index(sdram_params);
}
802