xref: /OK3568_Linux_fs/u-boot/drivers/clk/clk_zynq.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * Copyright (C) 2017 Weidmüller Interface GmbH & Co. KG
 * Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
 *
 * Copyright (C) 2013 Soren Brinkmann <soren.brinkmann@xilinx.com>
 * Copyright (C) 2013 Xilinx, Inc. All rights reserved.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dm/lists.h>
#include <errno.h>
#include <asm/io.h>
#include <asm/arch/clk.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>

/* Register bitfield defines */
#define PLLCTRL_FBDIV_MASK	0x7f000
#define PLLCTRL_FBDIV_SHIFT	12
#define PLLCTRL_BPFORCE_MASK	(1 << 4)
#define PLLCTRL_PWRDWN_MASK	2
#define PLLCTRL_PWRDWN_SHIFT	1
#define PLLCTRL_RESET_MASK	1
#define PLLCTRL_RESET_SHIFT	0

#define ZYNQ_CLK_MAXDIV		0x3f
#define CLK_CTRL_DIV1_SHIFT	20
#define CLK_CTRL_DIV1_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT	8
#define CLK_CTRL_DIV0_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_SHIFT	4
#define CLK_CTRL_SRCSEL_MASK	(0x3 << CLK_CTRL_SRCSEL_SHIFT)

#define CLK_CTRL_DIV2X_SHIFT	26
#define CLK_CTRL_DIV2X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV2X_SHIFT)
#define CLK_CTRL_DIV3X_SHIFT	20
#define CLK_CTRL_DIV3X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV3X_SHIFT)

DECLARE_GLOBAL_DATA_PTR;

#ifndef CONFIG_SPL_BUILD
enum zynq_clk_rclk {mio_clk, emio_clk};
#endif

struct zynq_clk_priv {
	ulong ps_clk_freq;
#ifndef CONFIG_SPL_BUILD
	struct clk gem_emio_clk[2];
#endif
};

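/*
 * Map a clock ID to the address of its control register in the SLCR.
 * Paired peripherals (SDIO, UART, SPI, CAN) share a single register;
 * the debug clocks and any unknown ID fall back to dbg_clk_ctrl.
 */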
static void *zynq_clk_get_register(enum zynq_clk id)
{
	switch (id) {
	case armpll_clk:
		return &slcr_base->arm_pll_ctrl;
	case ddrpll_clk:
		return &slcr_base->ddr_pll_ctrl;
	case iopll_clk:
		return &slcr_base->io_pll_ctrl;
	case lqspi_clk:
		return &slcr_base->lqspi_clk_ctrl;
	case smc_clk:
		return &slcr_base->smc_clk_ctrl;
	case pcap_clk:
		return &slcr_base->pcap_clk_ctrl;
	case sdio0_clk ... sdio1_clk:
		return &slcr_base->sdio_clk_ctrl;
	case uart0_clk ... uart1_clk:
		return &slcr_base->uart_clk_ctrl;
	case spi0_clk ... spi1_clk:
		return &slcr_base->spi_clk_ctrl;
#ifndef CONFIG_SPL_BUILD
	case dci_clk:
		return &slcr_base->dci_clk_ctrl;
	case gem0_clk:
		return &slcr_base->gem0_clk_ctrl;
	case gem1_clk:
		return &slcr_base->gem1_clk_ctrl;
	case fclk0_clk:
		return &slcr_base->fpga0_clk_ctrl;
	case fclk1_clk:
		return &slcr_base->fpga1_clk_ctrl;
	case fclk2_clk:
		return &slcr_base->fpga2_clk_ctrl;
	case fclk3_clk:
		return &slcr_base->fpga3_clk_ctrl;
	case can0_clk ... can1_clk:
		return &slcr_base->can_clk_ctrl;
	case dbg_trc_clk ... dbg_apb_clk:
		/* fall through */
#endif
	default:
		return &slcr_base->dbg_clk_ctrl;
	}
}

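/*
 * Decode the SRCSEL field of the ARM clock control register into the
 * PLL that feeds the CPU clock domain.
 */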
static enum zynq_clk zynq_clk_get_cpu_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return ddrpll_clk;
	case 3:
		return iopll_clk;
	case 0 ... 1:
	default:
		return armpll_clk;
	}
}

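/*
 * Decode the SRCSEL field of a peripheral clock control register into
 * the parent PLL; the IO PLL is the default source.
 */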
static enum zynq_clk zynq_clk_get_peripheral_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return armpll_clk;
	case 3:
		return ddrpll_clk;
	case 0 ... 1:
	default:
		return iopll_clk;
	}
}

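/*
 * Return the output rate of a PLL: 0 if it is held in reset or powered
 * down, the raw PS clock if the bypass is forced, otherwise the PS
 * clock multiplied by the feedback divider (FBDIV).
 */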
static ulong zynq_clk_get_pll_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_ctrl, reset, pwrdwn, mul, bypass;

	clk_ctrl = readl(zynq_clk_get_register(id));

	reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
	pwrdwn = (clk_ctrl & PLLCTRL_PWRDWN_MASK) >> PLLCTRL_PWRDWN_SHIFT;
	if (reset || pwrdwn)
		return 0;

	bypass = clk_ctrl & PLLCTRL_BPFORCE_MASK;
	if (bypass)
		mul = 1;
	else
		mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;

	return priv->ps_clk_freq * mul;
}

#ifndef CONFIG_SPL_BUILD
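/*
 * Report whether a GEM receive clock is sourced from MIO or EMIO, based
 * on the SRCSEL bits of the corresponding gemN_rclk_ctrl register.
 */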
static enum zynq_clk_rclk zynq_clk_get_gem_rclk(enum zynq_clk id)
{
	u32 clk_ctrl, srcsel;

	if (id == gem0_clk)
		clk_ctrl = readl(&slcr_base->gem0_rclk_ctrl);
	else
		clk_ctrl = readl(&slcr_base->gem1_rclk_ctrl);

	srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
	if (srcsel)
		return emio_clk;
	else
		return mio_clk;
}
#endif

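/*
 * Compute a CPU domain rate from the ARM clock control register: the
 * 6or4x clock is the selected PLL divided by DIVISOR, 3or2x is half of
 * that, the 2x clock divides the 6or4x rate by three (6:2:1 mode) or
 * two (4:2:1 mode) as indicated by CLK_621_TRUE, and 1x is half of 2x.
 */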
static ulong zynq_clk_get_cpu_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_621, clk_ctrl, div;
	enum zynq_clk pll;

	clk_ctrl = readl(&slcr_base->arm_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	switch (id) {
	case cpu_1x_clk:
		div *= 2;
		/* fall through */
	case cpu_2x_clk:
		clk_621 = readl(&slcr_base->clk_621_true) & 1;
		div *= 2 + clk_621;
		break;
	case cpu_3or2x_clk:
		div *= 2;
		/* fall through */
	case cpu_6or4x_clk:
		break;
	default:
		return 0;
	}

	pll = zynq_clk_get_cpu_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, pll), div);
}

#ifndef CONFIG_SPL_BUILD
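/* DDR_2X rate: DDR PLL output divided by the DIV2X field of ddr_clk_ctrl. */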
static ulong zynq_clk_get_ddr2x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV2X_MASK) >> CLK_CTRL_DIV2X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}
#endif

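/* DDR_3X rate: DDR PLL output divided by the DIV3X field of ddr_clk_ctrl. */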
static ulong zynq_clk_get_ddr3x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV3X_MASK) >> CLK_CTRL_DIV3X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}

#ifndef CONFIG_SPL_BUILD
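/* DCI rate: DDR PLL output divided by both divisor fields of dci_clk_ctrl. */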
static ulong zynq_clk_get_dci_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div0, div1;

	clk_ctrl = readl(&slcr_base->dci_clk_ctrl);

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
		zynq_clk_get_pll_rate(priv, ddrpll_clk), div0), div1);
}
#endif

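/*
 * Generic peripheral rate: parent PLL divided by DIVISOR0 and, for
 * clocks with two divisor fields, by DIVISOR1 as well. Zero divisors
 * are treated as one; the second divisor is ignored in SPL builds.
 */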
static ulong zynq_clk_get_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0;
	u32 div1 = 1;

	clk_ctrl = readl(zynq_clk_get_register(id));

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;

#ifndef CONFIG_SPL_BUILD
	if (two_divs) {
		div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
		if (!div1)
			div1 = 1;
	}
#endif

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);

	return
		DIV_ROUND_CLOSEST(
			DIV_ROUND_CLOSEST(
				zynq_clk_get_pll_rate(priv, pll), div0),
			div1);
}

#ifndef CONFIG_SPL_BUILD
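/*
 * GEM rate: an MIO-sourced GEM clock is a normal two-divisor peripheral
 * clock; an EMIO-sourced one is taken from the external clock looked up
 * in probe(), or -ENOSYS if no such clock was provided.
 */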
static ulong zynq_clk_get_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_get_peripheral_rate(priv, id, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_get_rate(parent);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}

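/*
 * Exhaustively search both divisor fields for the pair that gets
 * closest to the requested rate; the winning divisors are returned
 * through div0/div1 and the achievable rate is the return value.
 */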
static unsigned long zynq_clk_calc_peripheral_two_divs(ulong rate,
						       ulong pll_rate,
						       u32 *div0, u32 *div1)
{
	long new_err, best_err = (long)(~0UL >> 1);
	ulong new_rate, best_rate = 0;
	u32 d0, d1;

	for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
		for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
			new_rate = DIV_ROUND_CLOSEST(
					DIV_ROUND_CLOSEST(pll_rate, d0), d1);
			new_err = abs(new_rate - rate);

			if (new_err < best_err) {
				*div0 = d0;
				*div1 = d1;
				best_err = new_err;
				best_rate = new_rate;
			}
		}
	}

	return best_rate;
}

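/*
 * Program a peripheral clock: recompute the divisor(s) for the current
 * parent PLL, write them back with the SLCR unlocked, and return the
 * rate that was actually programmed.
 */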
static ulong zynq_clk_set_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, ulong rate,
					  bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0 = 0, div1 = 0;
	ulong pll_rate, new_rate;
	u32 *reg;

	reg = zynq_clk_get_register(id);
	clk_ctrl = readl(reg);

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);
	pll_rate = zynq_clk_get_pll_rate(priv, pll);
	clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
	if (two_divs) {
		clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
		new_rate = zynq_clk_calc_peripheral_two_divs(rate, pll_rate,
				&div0, &div1);
		clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
	} else {
		div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
		if (div0 > ZYNQ_CLK_MAXDIV)
			div0 = ZYNQ_CLK_MAXDIV;
		/* the resulting rate is the PLL rate over the divisor */
		new_rate = DIV_ROUND_CLOSEST(pll_rate, div0);
	}
	clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;

	zynq_slcr_unlock();
	writel(clk_ctrl, reg);
	zynq_slcr_lock();

	return new_rate;
}

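/*
 * Set a GEM clock rate: MIO-sourced clocks are programmed through the
 * SLCR divisors, EMIO-sourced ones are forwarded to the external clock
 * bound in probe(), or -ENOSYS if none is available.
 */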
static ulong zynq_clk_set_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id,
				   ulong rate)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_set_peripheral_rate(priv, id, rate, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_set_rate(parent, rate);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}
#endif

#ifndef CONFIG_SPL_BUILD
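/*
 * clk_ops .get_rate: dispatch a clock ID to the matching rate helper.
 * The DMA clock runs at the CPU 2x rate and the AMBA peripheral (APER)
 * clocks at the CPU 1x rate; unknown IDs return -ENXIO.
 */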
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case armpll_clk ... iopll_clk:
		return zynq_clk_get_pll_rate(priv, id);
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr2x_clk:
		return zynq_clk_get_ddr2x_rate(priv);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case dci_clk:
		return zynq_clk_get_dci_rate(priv);
	case gem0_clk ... gem1_clk:
		return zynq_clk_get_gem_rate(priv, id);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case dbg_trc_clk ... dbg_apb_clk:
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, two_divs);
	case dma_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_2x_clk);
	case usb0_aper_clk ... smc_aper_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}

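/*
 * clk_ops .set_rate: only the GEM and the generic peripheral clocks are
 * writable; everything else returns -ENXIO.
 */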
static ulong zynq_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case gem0_clk ... gem1_clk:
		return zynq_clk_set_gem_rate(priv, id, rate);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
	case dbg_trc_clk ... dbg_apb_clk:
		return zynq_clk_set_peripheral_rate(priv, id, rate, two_divs);
	default:
		return -ENXIO;
	}
}
#else
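/*
 * Reduced .get_rate for SPL builds: only the CPU, DDR_3X and the
 * single-divisor peripheral clocks are handled.
 */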
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;

	switch (id) {
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, 0);
	default:
		return -ENXIO;
	}
}
#endif

static struct clk_ops zynq_clk_ops = {
	.get_rate = zynq_clk_get_rate,
#ifndef CONFIG_SPL_BUILD
	.set_rate = zynq_clk_set_rate,
#endif
};

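/*
 * Probe: bind the optional gem0/gem1 EMIO input clocks (a missing clock
 * is not an error) and read the PS reference clock frequency from the
 * device tree, defaulting to 33.333333 MHz.
 */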
static int zynq_clk_probe(struct udevice *dev)
{
	struct zynq_clk_priv *priv = dev_get_priv(dev);
#ifndef CONFIG_SPL_BUILD
	unsigned int i;
	char name[16];
	int ret;

	for (i = 0; i < 2; i++) {
		sprintf(name, "gem%d_emio_clk", i);
		ret = clk_get_by_name(dev, name, &priv->gem_emio_clk[i]);
		if (ret < 0 && ret != -ENODATA) {
			dev_err(dev, "failed to get %s clock\n", name);
			return ret;
		}
	}
#endif

	priv->ps_clk_freq = fdtdec_get_uint(gd->fdt_blob, dev_of_offset(dev),
					    "ps-clk-frequency", 33333333UL);

	return 0;
}

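/*
 * Minimal device-tree sketch for this driver. Only the "xlnx,ps7-clkc"
 * compatible, the "ps-clk-frequency" property and the gem0_emio_clk
 * clock name are taken from the code above; the node name, the
 * #clock-cells value and the referenced EMIO clock provider are
 * placeholder assumptions for illustration:
 *
 *	clkc: clkc {
 *		compatible = "xlnx,ps7-clkc";
 *		#clock-cells = <1>;
 *		ps-clk-frequency = <33333333>;
 *		clocks = <&gem0_emio_clk>;
 *		clock-names = "gem0_emio_clk";
 *	};
 */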
static const struct udevice_id zynq_clk_ids[] = {
	{ .compatible = "xlnx,ps7-clkc"},
	{}
};

U_BOOT_DRIVER(zynq_clk) = {
	.name		= "zynq_clk",
	.id		= UCLASS_CLK,
	.of_match	= zynq_clk_ids,
	.flags		= DM_FLAG_PRE_RELOC,
	.ops		= &zynq_clk_ops,
	.priv_auto_alloc_size = sizeof(struct zynq_clk_priv),
	.probe		= zynq_clk_probe,
};