// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright 2017-2018 NXP.
 */

#include <linux/bits.h>
#include <linux/clk-provider.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/slab.h>
#include <linux/jiffies.h>

#include "clk.h"

#define GNRL_CTL	0x0
#define DIV_CTL		0x4
#define LOCK_STATUS	BIT(31)
#define LOCK_SEL_MASK	BIT(29)
#define CLKE_MASK	BIT(11)
#define RST_MASK	BIT(9)
#define BYPASS_MASK	BIT(4)
#define MDIV_SHIFT	12
#define MDIV_MASK	GENMASK(21, 12)
#define PDIV_SHIFT	4
#define PDIV_MASK	GENMASK(9, 4)
#define SDIV_SHIFT	0
#define SDIV_MASK	GENMASK(2, 0)
#define KDIV_SHIFT	0
#define KDIV_MASK	GENMASK(15, 0)

#define LOCK_TIMEOUT_US		10000
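
/*
 * Output frequency as computed by the recalc_rate callbacks below
 * (m = MDIV, p = PDIV, s = SDIV, k = signed 16-bit KDIV):
 *
 *	PLL1416x (integer):	Fout = Fin * m / (p * 2^s)
 *	PLL1443x (fractional):	Fout = Fin * (m + k / 65536) / (p * 2^s)
 */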

struct clk_pll14xx {
	struct clk_hw			hw;
	void __iomem			*base;
	enum imx_pll14xx_type		type;
	const struct imx_pll14xx_rate_table *rate_table;
	int rate_count;
};

#define to_clk_pll14xx(_hw) container_of(_hw, struct clk_pll14xx, hw)

static const struct imx_pll14xx_rate_table imx_pll1416x_tbl[] = {
	PLL_1416X_RATE(1800000000U, 225, 3, 0),
	PLL_1416X_RATE(1600000000U, 200, 3, 0),
	PLL_1416X_RATE(1500000000U, 375, 3, 1),
	PLL_1416X_RATE(1400000000U, 350, 3, 1),
	PLL_1416X_RATE(1200000000U, 300, 3, 1),
	PLL_1416X_RATE(1000000000U, 250, 3, 1),
	PLL_1416X_RATE(800000000U,  200, 3, 1),
	PLL_1416X_RATE(750000000U,  250, 2, 2),
	PLL_1416X_RATE(700000000U,  350, 3, 2),
	PLL_1416X_RATE(600000000U,  300, 3, 2),
};
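/*
 * Worked example for the table above, assuming the usual 24 MHz reference
 * clock on i.MX8M parts (the reference rate is not fixed by this driver):
 * PLL_1416X_RATE(1800000000U, 225, 3, 0) yields
 * 24 MHz * 225 / (3 * 2^0) = 1800 MHz.
 */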

static const struct imx_pll14xx_rate_table imx_pll1443x_tbl[] = {
	PLL_1443X_RATE(1039500000U, 173, 2, 1, 16384),
	PLL_1443X_RATE(650000000U, 325, 3, 2, 0),
	PLL_1443X_RATE(594000000U, 198, 2, 2, 0),
	PLL_1443X_RATE(519750000U, 173, 2, 2, 16384),
	PLL_1443X_RATE(393216000U, 262, 2, 3, 9437),
	PLL_1443X_RATE(361267200U, 361, 3, 3, 17511),
};
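/*
 * Worked example for the fractional table above, again assuming a 24 MHz
 * reference: PLL_1443X_RATE(519750000U, 173, 2, 2, 16384) yields
 * 24 MHz * (173 + 16384 / 65536) / (2 * 2^2) = 24 MHz * 173.25 / 8
 * = 519.75 MHz exactly. Rates such as 393216000 are only approximated
 * (to within a few Hz) by the 16-bit k value.
 */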

struct imx_pll14xx_clk imx_1443x_pll = {
	.type = PLL_1443X,
	.rate_table = imx_pll1443x_tbl,
	.rate_count = ARRAY_SIZE(imx_pll1443x_tbl),
};
EXPORT_SYMBOL_GPL(imx_1443x_pll);

struct imx_pll14xx_clk imx_1443x_dram_pll = {
	.type = PLL_1443X,
	.rate_table = imx_pll1443x_tbl,
	.rate_count = ARRAY_SIZE(imx_pll1443x_tbl),
	.flags = CLK_GET_RATE_NOCACHE,
};
EXPORT_SYMBOL_GPL(imx_1443x_dram_pll);

struct imx_pll14xx_clk imx_1416x_pll = {
	.type = PLL_1416X,
	.rate_table = imx_pll1416x_tbl,
	.rate_count = ARRAY_SIZE(imx_pll1416x_tbl),
};
EXPORT_SYMBOL_GPL(imx_1416x_pll);

static const struct imx_pll14xx_rate_table *imx_get_pll_settings(
		struct clk_pll14xx *pll, unsigned long rate)
{
	const struct imx_pll14xx_rate_table *rate_table = pll->rate_table;
	int i;

	for (i = 0; i < pll->rate_count; i++)
		if (rate == rate_table[i].rate)
			return &rate_table[i];

	return NULL;
}

static long clk_pll14xx_round_rate(struct clk_hw *hw, unsigned long rate,
			unsigned long *prate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	const struct imx_pll14xx_rate_table *rate_table = pll->rate_table;
	int i;

	/* Assuming rate_table is in descending order */
	for (i = 0; i < pll->rate_count; i++)
		if (rate >= rate_table[i].rate)
			return rate_table[i].rate;

	/* return minimum supported value */
	return rate_table[i - 1].rate;
}

static unsigned long clk_pll1416x_recalc_rate(struct clk_hw *hw,
						  unsigned long parent_rate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 mdiv, pdiv, sdiv, pll_div;
	u64 fvco = parent_rate;

	pll_div = readl_relaxed(pll->base + 4);
	mdiv = (pll_div & MDIV_MASK) >> MDIV_SHIFT;
	pdiv = (pll_div & PDIV_MASK) >> PDIV_SHIFT;
	sdiv = (pll_div & SDIV_MASK) >> SDIV_SHIFT;

	fvco *= mdiv;
	do_div(fvco, pdiv << sdiv);

	return fvco;
}

static unsigned long clk_pll1443x_recalc_rate(struct clk_hw *hw,
						  unsigned long parent_rate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 mdiv, pdiv, sdiv, pll_div_ctl0, pll_div_ctl1;
	short int kdiv;
	u64 fvco = parent_rate;

	pll_div_ctl0 = readl_relaxed(pll->base + 4);
	pll_div_ctl1 = readl_relaxed(pll->base + 8);
	mdiv = (pll_div_ctl0 & MDIV_MASK) >> MDIV_SHIFT;
	pdiv = (pll_div_ctl0 & PDIV_MASK) >> PDIV_SHIFT;
	sdiv = (pll_div_ctl0 & SDIV_MASK) >> SDIV_SHIFT;
	kdiv = pll_div_ctl1 & KDIV_MASK;

	/* fvco = (m * 65536 + k) * Fin / (p * 65536) */
	fvco *= (mdiv * 65536 + kdiv);
	pdiv *= 65536;

	do_div(fvco, pdiv << sdiv);

	return fvco;
}

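/*
 * Only a change of MDIV or PDIV requires the full reset-and-relock
 * sequence in the set_rate callbacks below; SDIV (and KDIV on the 1443x)
 * can be rewritten on the fly without waiting for the PLL to relock.
 */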
static inline bool clk_pll14xx_mp_change(const struct imx_pll14xx_rate_table *rate,
					  u32 pll_div)
{
	u32 old_mdiv, old_pdiv;

	old_mdiv = (pll_div & MDIV_MASK) >> MDIV_SHIFT;
	old_pdiv = (pll_div & PDIV_MASK) >> PDIV_SHIFT;

	return rate->mdiv != old_mdiv || rate->pdiv != old_pdiv;
}

static int clk_pll14xx_wait_lock(struct clk_pll14xx *pll)
{
	u32 val;

	return readl_poll_timeout(pll->base, val, val & LOCK_STATUS, 0,
			LOCK_TIMEOUT_US);
}

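/*
 * Full reprogramming sequence shared by both set_rate implementations:
 * assert reset (RST bit cleared) and bypass the PLL, write the new
 * divider values, wait at least 3 us, release reset, poll LOCK_STATUS
 * and finally drop the bypass. The 1416x variant additionally selects
 * the PLL output lock via LOCK_SEL_MASK first.
 */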
static int clk_pll1416x_set_rate(struct clk_hw *hw, unsigned long drate,
				 unsigned long prate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	const struct imx_pll14xx_rate_table *rate;
	u32 tmp, div_val;
	int ret;

	rate = imx_get_pll_settings(pll, drate);
	if (!rate) {
		pr_err("%s: Invalid rate : %lu for pll clk %s\n", __func__,
		       drate, clk_hw_get_name(hw));
		return -EINVAL;
	}

	tmp = readl_relaxed(pll->base + 4);

	if (!clk_pll14xx_mp_change(rate, tmp)) {
		tmp &= ~(SDIV_MASK) << SDIV_SHIFT;
		tmp |= rate->sdiv << SDIV_SHIFT;
		writel_relaxed(tmp, pll->base + 4);

		return 0;
	}

	/* Bypass clock and set lock to pll output lock */
	tmp = readl_relaxed(pll->base);
	tmp |= LOCK_SEL_MASK;
	writel_relaxed(tmp, pll->base);

	/* Enable RST */
	tmp &= ~RST_MASK;
	writel_relaxed(tmp, pll->base);

	/* Enable BYPASS */
	tmp |= BYPASS_MASK;
	writel(tmp, pll->base);

	div_val = (rate->mdiv << MDIV_SHIFT) | (rate->pdiv << PDIV_SHIFT) |
		(rate->sdiv << SDIV_SHIFT);
	writel_relaxed(div_val, pll->base + 0x4);

	/*
	 * According to SPEC, t3 - t2 need to be greater than
	 * 1us and 1/FREF, respectively.
	 * FREF is FIN / Prediv, the prediv is [1, 63], so choose
	 * 3us.
	 */
	udelay(3);

	/* Disable RST */
	tmp |= RST_MASK;
	writel_relaxed(tmp, pll->base);

	/* Wait Lock */
	ret = clk_pll14xx_wait_lock(pll);
	if (ret)
		return ret;

	/* Bypass */
	tmp &= ~BYPASS_MASK;
	writel_relaxed(tmp, pll->base);

	return 0;
}

static int clk_pll1443x_set_rate(struct clk_hw *hw, unsigned long drate,
				 unsigned long prate)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	const struct imx_pll14xx_rate_table *rate;
	u32 tmp, div_val;
	int ret;

	rate = imx_get_pll_settings(pll, drate);
	if (!rate) {
		pr_err("%s: Invalid rate : %lu for pll clk %s\n", __func__,
			drate, clk_hw_get_name(hw));
		return -EINVAL;
	}

	tmp = readl_relaxed(pll->base + 4);

	if (!clk_pll14xx_mp_change(rate, tmp)) {
		tmp &= ~(SDIV_MASK) << SDIV_SHIFT;
		tmp |= rate->sdiv << SDIV_SHIFT;
		writel_relaxed(tmp, pll->base + 4);

		tmp = rate->kdiv << KDIV_SHIFT;
		writel_relaxed(tmp, pll->base + 8);

		return 0;
	}

	/* Enable RST */
	tmp = readl_relaxed(pll->base);
	tmp &= ~RST_MASK;
	writel_relaxed(tmp, pll->base);

	/* Enable BYPASS */
	tmp |= BYPASS_MASK;
	writel_relaxed(tmp, pll->base);

	div_val = (rate->mdiv << MDIV_SHIFT) | (rate->pdiv << PDIV_SHIFT) |
		(rate->sdiv << SDIV_SHIFT);
	writel_relaxed(div_val, pll->base + 0x4);
	writel_relaxed(rate->kdiv << KDIV_SHIFT, pll->base + 0x8);

	/*
	 * According to SPEC, t3 - t2 need to be greater than
	 * 1us and 1/FREF, respectively.
	 * FREF is FIN / Prediv, the prediv is [1, 63], so choose
	 * 3us.
	 */
	udelay(3);

	/* Disable RST */
	tmp |= RST_MASK;
	writel_relaxed(tmp, pll->base);

	/* Wait Lock */
	ret = clk_pll14xx_wait_lock(pll);
	if (ret)
		return ret;

	/* Bypass */
	tmp &= ~BYPASS_MASK;
	writel_relaxed(tmp, pll->base);

	return 0;
}

static int clk_pll14xx_prepare(struct clk_hw *hw)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 val;
	int ret;

	/*
	 * RESETB = 1 from 0, PLL starts its normal
	 * operation after lock time
	 */
	val = readl_relaxed(pll->base + GNRL_CTL);
	if (val & RST_MASK)
		return 0;
	val |= BYPASS_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);
	val |= RST_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);

	ret = clk_pll14xx_wait_lock(pll);
	if (ret)
		return ret;

	val &= ~BYPASS_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);

	return 0;
}

static int clk_pll14xx_is_prepared(struct clk_hw *hw)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 val;

	val = readl_relaxed(pll->base + GNRL_CTL);

	return (val & RST_MASK) ? 1 : 0;
}

static void clk_pll14xx_unprepare(struct clk_hw *hw)
{
	struct clk_pll14xx *pll = to_clk_pll14xx(hw);
	u32 val;

	/*
	 * Set RST to 0, power down mode is enabled and
	 * every digital block is reset
	 */
	val = readl_relaxed(pll->base + GNRL_CTL);
	val &= ~RST_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);
}

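/*
 * clk_pll1416x_min_ops is the read-only variant used when a 1416x PLL is
 * registered without a rate table: the clock can then only report its
 * current rate, it cannot be reprogrammed or gated.
 */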
static const struct clk_ops clk_pll1416x_ops = {
	.prepare	= clk_pll14xx_prepare,
	.unprepare	= clk_pll14xx_unprepare,
	.is_prepared	= clk_pll14xx_is_prepared,
	.recalc_rate	= clk_pll1416x_recalc_rate,
	.round_rate	= clk_pll14xx_round_rate,
	.set_rate	= clk_pll1416x_set_rate,
};

static const struct clk_ops clk_pll1416x_min_ops = {
	.recalc_rate	= clk_pll1416x_recalc_rate,
};

static const struct clk_ops clk_pll1443x_ops = {
	.prepare	= clk_pll14xx_prepare,
	.unprepare	= clk_pll14xx_unprepare,
	.is_prepared	= clk_pll14xx_is_prepared,
	.recalc_rate	= clk_pll1443x_recalc_rate,
	.round_rate	= clk_pll14xx_round_rate,
	.set_rate	= clk_pll1443x_set_rate,
};

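/*
 * Register a PLL14xx as a clk_hw. A SoC clock driver typically passes the
 * ioremapped PLL register base together with one of the descriptors
 * exported above, along the lines of (illustrative sketch, names borrowed
 * from the i.MX8M platform drivers, not defined in this file):
 *
 *	hw = imx_dev_clk_hw_pll14xx(dev, "audio_pll1", "osc_24m",
 *				    base, &imx_1443x_pll);
 *
 * On failure an ERR_PTR() value is returned and must not be dereferenced.
 */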
struct clk_hw *imx_dev_clk_hw_pll14xx(struct device *dev, const char *name,
				const char *parent_name, void __iomem *base,
				const struct imx_pll14xx_clk *pll_clk)
{
	struct clk_pll14xx *pll;
	struct clk_hw *hw;
	struct clk_init_data init;
	int ret;
	u32 val;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = pll_clk->flags;
	init.parent_names = &parent_name;
	init.num_parents = 1;

	switch (pll_clk->type) {
	case PLL_1416X:
		if (!pll_clk->rate_table)
			init.ops = &clk_pll1416x_min_ops;
		else
			init.ops = &clk_pll1416x_ops;
		break;
	case PLL_1443X:
		init.ops = &clk_pll1443x_ops;
		break;
	default:
		pr_err("%s: Unknown pll type for pll clk %s\n",
		       __func__, name);
		kfree(pll);
		return ERR_PTR(-EINVAL);
	}

	pll->base = base;
	pll->hw.init = &init;
	pll->type = pll_clk->type;
	pll->rate_table = pll_clk->rate_table;
	pll->rate_count = pll_clk->rate_count;

	val = readl_relaxed(pll->base + GNRL_CTL);
	val &= ~BYPASS_MASK;
	writel_relaxed(val, pll->base + GNRL_CTL);

	hw = &pll->hw;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		pr_err("%s: failed to register pll %s %d\n",
			__func__, name, ret);
		kfree(pll);
		return ERR_PTR(ret);
	}

	return hw;
}
EXPORT_SYMBOL_GPL(imx_dev_clk_hw_pll14xx);