xref: /OK3568_Linux_fs/kernel/drivers/clk/rockchip/clk-half-divider.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Copyright (c) 2018 Fuzhou Rockchip Electronics Co., Ltd
4*4882a593Smuzhiyun  */
5*4882a593Smuzhiyun 
6*4882a593Smuzhiyun #include <linux/clk-provider.h>
7*4882a593Smuzhiyun #include <linux/io.h>
8*4882a593Smuzhiyun #include <linux/slab.h>
9*4882a593Smuzhiyun #include "clk.h"
10*4882a593Smuzhiyun 
11*4882a593Smuzhiyun #define div_mask(width)	((1 << (width)) - 1)
12*4882a593Smuzhiyun 
_is_best_half_div(unsigned long rate,unsigned long now,unsigned long best,unsigned long flags)13*4882a593Smuzhiyun static bool _is_best_half_div(unsigned long rate, unsigned long now,
14*4882a593Smuzhiyun 			      unsigned long best, unsigned long flags)
15*4882a593Smuzhiyun {
16*4882a593Smuzhiyun 	if (flags & CLK_DIVIDER_ROUND_CLOSEST)
17*4882a593Smuzhiyun 		return abs(rate - now) <= abs(rate - best);
18*4882a593Smuzhiyun 
19*4882a593Smuzhiyun 	return now <= rate && now >= best;
20*4882a593Smuzhiyun }
21*4882a593Smuzhiyun 
clk_half_divider_recalc_rate(struct clk_hw * hw,unsigned long parent_rate)22*4882a593Smuzhiyun static unsigned long clk_half_divider_recalc_rate(struct clk_hw *hw,
23*4882a593Smuzhiyun 						  unsigned long parent_rate)
24*4882a593Smuzhiyun {
25*4882a593Smuzhiyun 	struct clk_divider *divider = to_clk_divider(hw);
26*4882a593Smuzhiyun 	unsigned int val;
27*4882a593Smuzhiyun 
28*4882a593Smuzhiyun 	val = readl(divider->reg) >> divider->shift;
29*4882a593Smuzhiyun 	val &= div_mask(divider->width);
30*4882a593Smuzhiyun 	val = val * 2 + 3;
31*4882a593Smuzhiyun 
32*4882a593Smuzhiyun 	return DIV_ROUND_UP_ULL(((u64)parent_rate * 2), val);
33*4882a593Smuzhiyun }
34*4882a593Smuzhiyun 
/*
 * Find the divider field value that best realizes @rate, optionally
 * re-rounding the parent rate when CLK_SET_RATE_PARENT is set.
 * Returns the field value; *best_parent_rate is updated accordingly.
 */
static int clk_half_divider_bestdiv(struct clk_hw *hw, unsigned long rate,
				    unsigned long *best_parent_rate, u8 width,
				    unsigned long flags)
{
	unsigned long parent_rate, maxdiv, now, best = 0;
	unsigned int i, bestdiv = 0;
	bool found = false;

	if (!rate)
		rate = 1;

	maxdiv = div_mask(width);

	if (!(clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT)) {
		/*
		 * Parent rate is fixed: invert rate = parent * 2 / (2n + 3)
		 * for n, clamping to the field width. Divisors below 3
		 * halves cannot be represented, hence field value 0.
		 */
		parent_rate = *best_parent_rate;
		bestdiv = DIV_ROUND_UP_ULL((u64)parent_rate * 2, rate);
		bestdiv = bestdiv < 3 ? 0 : DIV_ROUND_UP(bestdiv - 3, 2);
		return min_t(unsigned int, bestdiv, maxdiv);
	}

	/*
	 * The maximum divider we can use without overflowing
	 * unsigned long in rate * i below.
	 */
	maxdiv = min(ULONG_MAX / rate, maxdiv);

	for (i = 0; i <= maxdiv; i++) {
		/* Ideal parent rate for field i, as the parent can give it. */
		parent_rate = clk_hw_round_rate(clk_hw_get_parent(hw),
						((u64)rate * (i * 2 + 3)) / 2);
		now = DIV_ROUND_UP_ULL((u64)parent_rate * 2, i * 2 + 3);

		if (_is_best_half_div(rate, now, best, flags)) {
			found = true;
			bestdiv = i;
			best = now;
			*best_parent_rate = parent_rate;
		}
	}

	if (!found) {
		/* Nothing matched: fall back to the largest divisor. */
		bestdiv = div_mask(width);
		*best_parent_rate = clk_hw_round_rate(clk_hw_get_parent(hw), 1);
	}

	return bestdiv;
}
86*4882a593Smuzhiyun 
clk_half_divider_round_rate(struct clk_hw * hw,unsigned long rate,unsigned long * prate)87*4882a593Smuzhiyun static long clk_half_divider_round_rate(struct clk_hw *hw, unsigned long rate,
88*4882a593Smuzhiyun 					unsigned long *prate)
89*4882a593Smuzhiyun {
90*4882a593Smuzhiyun 	struct clk_divider *divider = to_clk_divider(hw);
91*4882a593Smuzhiyun 	int div;
92*4882a593Smuzhiyun 
93*4882a593Smuzhiyun 	div = clk_half_divider_bestdiv(hw, rate, prate,
94*4882a593Smuzhiyun 				       divider->width,
95*4882a593Smuzhiyun 				       divider->flags);
96*4882a593Smuzhiyun 
97*4882a593Smuzhiyun 	return DIV_ROUND_UP_ULL(((u64)*prate * 2), div * 2 + 3);
98*4882a593Smuzhiyun }
99*4882a593Smuzhiyun 
clk_half_divider_set_rate(struct clk_hw * hw,unsigned long rate,unsigned long parent_rate)100*4882a593Smuzhiyun static int clk_half_divider_set_rate(struct clk_hw *hw, unsigned long rate,
101*4882a593Smuzhiyun 				     unsigned long parent_rate)
102*4882a593Smuzhiyun {
103*4882a593Smuzhiyun 	struct clk_divider *divider = to_clk_divider(hw);
104*4882a593Smuzhiyun 	unsigned int value;
105*4882a593Smuzhiyun 	unsigned long flags = 0;
106*4882a593Smuzhiyun 	u32 val;
107*4882a593Smuzhiyun 
108*4882a593Smuzhiyun 	value = DIV_ROUND_UP_ULL(((u64)parent_rate * 2), rate);
109*4882a593Smuzhiyun 	value = DIV_ROUND_UP(value - 3, 2);
110*4882a593Smuzhiyun 	value =  min_t(unsigned int, value, div_mask(divider->width));
111*4882a593Smuzhiyun 
112*4882a593Smuzhiyun 	if (divider->lock)
113*4882a593Smuzhiyun 		spin_lock_irqsave(divider->lock, flags);
114*4882a593Smuzhiyun 	else
115*4882a593Smuzhiyun 		__acquire(divider->lock);
116*4882a593Smuzhiyun 
117*4882a593Smuzhiyun 	if (divider->flags & CLK_DIVIDER_HIWORD_MASK) {
118*4882a593Smuzhiyun 		val = div_mask(divider->width) << (divider->shift + 16);
119*4882a593Smuzhiyun 	} else {
120*4882a593Smuzhiyun 		val = readl(divider->reg);
121*4882a593Smuzhiyun 		val &= ~(div_mask(divider->width) << divider->shift);
122*4882a593Smuzhiyun 	}
123*4882a593Smuzhiyun 	val |= value << divider->shift;
124*4882a593Smuzhiyun 	writel(val, divider->reg);
125*4882a593Smuzhiyun 
126*4882a593Smuzhiyun 	if (divider->lock)
127*4882a593Smuzhiyun 		spin_unlock_irqrestore(divider->lock, flags);
128*4882a593Smuzhiyun 	else
129*4882a593Smuzhiyun 		__release(divider->lock);
130*4882a593Smuzhiyun 
131*4882a593Smuzhiyun 	return 0;
132*4882a593Smuzhiyun }
133*4882a593Smuzhiyun 
134*4882a593Smuzhiyun static const struct clk_ops clk_half_divider_ops = {
135*4882a593Smuzhiyun 	.recalc_rate = clk_half_divider_recalc_rate,
136*4882a593Smuzhiyun 	.round_rate = clk_half_divider_round_rate,
137*4882a593Smuzhiyun 	.set_rate = clk_half_divider_set_rate,
138*4882a593Smuzhiyun };
139*4882a593Smuzhiyun 
140*4882a593Smuzhiyun /**
141*4882a593Smuzhiyun  * Register a clock branch.
142*4882a593Smuzhiyun  * Most clock branches have a form like
143*4882a593Smuzhiyun  *
144*4882a593Smuzhiyun  * src1 --|--\
145*4882a593Smuzhiyun  *        |M |--[GATE]-[DIV]-
146*4882a593Smuzhiyun  * src2 --|--/
147*4882a593Smuzhiyun  *
148*4882a593Smuzhiyun  * sometimes without one of those components.
149*4882a593Smuzhiyun  */
rockchip_clk_register_halfdiv(const char * name,const char * const * parent_names,u8 num_parents,void __iomem * base,int muxdiv_offset,u8 mux_shift,u8 mux_width,u8 mux_flags,int div_offset,u8 div_shift,u8 div_width,u8 div_flags,int gate_offset,u8 gate_shift,u8 gate_flags,unsigned long flags,spinlock_t * lock)150*4882a593Smuzhiyun struct clk *rockchip_clk_register_halfdiv(const char *name,
151*4882a593Smuzhiyun 					  const char *const *parent_names,
152*4882a593Smuzhiyun 					  u8 num_parents, void __iomem *base,
153*4882a593Smuzhiyun 					  int muxdiv_offset, u8 mux_shift,
154*4882a593Smuzhiyun 					  u8 mux_width, u8 mux_flags,
155*4882a593Smuzhiyun 					  int div_offset, u8 div_shift,
156*4882a593Smuzhiyun 					  u8 div_width, u8 div_flags,
157*4882a593Smuzhiyun 					  int gate_offset, u8 gate_shift,
158*4882a593Smuzhiyun 					  u8 gate_flags, unsigned long flags,
159*4882a593Smuzhiyun 					  spinlock_t *lock)
160*4882a593Smuzhiyun {
161*4882a593Smuzhiyun 	struct clk_hw *hw = ERR_PTR(-ENOMEM);
162*4882a593Smuzhiyun 	struct clk_mux *mux = NULL;
163*4882a593Smuzhiyun 	struct clk_gate *gate = NULL;
164*4882a593Smuzhiyun 	struct clk_divider *div = NULL;
165*4882a593Smuzhiyun 	const struct clk_ops *mux_ops = NULL, *div_ops = NULL,
166*4882a593Smuzhiyun 			     *gate_ops = NULL;
167*4882a593Smuzhiyun 
168*4882a593Smuzhiyun 	if (num_parents > 1) {
169*4882a593Smuzhiyun 		mux = kzalloc(sizeof(*mux), GFP_KERNEL);
170*4882a593Smuzhiyun 		if (!mux)
171*4882a593Smuzhiyun 			return ERR_PTR(-ENOMEM);
172*4882a593Smuzhiyun 
173*4882a593Smuzhiyun 		mux->reg = base + muxdiv_offset;
174*4882a593Smuzhiyun 		mux->shift = mux_shift;
175*4882a593Smuzhiyun 		mux->mask = BIT(mux_width) - 1;
176*4882a593Smuzhiyun 		mux->flags = mux_flags;
177*4882a593Smuzhiyun 		mux->lock = lock;
178*4882a593Smuzhiyun 		mux_ops = (mux_flags & CLK_MUX_READ_ONLY) ? &clk_mux_ro_ops
179*4882a593Smuzhiyun 							: &clk_mux_ops;
180*4882a593Smuzhiyun 	}
181*4882a593Smuzhiyun 
182*4882a593Smuzhiyun 	if (gate_offset >= 0) {
183*4882a593Smuzhiyun 		gate = kzalloc(sizeof(*gate), GFP_KERNEL);
184*4882a593Smuzhiyun 		if (!gate)
185*4882a593Smuzhiyun 			goto err_gate;
186*4882a593Smuzhiyun 
187*4882a593Smuzhiyun 		gate->flags = gate_flags;
188*4882a593Smuzhiyun 		gate->reg = base + gate_offset;
189*4882a593Smuzhiyun 		gate->bit_idx = gate_shift;
190*4882a593Smuzhiyun 		gate->lock = lock;
191*4882a593Smuzhiyun 		gate_ops = &clk_gate_ops;
192*4882a593Smuzhiyun 	}
193*4882a593Smuzhiyun 
194*4882a593Smuzhiyun 	if (div_width > 0) {
195*4882a593Smuzhiyun 		div = kzalloc(sizeof(*div), GFP_KERNEL);
196*4882a593Smuzhiyun 		if (!div)
197*4882a593Smuzhiyun 			goto err_div;
198*4882a593Smuzhiyun 
199*4882a593Smuzhiyun 		div->flags = div_flags;
200*4882a593Smuzhiyun 		if (div_offset)
201*4882a593Smuzhiyun 			div->reg = base + div_offset;
202*4882a593Smuzhiyun 		else
203*4882a593Smuzhiyun 			div->reg = base + muxdiv_offset;
204*4882a593Smuzhiyun 		div->shift = div_shift;
205*4882a593Smuzhiyun 		div->width = div_width;
206*4882a593Smuzhiyun 		div->lock = lock;
207*4882a593Smuzhiyun 		div_ops = &clk_half_divider_ops;
208*4882a593Smuzhiyun 	}
209*4882a593Smuzhiyun 
210*4882a593Smuzhiyun 	hw = clk_hw_register_composite(NULL, name, parent_names, num_parents,
211*4882a593Smuzhiyun 				       mux ? &mux->hw : NULL, mux_ops,
212*4882a593Smuzhiyun 				       div ? &div->hw : NULL, div_ops,
213*4882a593Smuzhiyun 				       gate ? &gate->hw : NULL, gate_ops,
214*4882a593Smuzhiyun 				       flags);
215*4882a593Smuzhiyun 	if (IS_ERR(hw))
216*4882a593Smuzhiyun 		goto err_div;
217*4882a593Smuzhiyun 
218*4882a593Smuzhiyun 	return hw->clk;
219*4882a593Smuzhiyun err_div:
220*4882a593Smuzhiyun 	kfree(gate);
221*4882a593Smuzhiyun err_gate:
222*4882a593Smuzhiyun 	kfree(mux);
223*4882a593Smuzhiyun 	return ERR_CAST(hw);
224*4882a593Smuzhiyun }
225