xref: /OK3568_Linux_fs/kernel/drivers/clk/rockchip/regmap/clk-regmap-composite.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * Copyright (c) 2017 Rockchip Electronics Co. Ltd.
 *
 * Based on code in drivers/clk/clk-composite.c.
 * See clk-composite.c for further copyright information.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include "clk-regmap.h"

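/*
 * A composite clock built from up to three regmap-backed sub-clocks
 * (mux, rate and gate). The sub-clock hw pointers and their clk_ops are
 * kept here so the composite callbacks can forward each operation to
 * the matching sub-clock.
 */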
struct clk_regmap_composite {
	struct device *dev;
	struct clk_hw hw;
	struct clk_ops ops;

	struct clk_hw *mux_hw;
	struct clk_hw *rate_hw;
	struct clk_hw *gate_hw;

	const struct clk_ops *mux_ops;
	const struct clk_ops *rate_ops;
	const struct clk_ops *gate_ops;
};

#define to_clk_regmap_composite(_hw)	\
		container_of(_hw, struct clk_regmap_composite, hw)

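/*
 * Each callback below forwards the operation to the corresponding
 * sub-clock. __clk_hw_set_clk() points the sub-clock's hw at the
 * composite clock first, so the sub-clock ops act on the clock that
 * was actually registered.
 */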
static u8 clk_regmap_composite_get_parent(struct clk_hw *hw)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->get_parent(mux_hw);
}

static int clk_regmap_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->set_parent(mux_hw, index);
}

static unsigned long clk_regmap_composite_recalc_rate(struct clk_hw *hw,
						      unsigned long parent_rate)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->recalc_rate(rate_hw, parent_rate);
}

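/*
 * Rate determination prefers, in order: the rate sub-clock's own
 * .determine_rate, then its .round_rate tried against every possible
 * parent (picking the parent with the smallest deviation from the
 * requested rate, unless CLK_SET_RATE_NO_REPARENT pins the current
 * parent), and finally the mux sub-clock's .determine_rate.
 */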
static int clk_regmap_composite_determine_rate(struct clk_hw *hw,
					       struct clk_rate_request *req)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long parent_rate;
	long tmp_rate, best_rate = 0;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	long rate;
	unsigned int i;

	if (rate_hw && rate_ops && rate_ops->determine_rate) {
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (rate_hw && rate_ops && rate_ops->round_rate &&
		   mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			parent = clk_hw_get_parent(mux_hw);
			req->best_parent_hw = parent;
			req->best_parent_rate = clk_hw_get_rate(parent);

			rate = rate_ops->round_rate(rate_hw, req->rate,
						    &req->best_parent_rate);
			if (rate < 0)
				return rate;

			req->rate = rate;
			return 0;
		}

		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			parent_rate = clk_hw_get_rate(parent);

			tmp_rate = rate_ops->round_rate(rate_hw, req->rate,
							&parent_rate);
			if (tmp_rate < 0)
				continue;

			rate_diff = abs(req->rate - tmp_rate);

			if (!rate_diff || !req->best_parent_hw ||
			    best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_rate;
			}

			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		return -EINVAL;
	}

	return 0;
}

static long clk_regmap_composite_round_rate(struct clk_hw *hw,
					    unsigned long rate,
					    unsigned long *prate)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->round_rate(rate_hw, rate, prate);
}

static int clk_regmap_composite_set_rate(struct clk_hw *hw,
					 unsigned long rate,
					 unsigned long parent_rate)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->set_rate(rate_hw, rate, parent_rate);
}

static int clk_regmap_composite_is_prepared(struct clk_hw *hw)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->is_prepared(gate_hw);
}

static int clk_regmap_composite_prepare(struct clk_hw *hw)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->prepare(gate_hw);
}

static void clk_regmap_composite_unprepare(struct clk_hw *hw)
{
	struct clk_regmap_composite *composite = to_clk_regmap_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	gate_ops->unprepare(gate_hw);
}

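/*
 * devm_clk_regmap_register_composite - register a regmap-backed composite
 * clock made up of an optional mux (created when num_parents > 1), an
 * optional rate sub-clock (created when div_reg is non-zero: an integer
 * divider when CLK_DIVIDER_HIWORD_MASK is set in div_flags, otherwise a
 * fractional divider with 16-bit m/n fields) and an optional gate (created
 * when gate_reg is non-zero). All register accesses go through @regmap.
 * Returns the registered clock or an ERR_PTR() on failure.
 *
 * A minimal usage sketch; the clock name, parent list and register offsets
 * below are hypothetical:
 *
 *	clk = devm_clk_regmap_register_composite(dev, "sclk_example",
 *						  parent_names, 2, regmap,
 *						  0x0030, 14, 1,
 *						  0x0030, 0, 8,
 *						  CLK_DIVIDER_HIWORD_MASK,
 *						  0x0040, 3,
 *						  0);
 */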
struct clk *
devm_clk_regmap_register_composite(struct device *dev, const char *name,
				   const char *const *parent_names,
				   u8 num_parents, struct regmap *regmap,
				   u32 mux_reg, u8 mux_shift, u8 mux_width,
				   u32 div_reg, u8 div_shift, u8 div_width,
				   u8 div_flags,
				   u32 gate_reg, u8 gate_shift,
				   unsigned long flags)
{
	struct clk_regmap_gate *gate = NULL;
	struct clk_regmap_mux *mux = NULL;
	struct clk_regmap_divider *div = NULL;
	struct clk_regmap_fractional_divider *fd = NULL;
	const struct clk_ops *mux_ops = NULL, *div_ops = NULL, *gate_ops = NULL;
	const struct clk_ops *fd_ops = NULL;
	struct clk_hw *mux_hw = NULL, *div_hw = NULL, *gate_hw = NULL;
	struct clk_hw *fd_hw = NULL;
	struct clk *clk;
	struct clk_init_data init = {};
	struct clk_regmap_composite *composite;
	struct clk_ops *clk_composite_ops;

	if (num_parents > 1) {
		mux = devm_kzalloc(dev, sizeof(*mux), GFP_KERNEL);
		if (!mux)
			return ERR_PTR(-ENOMEM);

		mux->dev = dev;
		mux->regmap = regmap;
		mux->reg = mux_reg;
		mux->shift = mux_shift;
		mux->mask = BIT(mux_width) - 1;
		mux_ops = &clk_regmap_mux_ops;
		mux_hw = &mux->hw;
	}

	if (gate_reg > 0) {
		gate = devm_kzalloc(dev, sizeof(*gate), GFP_KERNEL);
		if (!gate)
			return ERR_PTR(-ENOMEM);

		gate->dev = dev;
		gate->regmap = regmap;
		gate->reg = gate_reg;
		gate->shift = gate_shift;
		gate_ops = &clk_regmap_gate_ops;
		gate_hw = &gate->hw;
	}

	if (div_reg > 0) {
		if (div_flags & CLK_DIVIDER_HIWORD_MASK) {
			div = devm_kzalloc(dev, sizeof(*div), GFP_KERNEL);
			if (!div)
				return ERR_PTR(-ENOMEM);

			div->dev = dev;
			div->regmap = regmap;
			div->reg = div_reg;
			div->shift = div_shift;
			div->width = div_width;
			div_ops = &clk_regmap_divider_ops;
			div_hw = &div->hw;
		} else {
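			/*
			 * Fractional divider: the m (numerator) field sits
			 * in the upper 16 bits of the register, the n
			 * (denominator) field in the lower 16 bits.
			 */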
			fd = devm_kzalloc(dev, sizeof(*fd), GFP_KERNEL);
			if (!fd)
				return ERR_PTR(-ENOMEM);

			fd->dev = dev;
			fd->regmap = regmap;
			fd->reg = div_reg;
			fd->mshift = 16;
			fd->mwidth = 16;
			fd->mmask = GENMASK(fd->mwidth - 1, 0) << fd->mshift;
			fd->nshift = 0;
			fd->nwidth = 16;
			fd->nmask = GENMASK(fd->nwidth - 1, 0) << fd->nshift;
			fd_ops = &clk_regmap_fractional_divider_ops;
			fd_hw = &fd->hw;
		}
	}

	composite = devm_kzalloc(dev, sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	init.parent_names = parent_names;
	init.num_parents = num_parents;

	clk_composite_ops = &composite->ops;

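	/*
	 * Populate the composite clk_ops with only the operations the
	 * present sub-clocks provide. A set_rate callback is exposed only
	 * when the rate sub-clock also implements round_rate or
	 * determine_rate.
	 */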
	if (mux_hw && mux_ops) {
		if (!mux_ops->get_parent)
			return ERR_PTR(-EINVAL);

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent =
			clk_regmap_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent =
				clk_regmap_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_regmap_composite_determine_rate;
	}

	if (div_hw && div_ops) {
		if (!div_ops->recalc_rate)
			return ERR_PTR(-EINVAL);

		clk_composite_ops->recalc_rate =
			clk_regmap_composite_recalc_rate;

		if (div_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_regmap_composite_determine_rate;
		else if (div_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_regmap_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (div_ops->set_rate) {
			if (div_ops->determine_rate || div_ops->round_rate)
				clk_composite_ops->set_rate =
					clk_regmap_composite_set_rate;
			else
				WARN(1, "missing round_rate op\n");
		}

		composite->rate_hw = div_hw;
		composite->rate_ops = div_ops;
	}

	if (fd_hw && fd_ops) {
		if (!fd_ops->recalc_rate)
			return ERR_PTR(-EINVAL);

		clk_composite_ops->recalc_rate =
			clk_regmap_composite_recalc_rate;

		if (fd_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_regmap_composite_determine_rate;
		else if (fd_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_regmap_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (fd_ops->set_rate) {
			if (fd_ops->determine_rate || fd_ops->round_rate)
				clk_composite_ops->set_rate =
					clk_regmap_composite_set_rate;
			else
				WARN(1, "missing round_rate op\n");
		}

		composite->rate_hw = fd_hw;
		composite->rate_ops = fd_ops;
	}

	if (gate_hw && gate_ops) {
		if (!gate_ops->is_prepared || !gate_ops->prepare ||
		    !gate_ops->unprepare)
			return ERR_PTR(-EINVAL);

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_prepared =
			clk_regmap_composite_is_prepared;
		clk_composite_ops->prepare = clk_regmap_composite_prepare;
		clk_composite_ops->unprepare = clk_regmap_composite_unprepare;
	}

	init.ops = clk_composite_ops;
	composite->dev = dev;
	composite->hw.init = &init;

	clk = devm_clk_register(dev, &composite->hw);
	if (IS_ERR(clk))
		return clk;

	if (composite->mux_hw)
		composite->mux_hw->clk = clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = clk;

	return clk;
}
EXPORT_SYMBOL_GPL(devm_clk_regmap_register_composite);