xref: /OK3568_Linux_fs/kernel/drivers/clk/tegra/clk-tegra20-emc.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: GPL-2.0+
/*
 * Based on drivers/clk/tegra/clk-emc.c
 * Copyright (c) 2014, NVIDIA CORPORATION.  All rights reserved.
 *
 * Author: Dmitry Osipenko <digetx@gmail.com>
 * Copyright (C) 2019 GRATE-DRIVER project
 */

#define pr_fmt(fmt)	"tegra-emc-clk: " fmt

#include <linux/bits.h>
#include <linux/clk-provider.h>
#include <linux/clk/tegra.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/slab.h>

#include "clk.h"

#define CLK_SOURCE_EMC_2X_CLK_DIVISOR_MASK	GENMASK(7, 0)
#define CLK_SOURCE_EMC_2X_CLK_SRC_MASK		GENMASK(31, 30)
#define CLK_SOURCE_EMC_2X_CLK_SRC_SHIFT		30

#define MC_EMC_SAME_FREQ	BIT(16)
#define USE_PLLM_UD		BIT(29)

#define EMC_SRC_PLL_M		0
#define EMC_SRC_PLL_C		1
#define EMC_SRC_PLL_P		2
#define EMC_SRC_CLK_M		3
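
/*
 * Quick reference for the CLK_SOURCE_EMC register fields, derived from the
 * masks above and from how they are used in this file (summary added for
 * clarity, not taken from the TRM):
 *
 *   bits 31:30  EMC_2X_CLK_SRC      parent mux (pll_m/pll_c/pll_p/clk_m)
 *   bit  29     USE_PLLM_UD         use the pll_m output directly (undivided
 *                                   path); only set when the divisor is 0 and
 *                                   low-jitter mode was requested
 *   bit  16     MC_EMC_SAME_FREQ    keep the Memory Controller clock at the
 *                                   same frequency as the EMC clock
 *   bits  7:0   EMC_2X_CLK_DIVISOR  7.1 fixed-point fractional divisor
 */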

static const char * const emc_parent_clk_names[] = {
	"pll_m", "pll_c", "pll_p", "clk_m",
};

struct tegra_clk_emc {
	struct clk_hw hw;
	void __iomem *reg;
	bool mc_same_freq;
	bool want_low_jitter;

	tegra20_clk_emc_round_cb *round_cb;
	void *cb_arg;
};
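
/*
 * Field summary for struct tegra_clk_emc above (descriptive note):
 *
 *   @reg:              mapped CLK_SOURCE_EMC register
 *   @mc_same_freq:     request MC_EMC_SAME_FREQ on the next rate/parent change
 *   @want_low_jitter:  allow USE_PLLM_UD when running undivided from pll_m
 *   @round_cb/@cb_arg: rate-rounding hook installed by the EMC memory driver
 *                      via tegra20_clk_set_emc_round_callback()
 */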

static inline struct tegra_clk_emc *to_tegra_clk_emc(struct clk_hw *hw)
{
	return container_of(hw, struct tegra_clk_emc, hw);
}

static unsigned long emc_recalc_rate(struct clk_hw *hw,
				     unsigned long parent_rate)
{
	struct tegra_clk_emc *emc = to_tegra_clk_emc(hw);
	u32 val, div;

	val = readl_relaxed(emc->reg);
	div = val & CLK_SOURCE_EMC_2X_CLK_DIVISOR_MASK;

	return DIV_ROUND_UP(parent_rate * 2, div + 2);
}
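
/*
 * The divisor field used by emc_recalc_rate() above is in 7.1 fixed-point
 * format, so a register value of n divides the parent by (n + 2) / 2, which
 * is what the DIV_ROUND_UP() computes. An illustrative example (made-up
 * rates, not from a real EMC timing table): with pll_m at 600 MHz, n = 0
 * gives 600 MHz, n = 1 gives 400 MHz (divide by 1.5) and n = 2 gives 300 MHz
 * (divide by 2).
 */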

static u8 emc_get_parent(struct clk_hw *hw)
{
	struct tegra_clk_emc *emc = to_tegra_clk_emc(hw);

	return readl_relaxed(emc->reg) >> CLK_SOURCE_EMC_2X_CLK_SRC_SHIFT;
}

static int emc_set_parent(struct clk_hw *hw, u8 index)
{
	struct tegra_clk_emc *emc = to_tegra_clk_emc(hw);
	u32 val, div;

	val = readl_relaxed(emc->reg);
	val &= ~CLK_SOURCE_EMC_2X_CLK_SRC_MASK;
	val |= index << CLK_SOURCE_EMC_2X_CLK_SRC_SHIFT;

	div = val & CLK_SOURCE_EMC_2X_CLK_DIVISOR_MASK;

	if (index == EMC_SRC_PLL_M && div == 0 && emc->want_low_jitter)
		val |= USE_PLLM_UD;
	else
		val &= ~USE_PLLM_UD;

	if (emc->mc_same_freq)
		val |= MC_EMC_SAME_FREQ;
	else
		val &= ~MC_EMC_SAME_FREQ;

	writel_relaxed(val, emc->reg);

	fence_udelay(1, emc->reg);

	return 0;
}
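
/*
 * Note on the fence_udelay() calls in this file: the helper comes from the
 * local clk.h and is expected to read the register back before delaying, so
 * the posted (relaxed) write above reaches the clock controller before the
 * ~1 us wait; the read-back acts as the ordering fence.
 */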

static int emc_set_rate(struct clk_hw *hw, unsigned long rate,
			unsigned long parent_rate)
{
	struct tegra_clk_emc *emc = to_tegra_clk_emc(hw);
	unsigned int index;
	u32 val, div;

	div = div_frac_get(rate, parent_rate, 8, 1, 0);

	val = readl_relaxed(emc->reg);
	val &= ~CLK_SOURCE_EMC_2X_CLK_DIVISOR_MASK;
	val |= div;

	index = val >> CLK_SOURCE_EMC_2X_CLK_SRC_SHIFT;

	if (index == EMC_SRC_PLL_M && div == 0 && emc->want_low_jitter)
		val |= USE_PLLM_UD;
	else
		val &= ~USE_PLLM_UD;

	if (emc->mc_same_freq)
		val |= MC_EMC_SAME_FREQ;
	else
		val &= ~MC_EMC_SAME_FREQ;

	writel_relaxed(val, emc->reg);

	fence_udelay(1, emc->reg);

	return 0;
}
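
/*
 * div_frac_get() (a helper from the shared Tegra clk code) converts a target
 * rate into the 7.1 divisor format used above; conceptually n is roughly
 * (parent_rate * 2) / rate - 2, with the exact rounding handled by the
 * helper. For example (illustrative numbers only), asking for 300 MHz from a
 * 600 MHz parent yields n = 2.
 */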

static int emc_set_rate_and_parent(struct clk_hw *hw,
				   unsigned long rate,
				   unsigned long parent_rate,
				   u8 index)
{
	struct tegra_clk_emc *emc = to_tegra_clk_emc(hw);
	u32 val, div;

	div = div_frac_get(rate, parent_rate, 8, 1, 0);

	val = readl_relaxed(emc->reg);

	val &= ~CLK_SOURCE_EMC_2X_CLK_SRC_MASK;
	val |= index << CLK_SOURCE_EMC_2X_CLK_SRC_SHIFT;

	val &= ~CLK_SOURCE_EMC_2X_CLK_DIVISOR_MASK;
	val |= div;

	if (index == EMC_SRC_PLL_M && div == 0 && emc->want_low_jitter)
		val |= USE_PLLM_UD;
	else
		val &= ~USE_PLLM_UD;

	if (emc->mc_same_freq)
		val |= MC_EMC_SAME_FREQ;
	else
		val &= ~MC_EMC_SAME_FREQ;

	writel_relaxed(val, emc->reg);

	fence_udelay(1, emc->reg);

	return 0;
}
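
/*
 * The clk core invokes .set_rate_and_parent (instead of separate .set_rate
 * and .set_parent calls) when emc_determine_rate() below picks a parent other
 * than the current one, so the mux and the divisor are reprogrammed in a
 * single register write here.
 */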

static int emc_determine_rate(struct clk_hw *hw, struct clk_rate_request *req)
{
	struct tegra_clk_emc *emc = to_tegra_clk_emc(hw);
	struct clk_hw *parent_hw;
	unsigned long divided_rate;
	unsigned long parent_rate;
	unsigned int i;
	long emc_rate;
	int div;

	emc_rate = emc->round_cb(req->rate, req->min_rate, req->max_rate,
				 emc->cb_arg);
	if (emc_rate < 0)
		return emc_rate;

	for (i = 0; i < ARRAY_SIZE(emc_parent_clk_names); i++) {
		parent_hw = clk_hw_get_parent_by_index(hw, i);

		if (req->best_parent_hw == parent_hw)
			parent_rate = req->best_parent_rate;
		else
			parent_rate = clk_hw_get_rate(parent_hw);

		if (emc_rate > parent_rate)
			continue;

		div = div_frac_get(emc_rate, parent_rate, 8, 1, 0);
		divided_rate = DIV_ROUND_UP(parent_rate * 2, div + 2);

		if (divided_rate != emc_rate)
			continue;

		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent_hw;
		req->rate = emc_rate;
		break;
	}

	if (i == ARRAY_SIZE(emc_parent_clk_names)) {
		pr_err_once("can't find parent for rate %lu emc_rate %lu\n",
			    req->rate, emc_rate);
		return -EINVAL;
	}

	return 0;
}
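
/*
 * emc_determine_rate() above defers rate rounding to the memory driver's
 * callback and then only accepts a parent whose rate the 7.1 divisor can hit
 * exactly. A sketch with assumed rates: if the callback rounds the request to
 * 300 MHz and pll_m runs at 600 MHz, divisor n = 2 reproduces 300 MHz
 * exactly, so pll_m becomes best_parent_hw.
 */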

static const struct clk_ops tegra_clk_emc_ops = {
	.recalc_rate = emc_recalc_rate,
	.get_parent = emc_get_parent,
	.set_parent = emc_set_parent,
	.set_rate = emc_set_rate,
	.set_rate_and_parent = emc_set_rate_and_parent,
	.determine_rate = emc_determine_rate,
};

void tegra20_clk_set_emc_round_callback(tegra20_clk_emc_round_cb *round_cb,
					void *cb_arg)
{
	struct clk *clk = __clk_lookup("emc");
	struct tegra_clk_emc *emc;
	struct clk_hw *hw;

	if (clk) {
		hw = __clk_get_hw(clk);
		emc = to_tegra_clk_emc(hw);

		emc->round_cb = round_cb;
		emc->cb_arg = cb_arg;
	}
}
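
/*
 * A minimal sketch of how an EMC memory driver might install the rounding
 * hook; the function and variable names below are hypothetical, only the
 * callback signature is taken from this file:
 *
 *	static long emc_round_rate(unsigned long rate, unsigned long min_rate,
 *				   unsigned long max_rate, void *arg)
 *	{
 *		struct tegra_emc *emc = arg;
 *
 *		// pick the closest supported rate from the timing table
 *		return emc_pick_timing_rate(emc, rate, min_rate, max_rate);
 *	}
 *
 *	...
 *	tegra20_clk_set_emc_round_callback(emc_round_rate, emc);
 */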

bool tegra20_clk_emc_driver_available(struct clk_hw *emc_hw)
{
	return to_tegra_clk_emc(emc_hw)->round_cb != NULL;
}

struct clk *tegra20_clk_register_emc(void __iomem *ioaddr, bool low_jitter)
{
	struct tegra_clk_emc *emc;
	struct clk_init_data init;
	struct clk *clk;

	emc = kzalloc(sizeof(*emc), GFP_KERNEL);
	if (!emc)
		return NULL;

	/*
	 * EMC stands for External Memory Controller.
	 *
	 * The EMC clock must never be disabled, e.g. by gating one of its
	 * parents, because the system hangs immediately without external
	 * memory. Hence the clock is marked as critical.
	 */
	init.name = "emc";
	init.ops = &tegra_clk_emc_ops;
	init.flags = CLK_IS_CRITICAL;
	init.parent_names = emc_parent_clk_names;
	init.num_parents = ARRAY_SIZE(emc_parent_clk_names);

	emc->reg = ioaddr;
	emc->hw.init = &init;
	emc->want_low_jitter = low_jitter;

	clk = clk_register(NULL, &emc->hw);
	if (IS_ERR(clk)) {
		kfree(emc);
		return NULL;
	}

	return clk;
}
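
/*
 * tegra20_clk_register_emc() returns NULL (not an ERR_PTR) on both allocation
 * and registration failure, so callers only need a NULL check. It is meant to
 * be called from the Tegra20 SoC clock initialization with the mapped
 * CLK_SOURCE_EMC register address; a hedged usage sketch, where clk_base and
 * CLK_SOURCE_EMC are illustrative names for the mapped CAR base and the
 * register offset:
 *
 *	clk = tegra20_clk_register_emc(clk_base + CLK_SOURCE_EMC, false);
 *	if (!clk)
 *		return;
 */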

int tegra20_clk_prepare_emc_mc_same_freq(struct clk *emc_clk, bool same)
{
	struct tegra_clk_emc *emc;
	struct clk_hw *hw;

	if (!emc_clk)
		return -EINVAL;

	hw = __clk_get_hw(emc_clk);
	emc = to_tegra_clk_emc(hw);
	emc->mc_same_freq = same;

	return 0;
}
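
/*
 * tegra20_clk_prepare_emc_mc_same_freq() only latches the request into
 * emc->mc_same_freq; the MC_EMC_SAME_FREQ bit itself is written out by the
 * next emc_set_rate()/emc_set_parent()/emc_set_rate_and_parent() call, hence
 * the "prepare" in the name.
 */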