xref: /OK3568_Linux_fs/kernel/drivers/gpu/drm/rcar-du/rcar_du_crtc.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0+
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * rcar_du_crtc.c  --  R-Car Display Unit CRTCs
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Copyright (C) 2013-2015 Renesas Electronics Corporation
6*4882a593Smuzhiyun  *
7*4882a593Smuzhiyun  * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
8*4882a593Smuzhiyun  */
9*4882a593Smuzhiyun 
10*4882a593Smuzhiyun #include <linux/clk.h>
11*4882a593Smuzhiyun #include <linux/mutex.h>
12*4882a593Smuzhiyun #include <linux/platform_device.h>
13*4882a593Smuzhiyun #include <linux/sys_soc.h>
14*4882a593Smuzhiyun 
15*4882a593Smuzhiyun #include <drm/drm_atomic.h>
16*4882a593Smuzhiyun #include <drm/drm_atomic_helper.h>
17*4882a593Smuzhiyun #include <drm/drm_bridge.h>
18*4882a593Smuzhiyun #include <drm/drm_crtc.h>
19*4882a593Smuzhiyun #include <drm/drm_device.h>
20*4882a593Smuzhiyun #include <drm/drm_fb_cma_helper.h>
21*4882a593Smuzhiyun #include <drm/drm_gem_cma_helper.h>
22*4882a593Smuzhiyun #include <drm/drm_plane_helper.h>
23*4882a593Smuzhiyun #include <drm/drm_vblank.h>
24*4882a593Smuzhiyun 
25*4882a593Smuzhiyun #include "rcar_cmm.h"
26*4882a593Smuzhiyun #include "rcar_du_crtc.h"
27*4882a593Smuzhiyun #include "rcar_du_drv.h"
28*4882a593Smuzhiyun #include "rcar_du_encoder.h"
29*4882a593Smuzhiyun #include "rcar_du_kms.h"
30*4882a593Smuzhiyun #include "rcar_du_plane.h"
31*4882a593Smuzhiyun #include "rcar_du_regs.h"
32*4882a593Smuzhiyun #include "rcar_du_vsp.h"
33*4882a593Smuzhiyun #include "rcar_lvds.h"
34*4882a593Smuzhiyun 
35*4882a593Smuzhiyun static u32 rcar_du_crtc_read(struct rcar_du_crtc *rcrtc, u32 reg)
36*4882a593Smuzhiyun {
37*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
38*4882a593Smuzhiyun 
39*4882a593Smuzhiyun 	return rcar_du_read(rcdu, rcrtc->mmio_offset + reg);
40*4882a593Smuzhiyun }
41*4882a593Smuzhiyun 
42*4882a593Smuzhiyun static void rcar_du_crtc_write(struct rcar_du_crtc *rcrtc, u32 reg, u32 data)
43*4882a593Smuzhiyun {
44*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
45*4882a593Smuzhiyun 
46*4882a593Smuzhiyun 	rcar_du_write(rcdu, rcrtc->mmio_offset + reg, data);
47*4882a593Smuzhiyun }
48*4882a593Smuzhiyun 
49*4882a593Smuzhiyun static void rcar_du_crtc_clr(struct rcar_du_crtc *rcrtc, u32 reg, u32 clr)
50*4882a593Smuzhiyun {
51*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
52*4882a593Smuzhiyun 
53*4882a593Smuzhiyun 	rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
54*4882a593Smuzhiyun 		      rcar_du_read(rcdu, rcrtc->mmio_offset + reg) & ~clr);
55*4882a593Smuzhiyun }
56*4882a593Smuzhiyun 
57*4882a593Smuzhiyun static void rcar_du_crtc_set(struct rcar_du_crtc *rcrtc, u32 reg, u32 set)
58*4882a593Smuzhiyun {
59*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
60*4882a593Smuzhiyun 
61*4882a593Smuzhiyun 	rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
62*4882a593Smuzhiyun 		      rcar_du_read(rcdu, rcrtc->mmio_offset + reg) | set);
63*4882a593Smuzhiyun }
64*4882a593Smuzhiyun 
65*4882a593Smuzhiyun void rcar_du_crtc_dsysr_clr_set(struct rcar_du_crtc *rcrtc, u32 clr, u32 set)
66*4882a593Smuzhiyun {
67*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
68*4882a593Smuzhiyun 
69*4882a593Smuzhiyun 	rcrtc->dsysr = (rcrtc->dsysr & ~clr) | set;
70*4882a593Smuzhiyun 	rcar_du_write(rcdu, rcrtc->mmio_offset + DSYSR, rcrtc->dsysr);
71*4882a593Smuzhiyun }
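/*
 * Illustrative usage (editor's sketch, not part of the driver): because the
 * DSYSR value is cached in rcrtc->dsysr, callers can update individual fields
 * without reading the register back, e.g.
 *
 *	rcar_du_crtc_dsysr_clr_set(rcrtc, DSYSR_TVM_MASK, DSYSR_TVM_MASTER);
 *
 * clears the sync-mode field and selects master mode in a single register
 * write, as done in rcar_du_crtc_start() below.
 */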
72*4882a593Smuzhiyun 
73*4882a593Smuzhiyun /* -----------------------------------------------------------------------------
74*4882a593Smuzhiyun  * Hardware Setup
75*4882a593Smuzhiyun  */
76*4882a593Smuzhiyun 
77*4882a593Smuzhiyun struct dpll_info {
78*4882a593Smuzhiyun 	unsigned int output;
79*4882a593Smuzhiyun 	unsigned int fdpll;
80*4882a593Smuzhiyun 	unsigned int n;
81*4882a593Smuzhiyun 	unsigned int m;
82*4882a593Smuzhiyun };
83*4882a593Smuzhiyun 
84*4882a593Smuzhiyun static void rcar_du_dpll_divider(struct rcar_du_crtc *rcrtc,
85*4882a593Smuzhiyun 				 struct dpll_info *dpll,
86*4882a593Smuzhiyun 				 unsigned long input,
87*4882a593Smuzhiyun 				 unsigned long target)
88*4882a593Smuzhiyun {
89*4882a593Smuzhiyun 	unsigned long best_diff = (unsigned long)-1;
90*4882a593Smuzhiyun 	unsigned long diff;
91*4882a593Smuzhiyun 	unsigned int fdpll;
92*4882a593Smuzhiyun 	unsigned int m;
93*4882a593Smuzhiyun 	unsigned int n;
94*4882a593Smuzhiyun 
95*4882a593Smuzhiyun 	/*
96*4882a593Smuzhiyun 	 *   fin                                 fvco        fout       fclkout
97*4882a593Smuzhiyun 	 * in --> [1/M] --> |PD| -> [LPF] -> [VCO] -> [1/P] -+-> [1/FDPLL] -> out
98*4882a593Smuzhiyun 	 *              +-> |  |                             |
99*4882a593Smuzhiyun 	 *              |                                    |
100*4882a593Smuzhiyun 	 *              +---------------- [1/N] <------------+
101*4882a593Smuzhiyun 	 *
102*4882a593Smuzhiyun 	 *	fclkout = fvco / P / FDPLL -- (1)
103*4882a593Smuzhiyun 	 *
104*4882a593Smuzhiyun 	 * fin/M = fvco/P/N
105*4882a593Smuzhiyun 	 *
106*4882a593Smuzhiyun 	 *	fvco = fin * P *  N / M -- (2)
107*4882a593Smuzhiyun 	 *
108*4882a593Smuzhiyun 	 * Combining (1) and (2) gives
109*4882a593Smuzhiyun 	 *
110*4882a593Smuzhiyun 	 *	fclkout = fin * N / M / FDPLL
111*4882a593Smuzhiyun 	 *
112*4882a593Smuzhiyun 	 * NOTES
113*4882a593Smuzhiyun 	 *	N	: (n + 1)
114*4882a593Smuzhiyun 	 *	M	: (m + 1)
115*4882a593Smuzhiyun 	 *	FDPLL	: (fdpll + 1)
116*4882a593Smuzhiyun 	 *	P	: 2
117*4882a593Smuzhiyun 	 *	2kHz < fvco < 4096MHz
118*4882a593Smuzhiyun 	 *
119*4882a593Smuzhiyun 	 * To minimize the jitter,
120*4882a593Smuzhiyun 	 * N : as large as possible
121*4882a593Smuzhiyun 	 * M : as small as possible
122*4882a593Smuzhiyun 	 */
123*4882a593Smuzhiyun 	for (m = 0; m < 4; m++) {
124*4882a593Smuzhiyun 		for (n = 119; n > 38; n--) {
125*4882a593Smuzhiyun 			/*
126*4882a593Smuzhiyun 			 * This code only runs on 64-bit architectures, so the
127*4882a593Smuzhiyun 			 * unsigned long type can be used for 64-bit
128*4882a593Smuzhiyun 			 * computation. It will still compile without any
129*4882a593Smuzhiyun 			 * warning on 32-bit architectures.
130*4882a593Smuzhiyun 			 *
131*4882a593Smuzhiyun 			 * To optimize calculations, use fout instead of fvco
132*4882a593Smuzhiyun 			 * to verify the VCO frequency constraint.
133*4882a593Smuzhiyun 			 */
134*4882a593Smuzhiyun 			unsigned long fout = input * (n + 1) / (m + 1);
135*4882a593Smuzhiyun 
136*4882a593Smuzhiyun 			if (fout < 1000 || fout > 2048 * 1000 * 1000U)
137*4882a593Smuzhiyun 				continue;
138*4882a593Smuzhiyun 
139*4882a593Smuzhiyun 			for (fdpll = 1; fdpll < 32; fdpll++) {
140*4882a593Smuzhiyun 				unsigned long output;
141*4882a593Smuzhiyun 
142*4882a593Smuzhiyun 				output = fout / (fdpll + 1);
143*4882a593Smuzhiyun 				if (output >= 400 * 1000 * 1000)
144*4882a593Smuzhiyun 					continue;
145*4882a593Smuzhiyun 
146*4882a593Smuzhiyun 				diff = abs((long)output - (long)target);
147*4882a593Smuzhiyun 				if (best_diff > diff) {
148*4882a593Smuzhiyun 					best_diff = diff;
149*4882a593Smuzhiyun 					dpll->n = n;
150*4882a593Smuzhiyun 					dpll->m = m;
151*4882a593Smuzhiyun 					dpll->fdpll = fdpll;
152*4882a593Smuzhiyun 					dpll->output = output;
153*4882a593Smuzhiyun 				}
154*4882a593Smuzhiyun 
155*4882a593Smuzhiyun 				if (diff == 0)
156*4882a593Smuzhiyun 					goto done;
157*4882a593Smuzhiyun 			}
158*4882a593Smuzhiyun 		}
159*4882a593Smuzhiyun 	}
160*4882a593Smuzhiyun 
161*4882a593Smuzhiyun done:
162*4882a593Smuzhiyun 	dev_dbg(rcrtc->dev->dev,
163*4882a593Smuzhiyun 		"output:%u, fdpll:%u, n:%u, m:%u, diff:%lu\n",
164*4882a593Smuzhiyun 		 dpll->output, dpll->fdpll, dpll->n, dpll->m, best_diff);
165*4882a593Smuzhiyun }
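/*
 * Worked example (editor's illustration, the input frequency is hypothetical):
 * with fin = 33 MHz on DOTCLKIN, m = 1, n = 118 and fdpll = 12 the search
 * above evaluates
 *
 *	fout   = 33 MHz * (118 + 1) / (1 + 1) = 1963.5 MHz
 *	output = 1963.5 MHz / (12 + 1)        ~ 151.04 MHz
 *
 * fout satisfies the 1 kHz..2048 MHz bound and output stays below 400 MHz,
 * so this candidate is kept if its diff against the target is the best seen
 * so far; the loops continue until an exact match is found or the search
 * space is exhausted.
 */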
166*4882a593Smuzhiyun 
167*4882a593Smuzhiyun struct du_clk_params {
168*4882a593Smuzhiyun 	struct clk *clk;
169*4882a593Smuzhiyun 	unsigned long rate;
170*4882a593Smuzhiyun 	unsigned long diff;
171*4882a593Smuzhiyun 	u32 escr;
172*4882a593Smuzhiyun };
173*4882a593Smuzhiyun 
174*4882a593Smuzhiyun static void rcar_du_escr_divider(struct clk *clk, unsigned long target,
175*4882a593Smuzhiyun 				 u32 escr, struct du_clk_params *params)
176*4882a593Smuzhiyun {
177*4882a593Smuzhiyun 	unsigned long rate;
178*4882a593Smuzhiyun 	unsigned long diff;
179*4882a593Smuzhiyun 	u32 div;
180*4882a593Smuzhiyun 
181*4882a593Smuzhiyun 	/*
182*4882a593Smuzhiyun 	 * If the target rate has already been achieved perfectly we can't do
183*4882a593Smuzhiyun 	 * better.
184*4882a593Smuzhiyun 	 */
185*4882a593Smuzhiyun 	if (params->diff == 0)
186*4882a593Smuzhiyun 		return;
187*4882a593Smuzhiyun 
188*4882a593Smuzhiyun 	/*
189*4882a593Smuzhiyun 	 * Compute the input clock rate and internal divisor values to obtain
190*4882a593Smuzhiyun 	 * the clock rate closest to the target frequency.
191*4882a593Smuzhiyun 	 */
192*4882a593Smuzhiyun 	rate = clk_round_rate(clk, target);
193*4882a593Smuzhiyun 	div = clamp(DIV_ROUND_CLOSEST(rate, target), 1UL, 64UL) - 1;
194*4882a593Smuzhiyun 	diff = abs(rate / (div + 1) - target);
195*4882a593Smuzhiyun 
196*4882a593Smuzhiyun 	/*
197*4882a593Smuzhiyun 	 * Store the parameters if the resulting frequency is better than any
198*4882a593Smuzhiyun 	 * previously calculated value.
199*4882a593Smuzhiyun 	 */
200*4882a593Smuzhiyun 	if (diff < params->diff) {
201*4882a593Smuzhiyun 		params->clk = clk;
202*4882a593Smuzhiyun 		params->rate = rate;
203*4882a593Smuzhiyun 		params->diff = diff;
204*4882a593Smuzhiyun 		params->escr = escr | div;
205*4882a593Smuzhiyun 	}
206*4882a593Smuzhiyun }
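/*
 * Worked example (editor's illustration, rates are hypothetical): for a
 * 74.25 MHz target, if clk_round_rate() returns 297 MHz then
 * DIV_ROUND_CLOSEST(297 MHz, 74.25 MHz) = 4, so div = 3, the post-divided
 * rate is exactly 74.25 MHz and diff = 0. The stored ESCR value is the
 * caller's clock-select bits ORed with the divisor field (escr | 3).
 */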
207*4882a593Smuzhiyun 
208*4882a593Smuzhiyun static const struct soc_device_attribute rcar_du_r8a7795_es1[] = {
209*4882a593Smuzhiyun 	{ .soc_id = "r8a7795", .revision = "ES1.*" },
210*4882a593Smuzhiyun 	{ /* sentinel */ }
211*4882a593Smuzhiyun };
212*4882a593Smuzhiyun 
213*4882a593Smuzhiyun static void rcar_du_crtc_set_display_timing(struct rcar_du_crtc *rcrtc)
214*4882a593Smuzhiyun {
215*4882a593Smuzhiyun 	const struct drm_display_mode *mode = &rcrtc->crtc.state->adjusted_mode;
216*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
217*4882a593Smuzhiyun 	unsigned long mode_clock = mode->clock * 1000;
218*4882a593Smuzhiyun 	unsigned int hdse_offset;
219*4882a593Smuzhiyun 	u32 dsmr;
220*4882a593Smuzhiyun 	u32 escr;
221*4882a593Smuzhiyun 
222*4882a593Smuzhiyun 	if (rcdu->info->dpll_mask & (1 << rcrtc->index)) {
223*4882a593Smuzhiyun 		unsigned long target = mode_clock;
224*4882a593Smuzhiyun 		struct dpll_info dpll = { 0 };
225*4882a593Smuzhiyun 		unsigned long extclk;
226*4882a593Smuzhiyun 		u32 dpllcr;
227*4882a593Smuzhiyun 		u32 div = 0;
228*4882a593Smuzhiyun 
229*4882a593Smuzhiyun 		/*
230*4882a593Smuzhiyun 		 * DU channels that have a display PLL can't use the internal
231*4882a593Smuzhiyun 		 * system clock, and have no internal clock divider.
232*4882a593Smuzhiyun 		 */
233*4882a593Smuzhiyun 
234*4882a593Smuzhiyun 		/*
235*4882a593Smuzhiyun 		 * The H3 ES1.x exhibits dot clock duty cycle stability issues.
236*4882a593Smuzhiyun 		 * We can work around them by configuring the DPLL to twice the
237*4882a593Smuzhiyun 		 * desired frequency, coupled with a /2 post-divider. Restrict
238*4882a593Smuzhiyun 		 * the workaround to H3 ES1.x as ES2.0 and all other SoCs have
239*4882a593Smuzhiyun 		 * no post-divider when a display PLL is present (as shown by
240*4882a593Smuzhiyun 		 * the workaround breaking HDMI output on M3-W during testing).
241*4882a593Smuzhiyun 		 */
242*4882a593Smuzhiyun 		if (soc_device_match(rcar_du_r8a7795_es1)) {
243*4882a593Smuzhiyun 			target *= 2;
244*4882a593Smuzhiyun 			div = 1;
245*4882a593Smuzhiyun 		}
246*4882a593Smuzhiyun 
247*4882a593Smuzhiyun 		extclk = clk_get_rate(rcrtc->extclock);
248*4882a593Smuzhiyun 		rcar_du_dpll_divider(rcrtc, &dpll, extclk, target);
249*4882a593Smuzhiyun 
250*4882a593Smuzhiyun 		dpllcr = DPLLCR_CODE | DPLLCR_CLKE
251*4882a593Smuzhiyun 		       | DPLLCR_FDPLL(dpll.fdpll)
252*4882a593Smuzhiyun 		       | DPLLCR_N(dpll.n) | DPLLCR_M(dpll.m)
253*4882a593Smuzhiyun 		       | DPLLCR_STBY;
254*4882a593Smuzhiyun 
255*4882a593Smuzhiyun 		if (rcrtc->index == 1)
256*4882a593Smuzhiyun 			dpllcr |= DPLLCR_PLCS1
257*4882a593Smuzhiyun 			       |  DPLLCR_INCS_DOTCLKIN1;
258*4882a593Smuzhiyun 		else
259*4882a593Smuzhiyun 			dpllcr |= DPLLCR_PLCS0
260*4882a593Smuzhiyun 			       |  DPLLCR_INCS_DOTCLKIN0;
261*4882a593Smuzhiyun 
262*4882a593Smuzhiyun 		rcar_du_group_write(rcrtc->group, DPLLCR, dpllcr);
263*4882a593Smuzhiyun 
264*4882a593Smuzhiyun 		escr = ESCR_DCLKSEL_DCLKIN | div;
265*4882a593Smuzhiyun 	} else if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index)) {
266*4882a593Smuzhiyun 		/*
267*4882a593Smuzhiyun 		 * Use the LVDS PLL output as the dot clock when outputting to
268*4882a593Smuzhiyun 		 * the LVDS encoder on an SoC that supports this clock routing
269*4882a593Smuzhiyun 		 * option. We use the clock directly in that case, without any
270*4882a593Smuzhiyun 		 * additional divider.
271*4882a593Smuzhiyun 		 */
272*4882a593Smuzhiyun 		escr = ESCR_DCLKSEL_DCLKIN;
273*4882a593Smuzhiyun 	} else {
274*4882a593Smuzhiyun 		struct du_clk_params params = { .diff = (unsigned long)-1 };
275*4882a593Smuzhiyun 
276*4882a593Smuzhiyun 		rcar_du_escr_divider(rcrtc->clock, mode_clock,
277*4882a593Smuzhiyun 				     ESCR_DCLKSEL_CLKS, &params);
278*4882a593Smuzhiyun 		if (rcrtc->extclock)
279*4882a593Smuzhiyun 			rcar_du_escr_divider(rcrtc->extclock, mode_clock,
280*4882a593Smuzhiyun 					     ESCR_DCLKSEL_DCLKIN, &params);
281*4882a593Smuzhiyun 
282*4882a593Smuzhiyun 		dev_dbg(rcrtc->dev->dev, "mode clock %lu %s rate %lu\n",
283*4882a593Smuzhiyun 			mode_clock, params.clk == rcrtc->clock ? "cpg" : "ext",
284*4882a593Smuzhiyun 			params.rate);
285*4882a593Smuzhiyun 
286*4882a593Smuzhiyun 		clk_set_rate(params.clk, params.rate);
287*4882a593Smuzhiyun 		escr = params.escr;
288*4882a593Smuzhiyun 	}
289*4882a593Smuzhiyun 
290*4882a593Smuzhiyun 	dev_dbg(rcrtc->dev->dev, "%s: ESCR 0x%08x\n", __func__, escr);
291*4882a593Smuzhiyun 
292*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, rcrtc->index % 2 ? ESCR13 : ESCR02, escr);
293*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, rcrtc->index % 2 ? OTAR13 : OTAR02, 0);
294*4882a593Smuzhiyun 
295*4882a593Smuzhiyun 	/* Signal polarities */
296*4882a593Smuzhiyun 	dsmr = ((mode->flags & DRM_MODE_FLAG_PVSYNC) ? DSMR_VSL : 0)
297*4882a593Smuzhiyun 	     | ((mode->flags & DRM_MODE_FLAG_PHSYNC) ? DSMR_HSL : 0)
298*4882a593Smuzhiyun 	     | ((mode->flags & DRM_MODE_FLAG_INTERLACE) ? DSMR_ODEV : 0)
299*4882a593Smuzhiyun 	     | DSMR_DIPM_DISP | DSMR_CSPM;
300*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, DSMR, dsmr);
301*4882a593Smuzhiyun 
302*4882a593Smuzhiyun 	hdse_offset = 19;
303*4882a593Smuzhiyun 	if (rcrtc->group->cmms_mask & BIT(rcrtc->index % 2))
304*4882a593Smuzhiyun 		hdse_offset += 25;
305*4882a593Smuzhiyun 
306*4882a593Smuzhiyun 	/* Display timings */
307*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, HDSR, mode->htotal - mode->hsync_start -
308*4882a593Smuzhiyun 					hdse_offset);
309*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, HDER, mode->htotal - mode->hsync_start +
310*4882a593Smuzhiyun 					mode->hdisplay - hdse_offset);
311*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, HSWR, mode->hsync_end -
312*4882a593Smuzhiyun 					mode->hsync_start - 1);
313*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, HCR,  mode->htotal - 1);
314*4882a593Smuzhiyun 
315*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, VDSR, mode->crtc_vtotal -
316*4882a593Smuzhiyun 					mode->crtc_vsync_end - 2);
317*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, VDER, mode->crtc_vtotal -
318*4882a593Smuzhiyun 					mode->crtc_vsync_end +
319*4882a593Smuzhiyun 					mode->crtc_vdisplay - 2);
320*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, VSPR, mode->crtc_vtotal -
321*4882a593Smuzhiyun 					mode->crtc_vsync_end +
322*4882a593Smuzhiyun 					mode->crtc_vsync_start - 1);
323*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, VCR,  mode->crtc_vtotal - 1);
324*4882a593Smuzhiyun 
325*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, DESR,  mode->htotal - mode->hsync_start - 1);
326*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, DEWR,  mode->hdisplay);
327*4882a593Smuzhiyun }
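/*
 * Worked example (editor's illustration): for the standard 1920x1080@60 CEA
 * mode (htotal 2200, hsync 2008-2052, vtotal 1125, vsync 1084-1089) on a
 * channel without CMM (hdse_offset = 19), the writes above program
 *
 *	HDSR = 2200 - 2008 - 19        = 173
 *	HDER = 2200 - 2008 + 1920 - 19 = 2093
 *	HSWR = 2052 - 2008 - 1         = 43
 *	HCR  = 2200 - 1                = 2199
 *	VDSR = 1125 - 1089 - 2         = 34
 *	VDER = 1125 - 1089 + 1080 - 2  = 1114
 *	VSPR = 1125 - 1089 + 1084 - 1  = 1119
 *	VCR  = 1125 - 1                = 1124
 *	DESR = 2200 - 2008 - 1         = 191
 *	DEWR = 1920
 *
 * For progressive modes the crtc_v* fields used by the code equal the plain
 * v* values quoted here.
 */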
328*4882a593Smuzhiyun 
329*4882a593Smuzhiyun static unsigned int plane_zpos(struct rcar_du_plane *plane)
330*4882a593Smuzhiyun {
331*4882a593Smuzhiyun 	return plane->plane.state->normalized_zpos;
332*4882a593Smuzhiyun }
333*4882a593Smuzhiyun 
334*4882a593Smuzhiyun static const struct rcar_du_format_info *
335*4882a593Smuzhiyun plane_format(struct rcar_du_plane *plane)
336*4882a593Smuzhiyun {
337*4882a593Smuzhiyun 	return to_rcar_plane_state(plane->plane.state)->format;
338*4882a593Smuzhiyun }
339*4882a593Smuzhiyun 
340*4882a593Smuzhiyun static void rcar_du_crtc_update_planes(struct rcar_du_crtc *rcrtc)
341*4882a593Smuzhiyun {
342*4882a593Smuzhiyun 	struct rcar_du_plane *planes[RCAR_DU_NUM_HW_PLANES];
343*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
344*4882a593Smuzhiyun 	unsigned int num_planes = 0;
345*4882a593Smuzhiyun 	unsigned int dptsr_planes;
346*4882a593Smuzhiyun 	unsigned int hwplanes = 0;
347*4882a593Smuzhiyun 	unsigned int prio = 0;
348*4882a593Smuzhiyun 	unsigned int i;
349*4882a593Smuzhiyun 	u32 dspr = 0;
350*4882a593Smuzhiyun 
351*4882a593Smuzhiyun 	for (i = 0; i < rcrtc->group->num_planes; ++i) {
352*4882a593Smuzhiyun 		struct rcar_du_plane *plane = &rcrtc->group->planes[i];
353*4882a593Smuzhiyun 		unsigned int j;
354*4882a593Smuzhiyun 
355*4882a593Smuzhiyun 		if (plane->plane.state->crtc != &rcrtc->crtc ||
356*4882a593Smuzhiyun 		    !plane->plane.state->visible)
357*4882a593Smuzhiyun 			continue;
358*4882a593Smuzhiyun 
359*4882a593Smuzhiyun 		/* Insert the plane in the sorted planes array. */
360*4882a593Smuzhiyun 		for (j = num_planes++; j > 0; --j) {
361*4882a593Smuzhiyun 			if (plane_zpos(planes[j-1]) <= plane_zpos(plane))
362*4882a593Smuzhiyun 				break;
363*4882a593Smuzhiyun 			planes[j] = planes[j-1];
364*4882a593Smuzhiyun 		}
365*4882a593Smuzhiyun 
366*4882a593Smuzhiyun 		planes[j] = plane;
367*4882a593Smuzhiyun 		prio += plane_format(plane)->planes * 4;
368*4882a593Smuzhiyun 	}
369*4882a593Smuzhiyun 
370*4882a593Smuzhiyun 	for (i = 0; i < num_planes; ++i) {
371*4882a593Smuzhiyun 		struct rcar_du_plane *plane = planes[i];
372*4882a593Smuzhiyun 		struct drm_plane_state *state = plane->plane.state;
373*4882a593Smuzhiyun 		unsigned int index = to_rcar_plane_state(state)->hwindex;
374*4882a593Smuzhiyun 
375*4882a593Smuzhiyun 		prio -= 4;
376*4882a593Smuzhiyun 		dspr |= (index + 1) << prio;
377*4882a593Smuzhiyun 		hwplanes |= 1 << index;
378*4882a593Smuzhiyun 
379*4882a593Smuzhiyun 		if (plane_format(plane)->planes == 2) {
380*4882a593Smuzhiyun 			index = (index + 1) % 8;
381*4882a593Smuzhiyun 
382*4882a593Smuzhiyun 			prio -= 4;
383*4882a593Smuzhiyun 			dspr |= (index + 1) << prio;
384*4882a593Smuzhiyun 			hwplanes |= 1 << index;
385*4882a593Smuzhiyun 		}
386*4882a593Smuzhiyun 	}
387*4882a593Smuzhiyun 
388*4882a593Smuzhiyun 	/* If VSP+DU integration is enabled the plane assignment is fixed. */
389*4882a593Smuzhiyun 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE)) {
390*4882a593Smuzhiyun 		if (rcdu->info->gen < 3) {
391*4882a593Smuzhiyun 			dspr = (rcrtc->index % 2) + 1;
392*4882a593Smuzhiyun 			hwplanes = 1 << (rcrtc->index % 2);
393*4882a593Smuzhiyun 		} else {
394*4882a593Smuzhiyun 			dspr = (rcrtc->index % 2) ? 3 : 1;
395*4882a593Smuzhiyun 			hwplanes = 1 << ((rcrtc->index % 2) ? 2 : 0);
396*4882a593Smuzhiyun 		}
397*4882a593Smuzhiyun 	}
398*4882a593Smuzhiyun 
399*4882a593Smuzhiyun 	/*
400*4882a593Smuzhiyun 	 * Update the planes to display timing and dot clock generator
401*4882a593Smuzhiyun 	 * associations.
402*4882a593Smuzhiyun 	 *
403*4882a593Smuzhiyun 	 * Updating the DPTSR register requires restarting the CRTC group,
404*4882a593Smuzhiyun 	 * resulting in visible flicker. To mitigate the issue only update the
405*4882a593Smuzhiyun 	 * association if needed by enabled planes. Planes being disabled will
406*4882a593Smuzhiyun 	 * keep their current association.
407*4882a593Smuzhiyun 	 */
408*4882a593Smuzhiyun 	mutex_lock(&rcrtc->group->lock);
409*4882a593Smuzhiyun 
410*4882a593Smuzhiyun 	dptsr_planes = rcrtc->index % 2 ? rcrtc->group->dptsr_planes | hwplanes
411*4882a593Smuzhiyun 		     : rcrtc->group->dptsr_planes & ~hwplanes;
412*4882a593Smuzhiyun 
413*4882a593Smuzhiyun 	if (dptsr_planes != rcrtc->group->dptsr_planes) {
414*4882a593Smuzhiyun 		rcar_du_group_write(rcrtc->group, DPTSR,
415*4882a593Smuzhiyun 				    (dptsr_planes << 16) | dptsr_planes);
416*4882a593Smuzhiyun 		rcrtc->group->dptsr_planes = dptsr_planes;
417*4882a593Smuzhiyun 
418*4882a593Smuzhiyun 		if (rcrtc->group->used_crtcs)
419*4882a593Smuzhiyun 			rcar_du_group_restart(rcrtc->group);
420*4882a593Smuzhiyun 	}
421*4882a593Smuzhiyun 
422*4882a593Smuzhiyun 	/* Restart the group if plane sources have changed. */
423*4882a593Smuzhiyun 	if (rcrtc->group->need_restart)
424*4882a593Smuzhiyun 		rcar_du_group_restart(rcrtc->group);
425*4882a593Smuzhiyun 
426*4882a593Smuzhiyun 	mutex_unlock(&rcrtc->group->lock);
427*4882a593Smuzhiyun 
428*4882a593Smuzhiyun 	rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR,
429*4882a593Smuzhiyun 			    dspr);
430*4882a593Smuzhiyun }
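/*
 * Worked example (editor's illustration): with two enabled planes using
 * single-plane formats, hwindex 0 at zpos 0 and hwindex 2 at zpos 1, the
 * sort and priority assignment above produce
 *
 *	dspr     = ((0 + 1) << 4) | ((2 + 1) << 0) = 0x13
 *	hwplanes = BIT(0) | BIT(2)                 = 0x05
 *
 * i.e. each enabled hardware plane occupies one 4-bit slot in DS1PR/DS2PR,
 * ordered by normalized zpos.
 */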
431*4882a593Smuzhiyun 
432*4882a593Smuzhiyun /* -----------------------------------------------------------------------------
433*4882a593Smuzhiyun  * Page Flip
434*4882a593Smuzhiyun  */
435*4882a593Smuzhiyun 
436*4882a593Smuzhiyun void rcar_du_crtc_finish_page_flip(struct rcar_du_crtc *rcrtc)
437*4882a593Smuzhiyun {
438*4882a593Smuzhiyun 	struct drm_pending_vblank_event *event;
439*4882a593Smuzhiyun 	struct drm_device *dev = rcrtc->crtc.dev;
440*4882a593Smuzhiyun 	unsigned long flags;
441*4882a593Smuzhiyun 
442*4882a593Smuzhiyun 	spin_lock_irqsave(&dev->event_lock, flags);
443*4882a593Smuzhiyun 	event = rcrtc->event;
444*4882a593Smuzhiyun 	rcrtc->event = NULL;
445*4882a593Smuzhiyun 	spin_unlock_irqrestore(&dev->event_lock, flags);
446*4882a593Smuzhiyun 
447*4882a593Smuzhiyun 	if (event == NULL)
448*4882a593Smuzhiyun 		return;
449*4882a593Smuzhiyun 
450*4882a593Smuzhiyun 	spin_lock_irqsave(&dev->event_lock, flags);
451*4882a593Smuzhiyun 	drm_crtc_send_vblank_event(&rcrtc->crtc, event);
452*4882a593Smuzhiyun 	wake_up(&rcrtc->flip_wait);
453*4882a593Smuzhiyun 	spin_unlock_irqrestore(&dev->event_lock, flags);
454*4882a593Smuzhiyun 
455*4882a593Smuzhiyun 	drm_crtc_vblank_put(&rcrtc->crtc);
456*4882a593Smuzhiyun }
457*4882a593Smuzhiyun 
458*4882a593Smuzhiyun static bool rcar_du_crtc_page_flip_pending(struct rcar_du_crtc *rcrtc)
459*4882a593Smuzhiyun {
460*4882a593Smuzhiyun 	struct drm_device *dev = rcrtc->crtc.dev;
461*4882a593Smuzhiyun 	unsigned long flags;
462*4882a593Smuzhiyun 	bool pending;
463*4882a593Smuzhiyun 
464*4882a593Smuzhiyun 	spin_lock_irqsave(&dev->event_lock, flags);
465*4882a593Smuzhiyun 	pending = rcrtc->event != NULL;
466*4882a593Smuzhiyun 	spin_unlock_irqrestore(&dev->event_lock, flags);
467*4882a593Smuzhiyun 
468*4882a593Smuzhiyun 	return pending;
469*4882a593Smuzhiyun }
470*4882a593Smuzhiyun 
471*4882a593Smuzhiyun static void rcar_du_crtc_wait_page_flip(struct rcar_du_crtc *rcrtc)
472*4882a593Smuzhiyun {
473*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
474*4882a593Smuzhiyun 
475*4882a593Smuzhiyun 	if (wait_event_timeout(rcrtc->flip_wait,
476*4882a593Smuzhiyun 			       !rcar_du_crtc_page_flip_pending(rcrtc),
477*4882a593Smuzhiyun 			       msecs_to_jiffies(50)))
478*4882a593Smuzhiyun 		return;
479*4882a593Smuzhiyun 
480*4882a593Smuzhiyun 	dev_warn(rcdu->dev, "page flip timeout\n");
481*4882a593Smuzhiyun 
482*4882a593Smuzhiyun 	rcar_du_crtc_finish_page_flip(rcrtc);
483*4882a593Smuzhiyun }
484*4882a593Smuzhiyun 
485*4882a593Smuzhiyun /* -----------------------------------------------------------------------------
486*4882a593Smuzhiyun  * Color Management Module (CMM)
487*4882a593Smuzhiyun  */
488*4882a593Smuzhiyun 
489*4882a593Smuzhiyun static int rcar_du_cmm_check(struct drm_crtc *crtc,
490*4882a593Smuzhiyun 			     struct drm_crtc_state *state)
491*4882a593Smuzhiyun {
492*4882a593Smuzhiyun 	struct drm_property_blob *drm_lut = state->gamma_lut;
493*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
494*4882a593Smuzhiyun 	struct device *dev = rcrtc->dev->dev;
495*4882a593Smuzhiyun 
496*4882a593Smuzhiyun 	if (!drm_lut)
497*4882a593Smuzhiyun 		return 0;
498*4882a593Smuzhiyun 
499*4882a593Smuzhiyun 	/* We only accept fully populated LUT tables. */
500*4882a593Smuzhiyun 	if (drm_color_lut_size(drm_lut) != CM2_LUT_SIZE) {
501*4882a593Smuzhiyun 		dev_err(dev, "invalid gamma lut size: %zu bytes\n",
502*4882a593Smuzhiyun 			drm_lut->length);
503*4882a593Smuzhiyun 		return -EINVAL;
504*4882a593Smuzhiyun 	}
505*4882a593Smuzhiyun 
506*4882a593Smuzhiyun 	return 0;
507*4882a593Smuzhiyun }
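/*
 * Editor's note: CM2_LUT_SIZE is assumed here to be 256 entries, matching the
 * legacy gamma ramp size. A userspace sketch that satisfies this check could
 * use the libdrm legacy helper, e.g.
 *
 *	uint16_t r[256], g[256], b[256];
 *	// fill r/g/b with a full 256-entry ramp ...
 *	drmModeCrtcSetGamma(fd, crtc_id, 256, r, g, b);
 *
 * which the atomic helpers translate into a GAMMA_LUT blob of 256
 * drm_color_lut entries.
 */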
508*4882a593Smuzhiyun 
509*4882a593Smuzhiyun static void rcar_du_cmm_setup(struct drm_crtc *crtc)
510*4882a593Smuzhiyun {
511*4882a593Smuzhiyun 	struct drm_property_blob *drm_lut = crtc->state->gamma_lut;
512*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
513*4882a593Smuzhiyun 	struct rcar_cmm_config cmm_config = {};
514*4882a593Smuzhiyun 
515*4882a593Smuzhiyun 	if (!rcrtc->cmm)
516*4882a593Smuzhiyun 		return;
517*4882a593Smuzhiyun 
518*4882a593Smuzhiyun 	if (drm_lut)
519*4882a593Smuzhiyun 		cmm_config.lut.table = (struct drm_color_lut *)drm_lut->data;
520*4882a593Smuzhiyun 
521*4882a593Smuzhiyun 	rcar_cmm_setup(rcrtc->cmm, &cmm_config);
522*4882a593Smuzhiyun }
523*4882a593Smuzhiyun 
524*4882a593Smuzhiyun /* -----------------------------------------------------------------------------
525*4882a593Smuzhiyun  * Start/Stop and Suspend/Resume
526*4882a593Smuzhiyun  */
527*4882a593Smuzhiyun 
528*4882a593Smuzhiyun static void rcar_du_crtc_setup(struct rcar_du_crtc *rcrtc)
529*4882a593Smuzhiyun {
530*4882a593Smuzhiyun 	/* Set display off and background to black */
531*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, DOOR, DOOR_RGB(0, 0, 0));
532*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, BPOR, BPOR_RGB(0, 0, 0));
533*4882a593Smuzhiyun 
534*4882a593Smuzhiyun 	/* Configure display timings and output routing */
535*4882a593Smuzhiyun 	rcar_du_crtc_set_display_timing(rcrtc);
536*4882a593Smuzhiyun 	rcar_du_group_set_routing(rcrtc->group);
537*4882a593Smuzhiyun 
538*4882a593Smuzhiyun 	/* Start with all planes disabled. */
539*4882a593Smuzhiyun 	rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR, 0);
540*4882a593Smuzhiyun 
541*4882a593Smuzhiyun 	/* Enable the VSP compositor. */
542*4882a593Smuzhiyun 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
543*4882a593Smuzhiyun 		rcar_du_vsp_enable(rcrtc);
544*4882a593Smuzhiyun 
545*4882a593Smuzhiyun 	/* Turn vertical blanking interrupt reporting on. */
546*4882a593Smuzhiyun 	drm_crtc_vblank_on(&rcrtc->crtc);
547*4882a593Smuzhiyun }
548*4882a593Smuzhiyun 
549*4882a593Smuzhiyun static int rcar_du_crtc_get(struct rcar_du_crtc *rcrtc)
550*4882a593Smuzhiyun {
551*4882a593Smuzhiyun 	int ret;
552*4882a593Smuzhiyun 
553*4882a593Smuzhiyun 	/*
554*4882a593Smuzhiyun 	 * Guard against double-get, as the function is called from both the
555*4882a593Smuzhiyun 	 * .atomic_enable() and .atomic_begin() handlers.
556*4882a593Smuzhiyun 	 */
557*4882a593Smuzhiyun 	if (rcrtc->initialized)
558*4882a593Smuzhiyun 		return 0;
559*4882a593Smuzhiyun 
560*4882a593Smuzhiyun 	ret = clk_prepare_enable(rcrtc->clock);
561*4882a593Smuzhiyun 	if (ret < 0)
562*4882a593Smuzhiyun 		return ret;
563*4882a593Smuzhiyun 
564*4882a593Smuzhiyun 	ret = clk_prepare_enable(rcrtc->extclock);
565*4882a593Smuzhiyun 	if (ret < 0)
566*4882a593Smuzhiyun 		goto error_clock;
567*4882a593Smuzhiyun 
568*4882a593Smuzhiyun 	ret = rcar_du_group_get(rcrtc->group);
569*4882a593Smuzhiyun 	if (ret < 0)
570*4882a593Smuzhiyun 		goto error_group;
571*4882a593Smuzhiyun 
572*4882a593Smuzhiyun 	rcar_du_crtc_setup(rcrtc);
573*4882a593Smuzhiyun 	rcrtc->initialized = true;
574*4882a593Smuzhiyun 
575*4882a593Smuzhiyun 	return 0;
576*4882a593Smuzhiyun 
577*4882a593Smuzhiyun error_group:
578*4882a593Smuzhiyun 	clk_disable_unprepare(rcrtc->extclock);
579*4882a593Smuzhiyun error_clock:
580*4882a593Smuzhiyun 	clk_disable_unprepare(rcrtc->clock);
581*4882a593Smuzhiyun 	return ret;
582*4882a593Smuzhiyun }
583*4882a593Smuzhiyun 
584*4882a593Smuzhiyun static void rcar_du_crtc_put(struct rcar_du_crtc *rcrtc)
585*4882a593Smuzhiyun {
586*4882a593Smuzhiyun 	rcar_du_group_put(rcrtc->group);
587*4882a593Smuzhiyun 
588*4882a593Smuzhiyun 	clk_disable_unprepare(rcrtc->extclock);
589*4882a593Smuzhiyun 	clk_disable_unprepare(rcrtc->clock);
590*4882a593Smuzhiyun 
591*4882a593Smuzhiyun 	rcrtc->initialized = false;
592*4882a593Smuzhiyun }
593*4882a593Smuzhiyun 
594*4882a593Smuzhiyun static void rcar_du_crtc_start(struct rcar_du_crtc *rcrtc)
595*4882a593Smuzhiyun {
596*4882a593Smuzhiyun 	bool interlaced;
597*4882a593Smuzhiyun 
598*4882a593Smuzhiyun 	/*
599*4882a593Smuzhiyun 	 * Select master sync mode. This enables display operation in master
600*4882a593Smuzhiyun 	 * sync mode (with the HSYNC and VSYNC signals configured as outputs and
601*4882a593Smuzhiyun 	 * actively driven).
602*4882a593Smuzhiyun 	 */
603*4882a593Smuzhiyun 	interlaced = rcrtc->crtc.mode.flags & DRM_MODE_FLAG_INTERLACE;
604*4882a593Smuzhiyun 	rcar_du_crtc_dsysr_clr_set(rcrtc, DSYSR_TVM_MASK | DSYSR_SCM_MASK,
605*4882a593Smuzhiyun 				   (interlaced ? DSYSR_SCM_INT_VIDEO : 0) |
606*4882a593Smuzhiyun 				   DSYSR_TVM_MASTER);
607*4882a593Smuzhiyun 
608*4882a593Smuzhiyun 	rcar_du_group_start_stop(rcrtc->group, true);
609*4882a593Smuzhiyun }
610*4882a593Smuzhiyun 
611*4882a593Smuzhiyun static void rcar_du_crtc_disable_planes(struct rcar_du_crtc *rcrtc)
612*4882a593Smuzhiyun {
613*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
614*4882a593Smuzhiyun 	struct drm_crtc *crtc = &rcrtc->crtc;
615*4882a593Smuzhiyun 	u32 status;
616*4882a593Smuzhiyun 
617*4882a593Smuzhiyun 	/* Make sure vblank interrupts are enabled. */
618*4882a593Smuzhiyun 	drm_crtc_vblank_get(crtc);
619*4882a593Smuzhiyun 
620*4882a593Smuzhiyun 	/*
621*4882a593Smuzhiyun 	 * Disable planes and calculate how many vertical blanking interrupts we
622*4882a593Smuzhiyun 	 * have to wait for. If a vertical blanking interrupt has been triggered
623*4882a593Smuzhiyun 	 * but not processed yet, we don't know whether it occurred before or
624*4882a593Smuzhiyun 	 * after the planes got disabled. We thus have to wait for two vblank
625*4882a593Smuzhiyun 	 * interrupts in that case.
626*4882a593Smuzhiyun 	 */
627*4882a593Smuzhiyun 	spin_lock_irq(&rcrtc->vblank_lock);
628*4882a593Smuzhiyun 	rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR, 0);
629*4882a593Smuzhiyun 	status = rcar_du_crtc_read(rcrtc, DSSR);
630*4882a593Smuzhiyun 	rcrtc->vblank_count = status & DSSR_VBK ? 2 : 1;
631*4882a593Smuzhiyun 	spin_unlock_irq(&rcrtc->vblank_lock);
632*4882a593Smuzhiyun 
633*4882a593Smuzhiyun 	if (!wait_event_timeout(rcrtc->vblank_wait, rcrtc->vblank_count == 0,
634*4882a593Smuzhiyun 				msecs_to_jiffies(100)))
635*4882a593Smuzhiyun 		dev_warn(rcdu->dev, "vertical blanking timeout\n");
636*4882a593Smuzhiyun 
637*4882a593Smuzhiyun 	drm_crtc_vblank_put(crtc);
638*4882a593Smuzhiyun }
639*4882a593Smuzhiyun 
640*4882a593Smuzhiyun static void rcar_du_crtc_stop(struct rcar_du_crtc *rcrtc)
641*4882a593Smuzhiyun {
642*4882a593Smuzhiyun 	struct drm_crtc *crtc = &rcrtc->crtc;
643*4882a593Smuzhiyun 
644*4882a593Smuzhiyun 	/*
645*4882a593Smuzhiyun 	 * Disable all planes and wait for the change to take effect. This is
646*4882a593Smuzhiyun 	 * required as the plane enable registers are updated on vblank, and no
647*4882a593Smuzhiyun 	 * vblank will occur once the CRTC is stopped. Disabling planes when
648*4882a593Smuzhiyun 	 * starting the CRTC thus wouldn't be enough as it would start scanning
649*4882a593Smuzhiyun 	 * out immediately from old frame buffers until the next vblank.
650*4882a593Smuzhiyun 	 *
651*4882a593Smuzhiyun 	 * This increases the CRTC stop delay, especially when multiple CRTCs
652*4882a593Smuzhiyun 	 * are stopped in one operation as we now wait for one vblank per CRTC.
653*4882a593Smuzhiyun 	 * Whether this can be improved needs to be researched.
654*4882a593Smuzhiyun 	 */
655*4882a593Smuzhiyun 	rcar_du_crtc_disable_planes(rcrtc);
656*4882a593Smuzhiyun 
657*4882a593Smuzhiyun 	/*
658*4882a593Smuzhiyun 	 * Disable vertical blanking interrupt reporting. We first need to wait
659*4882a593Smuzhiyun 	 * for page flip completion before stopping the CRTC as userspace
660*4882a593Smuzhiyun 	 * expects page flips to eventually complete.
661*4882a593Smuzhiyun 	 */
662*4882a593Smuzhiyun 	rcar_du_crtc_wait_page_flip(rcrtc);
663*4882a593Smuzhiyun 	drm_crtc_vblank_off(crtc);
664*4882a593Smuzhiyun 
665*4882a593Smuzhiyun 	/* Disable the VSP compositor. */
666*4882a593Smuzhiyun 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
667*4882a593Smuzhiyun 		rcar_du_vsp_disable(rcrtc);
668*4882a593Smuzhiyun 
669*4882a593Smuzhiyun 	if (rcrtc->cmm)
670*4882a593Smuzhiyun 		rcar_cmm_disable(rcrtc->cmm);
671*4882a593Smuzhiyun 
672*4882a593Smuzhiyun 	/*
673*4882a593Smuzhiyun 	 * Select switch sync mode. This stops display operation and configures
674*4882a593Smuzhiyun 	 * the HSYNC and VSYNC signals as inputs.
675*4882a593Smuzhiyun 	 *
676*4882a593Smuzhiyun 	 * TODO: Find another way to stop the display for DUs that don't support
677*4882a593Smuzhiyun 	 * TVM sync.
678*4882a593Smuzhiyun 	 */
679*4882a593Smuzhiyun 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_TVM_SYNC))
680*4882a593Smuzhiyun 		rcar_du_crtc_dsysr_clr_set(rcrtc, DSYSR_TVM_MASK,
681*4882a593Smuzhiyun 					   DSYSR_TVM_SWITCH);
682*4882a593Smuzhiyun 
683*4882a593Smuzhiyun 	rcar_du_group_start_stop(rcrtc->group, false);
684*4882a593Smuzhiyun }
685*4882a593Smuzhiyun 
686*4882a593Smuzhiyun /* -----------------------------------------------------------------------------
687*4882a593Smuzhiyun  * CRTC Functions
688*4882a593Smuzhiyun  */
689*4882a593Smuzhiyun 
690*4882a593Smuzhiyun static int rcar_du_crtc_atomic_check(struct drm_crtc *crtc,
691*4882a593Smuzhiyun 				     struct drm_crtc_state *state)
692*4882a593Smuzhiyun {
693*4882a593Smuzhiyun 	struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(state);
694*4882a593Smuzhiyun 	struct drm_encoder *encoder;
695*4882a593Smuzhiyun 	int ret;
696*4882a593Smuzhiyun 
697*4882a593Smuzhiyun 	ret = rcar_du_cmm_check(crtc, state);
698*4882a593Smuzhiyun 	if (ret)
699*4882a593Smuzhiyun 		return ret;
700*4882a593Smuzhiyun 
701*4882a593Smuzhiyun 	/* Store the routes from the CRTC output to the DU outputs. */
702*4882a593Smuzhiyun 	rstate->outputs = 0;
703*4882a593Smuzhiyun 
704*4882a593Smuzhiyun 	drm_for_each_encoder_mask(encoder, crtc->dev, state->encoder_mask) {
705*4882a593Smuzhiyun 		struct rcar_du_encoder *renc;
706*4882a593Smuzhiyun 
707*4882a593Smuzhiyun 		/* Skip the writeback encoder. */
708*4882a593Smuzhiyun 		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
709*4882a593Smuzhiyun 			continue;
710*4882a593Smuzhiyun 
711*4882a593Smuzhiyun 		renc = to_rcar_encoder(encoder);
712*4882a593Smuzhiyun 		rstate->outputs |= BIT(renc->output);
713*4882a593Smuzhiyun 	}
714*4882a593Smuzhiyun 
715*4882a593Smuzhiyun 	return 0;
716*4882a593Smuzhiyun }
717*4882a593Smuzhiyun 
718*4882a593Smuzhiyun static void rcar_du_crtc_atomic_enable(struct drm_crtc *crtc,
719*4882a593Smuzhiyun 				       struct drm_crtc_state *old_state)
720*4882a593Smuzhiyun {
721*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
722*4882a593Smuzhiyun 	struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(crtc->state);
723*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
724*4882a593Smuzhiyun 
725*4882a593Smuzhiyun 	if (rcrtc->cmm)
726*4882a593Smuzhiyun 		rcar_cmm_enable(rcrtc->cmm);
727*4882a593Smuzhiyun 	rcar_du_crtc_get(rcrtc);
728*4882a593Smuzhiyun 
729*4882a593Smuzhiyun 	/*
730*4882a593Smuzhiyun 	 * On D3/E3 the dot clock is provided by the LVDS encoder attached to
731*4882a593Smuzhiyun 	 * the DU channel. We need to enable its clock output explicitly if
732*4882a593Smuzhiyun 	 * the LVDS output is disabled.
733*4882a593Smuzhiyun 	 */
734*4882a593Smuzhiyun 	if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index) &&
735*4882a593Smuzhiyun 	    rstate->outputs == BIT(RCAR_DU_OUTPUT_DPAD0)) {
736*4882a593Smuzhiyun 		struct drm_bridge *bridge = rcdu->lvds[rcrtc->index];
737*4882a593Smuzhiyun 		const struct drm_display_mode *mode =
738*4882a593Smuzhiyun 			&crtc->state->adjusted_mode;
739*4882a593Smuzhiyun 
740*4882a593Smuzhiyun 		rcar_lvds_clk_enable(bridge, mode->clock * 1000);
741*4882a593Smuzhiyun 	}
742*4882a593Smuzhiyun 
743*4882a593Smuzhiyun 	rcar_du_crtc_start(rcrtc);
744*4882a593Smuzhiyun 
745*4882a593Smuzhiyun 	/*
746*4882a593Smuzhiyun 	 * TODO: The chip manual indicates that CMM tables should be written
747*4882a593Smuzhiyun 	 * after the DU channel has been activated. Investigate the impact
748*4882a593Smuzhiyun 	 * of this restriction on the first displayed frame.
749*4882a593Smuzhiyun 	 */
750*4882a593Smuzhiyun 	rcar_du_cmm_setup(crtc);
751*4882a593Smuzhiyun }
752*4882a593Smuzhiyun 
753*4882a593Smuzhiyun static void rcar_du_crtc_atomic_disable(struct drm_crtc *crtc,
754*4882a593Smuzhiyun 					struct drm_crtc_state *old_state)
755*4882a593Smuzhiyun {
756*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
757*4882a593Smuzhiyun 	struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(old_state);
758*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
759*4882a593Smuzhiyun 
760*4882a593Smuzhiyun 	rcar_du_crtc_stop(rcrtc);
761*4882a593Smuzhiyun 	rcar_du_crtc_put(rcrtc);
762*4882a593Smuzhiyun 
763*4882a593Smuzhiyun 	if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index) &&
764*4882a593Smuzhiyun 	    rstate->outputs == BIT(RCAR_DU_OUTPUT_DPAD0)) {
765*4882a593Smuzhiyun 		struct drm_bridge *bridge = rcdu->lvds[rcrtc->index];
766*4882a593Smuzhiyun 
767*4882a593Smuzhiyun 		/*
768*4882a593Smuzhiyun 		 * Disable the LVDS clock output, see
769*4882a593Smuzhiyun 		 * rcar_du_crtc_atomic_enable().
770*4882a593Smuzhiyun 		 */
771*4882a593Smuzhiyun 		rcar_lvds_clk_disable(bridge);
772*4882a593Smuzhiyun 	}
773*4882a593Smuzhiyun 
774*4882a593Smuzhiyun 	spin_lock_irq(&crtc->dev->event_lock);
775*4882a593Smuzhiyun 	if (crtc->state->event) {
776*4882a593Smuzhiyun 		drm_crtc_send_vblank_event(crtc, crtc->state->event);
777*4882a593Smuzhiyun 		crtc->state->event = NULL;
778*4882a593Smuzhiyun 	}
779*4882a593Smuzhiyun 	spin_unlock_irq(&crtc->dev->event_lock);
780*4882a593Smuzhiyun }
781*4882a593Smuzhiyun 
782*4882a593Smuzhiyun static void rcar_du_crtc_atomic_begin(struct drm_crtc *crtc,
783*4882a593Smuzhiyun 				      struct drm_crtc_state *old_crtc_state)
784*4882a593Smuzhiyun {
785*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
786*4882a593Smuzhiyun 
787*4882a593Smuzhiyun 	WARN_ON(!crtc->state->enable);
788*4882a593Smuzhiyun 
789*4882a593Smuzhiyun 	/*
790*4882a593Smuzhiyun 	 * If a mode set is in progress we can be called with the CRTC disabled.
791*4882a593Smuzhiyun 	 * We thus need to first get and setup the CRTC in order to configure
792*4882a593Smuzhiyun 	 * planes. We must *not* put the CRTC in .atomic_flush(), as it must be
793*4882a593Smuzhiyun 	 * kept awake until the .atomic_enable() call that will follow. The get
794*4882a593Smuzhiyun 	 * operation in .atomic_enable() will in that case be a no-op, and the
795*4882a593Smuzhiyun 	 * CRTC will be put later in .atomic_disable().
796*4882a593Smuzhiyun 	 *
797*4882a593Smuzhiyun 	 * If a mode set is not in progress the CRTC is enabled, and the
798*4882a593Smuzhiyun 	 * following get call will be a no-op. There is thus no need to balance
799*4882a593Smuzhiyun 	 * it in .atomic_flush() either.
800*4882a593Smuzhiyun 	 */
801*4882a593Smuzhiyun 	rcar_du_crtc_get(rcrtc);
802*4882a593Smuzhiyun 
803*4882a593Smuzhiyun 	/* If the active state changed, we let .atomic_enable handle CMM. */
804*4882a593Smuzhiyun 	if (crtc->state->color_mgmt_changed && !crtc->state->active_changed)
805*4882a593Smuzhiyun 		rcar_du_cmm_setup(crtc);
806*4882a593Smuzhiyun 
807*4882a593Smuzhiyun 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
808*4882a593Smuzhiyun 		rcar_du_vsp_atomic_begin(rcrtc);
809*4882a593Smuzhiyun }
810*4882a593Smuzhiyun 
811*4882a593Smuzhiyun static void rcar_du_crtc_atomic_flush(struct drm_crtc *crtc,
812*4882a593Smuzhiyun 				      struct drm_crtc_state *old_crtc_state)
813*4882a593Smuzhiyun {
814*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
815*4882a593Smuzhiyun 	struct drm_device *dev = rcrtc->crtc.dev;
816*4882a593Smuzhiyun 	unsigned long flags;
817*4882a593Smuzhiyun 
818*4882a593Smuzhiyun 	rcar_du_crtc_update_planes(rcrtc);
819*4882a593Smuzhiyun 
820*4882a593Smuzhiyun 	if (crtc->state->event) {
821*4882a593Smuzhiyun 		WARN_ON(drm_crtc_vblank_get(crtc) != 0);
822*4882a593Smuzhiyun 
823*4882a593Smuzhiyun 		spin_lock_irqsave(&dev->event_lock, flags);
824*4882a593Smuzhiyun 		rcrtc->event = crtc->state->event;
825*4882a593Smuzhiyun 		crtc->state->event = NULL;
826*4882a593Smuzhiyun 		spin_unlock_irqrestore(&dev->event_lock, flags);
827*4882a593Smuzhiyun 	}
828*4882a593Smuzhiyun 
829*4882a593Smuzhiyun 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
830*4882a593Smuzhiyun 		rcar_du_vsp_atomic_flush(rcrtc);
831*4882a593Smuzhiyun }
832*4882a593Smuzhiyun 
833*4882a593Smuzhiyun static enum drm_mode_status
834*4882a593Smuzhiyun rcar_du_crtc_mode_valid(struct drm_crtc *crtc,
835*4882a593Smuzhiyun 			const struct drm_display_mode *mode)
836*4882a593Smuzhiyun {
837*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
838*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
839*4882a593Smuzhiyun 	bool interlaced = mode->flags & DRM_MODE_FLAG_INTERLACE;
840*4882a593Smuzhiyun 	unsigned int min_sync_porch;
841*4882a593Smuzhiyun 	unsigned int vbp;
842*4882a593Smuzhiyun 
843*4882a593Smuzhiyun 	if (interlaced && !rcar_du_has(rcdu, RCAR_DU_FEATURE_INTERLACED))
844*4882a593Smuzhiyun 		return MODE_NO_INTERLACE;
845*4882a593Smuzhiyun 
846*4882a593Smuzhiyun 	/*
847*4882a593Smuzhiyun 	 * The hardware requires a minimum combined horizontal sync and back
848*4882a593Smuzhiyun 	 * porch of 20 pixels (when CMM isn't used) or 45 pixels (when CMM is
849*4882a593Smuzhiyun 	 * used), and a minimum vertical back porch of 3 lines.
850*4882a593Smuzhiyun 	 */
851*4882a593Smuzhiyun 	min_sync_porch = 20;
852*4882a593Smuzhiyun 	if (rcrtc->group->cmms_mask & BIT(rcrtc->index % 2))
853*4882a593Smuzhiyun 		min_sync_porch += 25;
854*4882a593Smuzhiyun 
855*4882a593Smuzhiyun 	if (mode->htotal - mode->hsync_start < min_sync_porch)
856*4882a593Smuzhiyun 		return MODE_HBLANK_NARROW;
857*4882a593Smuzhiyun 
858*4882a593Smuzhiyun 	vbp = (mode->vtotal - mode->vsync_end) / (interlaced ? 2 : 1);
859*4882a593Smuzhiyun 	if (vbp < 3)
860*4882a593Smuzhiyun 		return MODE_VBLANK_NARROW;
861*4882a593Smuzhiyun 
862*4882a593Smuzhiyun 	return MODE_OK;
863*4882a593Smuzhiyun }
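/*
 * Worked example (editor's illustration): the 1920x1080@60 CEA mode has a
 * combined horizontal sync + back porch of 2200 - 2008 = 192 pixels and a
 * vertical back porch of 1125 - 1089 = 36 lines, so it passes both checks
 * even with the CMM offset (min_sync_porch = 45). A hypothetical mode whose
 * hsync_start is only 10 pixels before htotal would be rejected with
 * MODE_HBLANK_NARROW.
 */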
864*4882a593Smuzhiyun 
865*4882a593Smuzhiyun static const struct drm_crtc_helper_funcs crtc_helper_funcs = {
866*4882a593Smuzhiyun 	.atomic_check = rcar_du_crtc_atomic_check,
867*4882a593Smuzhiyun 	.atomic_begin = rcar_du_crtc_atomic_begin,
868*4882a593Smuzhiyun 	.atomic_flush = rcar_du_crtc_atomic_flush,
869*4882a593Smuzhiyun 	.atomic_enable = rcar_du_crtc_atomic_enable,
870*4882a593Smuzhiyun 	.atomic_disable = rcar_du_crtc_atomic_disable,
871*4882a593Smuzhiyun 	.mode_valid = rcar_du_crtc_mode_valid,
872*4882a593Smuzhiyun };
873*4882a593Smuzhiyun 
874*4882a593Smuzhiyun static void rcar_du_crtc_crc_init(struct rcar_du_crtc *rcrtc)
875*4882a593Smuzhiyun {
876*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
877*4882a593Smuzhiyun 	const char **sources;
878*4882a593Smuzhiyun 	unsigned int count;
879*4882a593Smuzhiyun 	int i = -1;
880*4882a593Smuzhiyun 
881*4882a593Smuzhiyun 	/* CRC available only on Gen3 HW. */
882*4882a593Smuzhiyun 	if (rcdu->info->gen < 3)
883*4882a593Smuzhiyun 		return;
884*4882a593Smuzhiyun 
885*4882a593Smuzhiyun 	/* Reserve 1 for "auto" source. */
886*4882a593Smuzhiyun 	count = rcrtc->vsp->num_planes + 1;
887*4882a593Smuzhiyun 
888*4882a593Smuzhiyun 	sources = kmalloc_array(count, sizeof(*sources), GFP_KERNEL);
889*4882a593Smuzhiyun 	if (!sources)
890*4882a593Smuzhiyun 		return;
891*4882a593Smuzhiyun 
892*4882a593Smuzhiyun 	sources[0] = kstrdup("auto", GFP_KERNEL);
893*4882a593Smuzhiyun 	if (!sources[0])
894*4882a593Smuzhiyun 		goto error;
895*4882a593Smuzhiyun 
896*4882a593Smuzhiyun 	for (i = 0; i < rcrtc->vsp->num_planes; ++i) {
897*4882a593Smuzhiyun 		struct drm_plane *plane = &rcrtc->vsp->planes[i].plane;
898*4882a593Smuzhiyun 		char name[16];
899*4882a593Smuzhiyun 
900*4882a593Smuzhiyun 		sprintf(name, "plane%u", plane->base.id);
901*4882a593Smuzhiyun 		sources[i + 1] = kstrdup(name, GFP_KERNEL);
902*4882a593Smuzhiyun 		if (!sources[i + 1])
903*4882a593Smuzhiyun 			goto error;
904*4882a593Smuzhiyun 	}
905*4882a593Smuzhiyun 
906*4882a593Smuzhiyun 	rcrtc->sources = sources;
907*4882a593Smuzhiyun 	rcrtc->sources_count = count;
908*4882a593Smuzhiyun 	return;
909*4882a593Smuzhiyun 
910*4882a593Smuzhiyun error:
911*4882a593Smuzhiyun 	while (i >= 0) {
912*4882a593Smuzhiyun 		kfree(sources[i]);
913*4882a593Smuzhiyun 		i--;
914*4882a593Smuzhiyun 	}
915*4882a593Smuzhiyun 	kfree(sources);
916*4882a593Smuzhiyun }
917*4882a593Smuzhiyun 
918*4882a593Smuzhiyun static void rcar_du_crtc_crc_cleanup(struct rcar_du_crtc *rcrtc)
919*4882a593Smuzhiyun {
920*4882a593Smuzhiyun 	unsigned int i;
921*4882a593Smuzhiyun 
922*4882a593Smuzhiyun 	if (!rcrtc->sources)
923*4882a593Smuzhiyun 		return;
924*4882a593Smuzhiyun 
925*4882a593Smuzhiyun 	for (i = 0; i < rcrtc->sources_count; i++)
926*4882a593Smuzhiyun 		kfree(rcrtc->sources[i]);
927*4882a593Smuzhiyun 	kfree(rcrtc->sources);
928*4882a593Smuzhiyun 
929*4882a593Smuzhiyun 	rcrtc->sources = NULL;
930*4882a593Smuzhiyun 	rcrtc->sources_count = 0;
931*4882a593Smuzhiyun }
932*4882a593Smuzhiyun 
933*4882a593Smuzhiyun static struct drm_crtc_state *
934*4882a593Smuzhiyun rcar_du_crtc_atomic_duplicate_state(struct drm_crtc *crtc)
935*4882a593Smuzhiyun {
936*4882a593Smuzhiyun 	struct rcar_du_crtc_state *state;
937*4882a593Smuzhiyun 	struct rcar_du_crtc_state *copy;
938*4882a593Smuzhiyun 
939*4882a593Smuzhiyun 	if (WARN_ON(!crtc->state))
940*4882a593Smuzhiyun 		return NULL;
941*4882a593Smuzhiyun 
942*4882a593Smuzhiyun 	state = to_rcar_crtc_state(crtc->state);
943*4882a593Smuzhiyun 	copy = kmemdup(state, sizeof(*state), GFP_KERNEL);
944*4882a593Smuzhiyun 	if (copy == NULL)
945*4882a593Smuzhiyun 		return NULL;
946*4882a593Smuzhiyun 
947*4882a593Smuzhiyun 	__drm_atomic_helper_crtc_duplicate_state(crtc, &copy->state);
948*4882a593Smuzhiyun 
949*4882a593Smuzhiyun 	return &copy->state;
950*4882a593Smuzhiyun }
951*4882a593Smuzhiyun 
952*4882a593Smuzhiyun static void rcar_du_crtc_atomic_destroy_state(struct drm_crtc *crtc,
953*4882a593Smuzhiyun 					      struct drm_crtc_state *state)
954*4882a593Smuzhiyun {
955*4882a593Smuzhiyun 	__drm_atomic_helper_crtc_destroy_state(state);
956*4882a593Smuzhiyun 	kfree(to_rcar_crtc_state(state));
957*4882a593Smuzhiyun }
958*4882a593Smuzhiyun 
959*4882a593Smuzhiyun static void rcar_du_crtc_cleanup(struct drm_crtc *crtc)
960*4882a593Smuzhiyun {
961*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
962*4882a593Smuzhiyun 
963*4882a593Smuzhiyun 	rcar_du_crtc_crc_cleanup(rcrtc);
964*4882a593Smuzhiyun 
965*4882a593Smuzhiyun 	return drm_crtc_cleanup(crtc);
966*4882a593Smuzhiyun }
967*4882a593Smuzhiyun 
968*4882a593Smuzhiyun static void rcar_du_crtc_reset(struct drm_crtc *crtc)
969*4882a593Smuzhiyun {
970*4882a593Smuzhiyun 	struct rcar_du_crtc_state *state;
971*4882a593Smuzhiyun 
972*4882a593Smuzhiyun 	if (crtc->state) {
973*4882a593Smuzhiyun 		rcar_du_crtc_atomic_destroy_state(crtc, crtc->state);
974*4882a593Smuzhiyun 		crtc->state = NULL;
975*4882a593Smuzhiyun 	}
976*4882a593Smuzhiyun 
977*4882a593Smuzhiyun 	state = kzalloc(sizeof(*state), GFP_KERNEL);
978*4882a593Smuzhiyun 	if (state == NULL)
979*4882a593Smuzhiyun 		return;
980*4882a593Smuzhiyun 
981*4882a593Smuzhiyun 	state->crc.source = VSP1_DU_CRC_NONE;
982*4882a593Smuzhiyun 	state->crc.index = 0;
983*4882a593Smuzhiyun 
984*4882a593Smuzhiyun 	__drm_atomic_helper_crtc_reset(crtc, &state->state);
985*4882a593Smuzhiyun }
986*4882a593Smuzhiyun 
987*4882a593Smuzhiyun static int rcar_du_crtc_enable_vblank(struct drm_crtc *crtc)
988*4882a593Smuzhiyun {
989*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
990*4882a593Smuzhiyun 
991*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, DSRCR, DSRCR_VBCL);
992*4882a593Smuzhiyun 	rcar_du_crtc_set(rcrtc, DIER, DIER_VBE);
993*4882a593Smuzhiyun 	rcrtc->vblank_enable = true;
994*4882a593Smuzhiyun 
995*4882a593Smuzhiyun 	return 0;
996*4882a593Smuzhiyun }
997*4882a593Smuzhiyun 
998*4882a593Smuzhiyun static void rcar_du_crtc_disable_vblank(struct drm_crtc *crtc)
999*4882a593Smuzhiyun {
1000*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1001*4882a593Smuzhiyun 
1002*4882a593Smuzhiyun 	rcar_du_crtc_clr(rcrtc, DIER, DIER_VBE);
1003*4882a593Smuzhiyun 	rcrtc->vblank_enable = false;
1004*4882a593Smuzhiyun }
1005*4882a593Smuzhiyun 
1006*4882a593Smuzhiyun static int rcar_du_crtc_parse_crc_source(struct rcar_du_crtc *rcrtc,
1007*4882a593Smuzhiyun 					 const char *source_name,
1008*4882a593Smuzhiyun 					 enum vsp1_du_crc_source *source)
1009*4882a593Smuzhiyun {
1010*4882a593Smuzhiyun 	unsigned int index;
1011*4882a593Smuzhiyun 	int ret;
1012*4882a593Smuzhiyun 
1013*4882a593Smuzhiyun 	/*
1014*4882a593Smuzhiyun 	 * Parse the source name. Supported values are "plane%u" to compute the
1015*4882a593Smuzhiyun 	 * CRC on an input plane (%u is the plane ID), and "auto" to compute the
1016*4882a593Smuzhiyun 	 * CRC on the composer (VSP) output.
1017*4882a593Smuzhiyun 	 */
1018*4882a593Smuzhiyun 
1019*4882a593Smuzhiyun 	if (!source_name) {
1020*4882a593Smuzhiyun 		*source = VSP1_DU_CRC_NONE;
1021*4882a593Smuzhiyun 		return 0;
1022*4882a593Smuzhiyun 	} else if (!strcmp(source_name, "auto")) {
1023*4882a593Smuzhiyun 		*source = VSP1_DU_CRC_OUTPUT;
1024*4882a593Smuzhiyun 		return 0;
1025*4882a593Smuzhiyun 	} else if (strstarts(source_name, "plane")) {
1026*4882a593Smuzhiyun 		unsigned int i;
1027*4882a593Smuzhiyun 
1028*4882a593Smuzhiyun 		*source = VSP1_DU_CRC_PLANE;
1029*4882a593Smuzhiyun 
1030*4882a593Smuzhiyun 		ret = kstrtouint(source_name + strlen("plane"), 10, &index);
1031*4882a593Smuzhiyun 		if (ret < 0)
1032*4882a593Smuzhiyun 			return ret;
1033*4882a593Smuzhiyun 
1034*4882a593Smuzhiyun 		for (i = 0; i < rcrtc->vsp->num_planes; ++i) {
1035*4882a593Smuzhiyun 			if (index == rcrtc->vsp->planes[i].plane.base.id)
1036*4882a593Smuzhiyun 				return i;
1037*4882a593Smuzhiyun 		}
1038*4882a593Smuzhiyun 	}
1039*4882a593Smuzhiyun 
1040*4882a593Smuzhiyun 	return -EINVAL;
1041*4882a593Smuzhiyun }
1042*4882a593Smuzhiyun 
1043*4882a593Smuzhiyun static int rcar_du_crtc_verify_crc_source(struct drm_crtc *crtc,
1044*4882a593Smuzhiyun 					  const char *source_name,
1045*4882a593Smuzhiyun 					  size_t *values_cnt)
1046*4882a593Smuzhiyun {
1047*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1048*4882a593Smuzhiyun 	enum vsp1_du_crc_source source;
1049*4882a593Smuzhiyun 
1050*4882a593Smuzhiyun 	if (rcar_du_crtc_parse_crc_source(rcrtc, source_name, &source) < 0) {
1051*4882a593Smuzhiyun 		DRM_DEBUG_DRIVER("unknown source %s\n", source_name);
1052*4882a593Smuzhiyun 		return -EINVAL;
1053*4882a593Smuzhiyun 	}
1054*4882a593Smuzhiyun 
1055*4882a593Smuzhiyun 	*values_cnt = 1;
1056*4882a593Smuzhiyun 	return 0;
1057*4882a593Smuzhiyun }
1058*4882a593Smuzhiyun 
1059*4882a593Smuzhiyun static const char *const *
1060*4882a593Smuzhiyun rcar_du_crtc_get_crc_sources(struct drm_crtc *crtc, size_t *count)
1061*4882a593Smuzhiyun {
1062*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1063*4882a593Smuzhiyun 
1064*4882a593Smuzhiyun 	*count = rcrtc->sources_count;
1065*4882a593Smuzhiyun 	return rcrtc->sources;
1066*4882a593Smuzhiyun }
1067*4882a593Smuzhiyun 
1068*4882a593Smuzhiyun static int rcar_du_crtc_set_crc_source(struct drm_crtc *crtc,
1069*4882a593Smuzhiyun 				       const char *source_name)
1070*4882a593Smuzhiyun {
1071*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1072*4882a593Smuzhiyun 	struct drm_modeset_acquire_ctx ctx;
1073*4882a593Smuzhiyun 	struct drm_crtc_state *crtc_state;
1074*4882a593Smuzhiyun 	struct drm_atomic_state *state;
1075*4882a593Smuzhiyun 	enum vsp1_du_crc_source source;
1076*4882a593Smuzhiyun 	unsigned int index;
1077*4882a593Smuzhiyun 	int ret;
1078*4882a593Smuzhiyun 
1079*4882a593Smuzhiyun 	ret = rcar_du_crtc_parse_crc_source(rcrtc, source_name, &source);
1080*4882a593Smuzhiyun 	if (ret < 0)
1081*4882a593Smuzhiyun 		return ret;
1082*4882a593Smuzhiyun 
1083*4882a593Smuzhiyun 	index = ret;
1084*4882a593Smuzhiyun 
1085*4882a593Smuzhiyun 	/* Perform an atomic commit to set the CRC source. */
1086*4882a593Smuzhiyun 	drm_modeset_acquire_init(&ctx, 0);
1087*4882a593Smuzhiyun 
1088*4882a593Smuzhiyun 	state = drm_atomic_state_alloc(crtc->dev);
1089*4882a593Smuzhiyun 	if (!state) {
1090*4882a593Smuzhiyun 		ret = -ENOMEM;
1091*4882a593Smuzhiyun 		goto unlock;
1092*4882a593Smuzhiyun 	}
1093*4882a593Smuzhiyun 
1094*4882a593Smuzhiyun 	state->acquire_ctx = &ctx;
1095*4882a593Smuzhiyun 
1096*4882a593Smuzhiyun retry:
1097*4882a593Smuzhiyun 	crtc_state = drm_atomic_get_crtc_state(state, crtc);
1098*4882a593Smuzhiyun 	if (!IS_ERR(crtc_state)) {
1099*4882a593Smuzhiyun 		struct rcar_du_crtc_state *rcrtc_state;
1100*4882a593Smuzhiyun 
1101*4882a593Smuzhiyun 		rcrtc_state = to_rcar_crtc_state(crtc_state);
1102*4882a593Smuzhiyun 		rcrtc_state->crc.source = source;
1103*4882a593Smuzhiyun 		rcrtc_state->crc.index = index;
1104*4882a593Smuzhiyun 
1105*4882a593Smuzhiyun 		ret = drm_atomic_commit(state);
1106*4882a593Smuzhiyun 	} else {
1107*4882a593Smuzhiyun 		ret = PTR_ERR(crtc_state);
1108*4882a593Smuzhiyun 	}
1109*4882a593Smuzhiyun 
1110*4882a593Smuzhiyun 	if (ret == -EDEADLK) {
1111*4882a593Smuzhiyun 		drm_atomic_state_clear(state);
1112*4882a593Smuzhiyun 		drm_modeset_backoff(&ctx);
1113*4882a593Smuzhiyun 		goto retry;
1114*4882a593Smuzhiyun 	}
1115*4882a593Smuzhiyun 
1116*4882a593Smuzhiyun 	drm_atomic_state_put(state);
1117*4882a593Smuzhiyun 
1118*4882a593Smuzhiyun unlock:
1119*4882a593Smuzhiyun 	drm_modeset_drop_locks(&ctx);
1120*4882a593Smuzhiyun 	drm_modeset_acquire_fini(&ctx);
1121*4882a593Smuzhiyun 
1122*4882a593Smuzhiyun 	return ret;
1123*4882a593Smuzhiyun }
1124*4882a593Smuzhiyun 
1125*4882a593Smuzhiyun static const struct drm_crtc_funcs crtc_funcs_gen2 = {
1126*4882a593Smuzhiyun 	.reset = rcar_du_crtc_reset,
1127*4882a593Smuzhiyun 	.destroy = drm_crtc_cleanup,
1128*4882a593Smuzhiyun 	.set_config = drm_atomic_helper_set_config,
1129*4882a593Smuzhiyun 	.page_flip = drm_atomic_helper_page_flip,
1130*4882a593Smuzhiyun 	.atomic_duplicate_state = rcar_du_crtc_atomic_duplicate_state,
1131*4882a593Smuzhiyun 	.atomic_destroy_state = rcar_du_crtc_atomic_destroy_state,
1132*4882a593Smuzhiyun 	.enable_vblank = rcar_du_crtc_enable_vblank,
1133*4882a593Smuzhiyun 	.disable_vblank = rcar_du_crtc_disable_vblank,
1134*4882a593Smuzhiyun };
1135*4882a593Smuzhiyun 
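/*
 * Gen3 CRTCs additionally expose debugfs CRC sources (computed by the VSP)
 * and the legacy gamma ioctl used to program the CMM, and use a dedicated
 * destroy hook, presumably to release the CRC source list built by
 * rcar_du_crtc_crc_init().
 */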
1136*4882a593Smuzhiyun static const struct drm_crtc_funcs crtc_funcs_gen3 = {
1137*4882a593Smuzhiyun 	.reset = rcar_du_crtc_reset,
1138*4882a593Smuzhiyun 	.destroy = rcar_du_crtc_cleanup,
1139*4882a593Smuzhiyun 	.set_config = drm_atomic_helper_set_config,
1140*4882a593Smuzhiyun 	.page_flip = drm_atomic_helper_page_flip,
1141*4882a593Smuzhiyun 	.atomic_duplicate_state = rcar_du_crtc_atomic_duplicate_state,
1142*4882a593Smuzhiyun 	.atomic_destroy_state = rcar_du_crtc_atomic_destroy_state,
1143*4882a593Smuzhiyun 	.enable_vblank = rcar_du_crtc_enable_vblank,
1144*4882a593Smuzhiyun 	.disable_vblank = rcar_du_crtc_disable_vblank,
1145*4882a593Smuzhiyun 	.set_crc_source = rcar_du_crtc_set_crc_source,
1146*4882a593Smuzhiyun 	.verify_crc_source = rcar_du_crtc_verify_crc_source,
1147*4882a593Smuzhiyun 	.get_crc_sources = rcar_du_crtc_get_crc_sources,
1148*4882a593Smuzhiyun 	.gamma_set = drm_atomic_helper_legacy_gamma_set,
1149*4882a593Smuzhiyun };
1150*4882a593Smuzhiyun 
1151*4882a593Smuzhiyun /* -----------------------------------------------------------------------------
1152*4882a593Smuzhiyun  * Interrupt Handling
1153*4882a593Smuzhiyun  */
1154*4882a593Smuzhiyun 
1155*4882a593Smuzhiyun static irqreturn_t rcar_du_crtc_irq(int irq, void *arg)
1156*4882a593Smuzhiyun {
1157*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = arg;
1158*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rcrtc->dev;
1159*4882a593Smuzhiyun 	irqreturn_t ret = IRQ_NONE;
1160*4882a593Smuzhiyun 	u32 status;
1161*4882a593Smuzhiyun 
1162*4882a593Smuzhiyun 	spin_lock(&rcrtc->vblank_lock);
1163*4882a593Smuzhiyun 
1164*4882a593Smuzhiyun 	status = rcar_du_crtc_read(rcrtc, DSSR);
1165*4882a593Smuzhiyun 	rcar_du_crtc_write(rcrtc, DSRCR, status & DSRCR_MASK);
1166*4882a593Smuzhiyun 
1167*4882a593Smuzhiyun 	if (status & DSSR_VBK) {
1168*4882a593Smuzhiyun 		/*
1169*4882a593Smuzhiyun 		 * Wake up the vblank wait if the counter reaches 0. This must
1170*4882a593Smuzhiyun 		 * be protected by the vblank_lock to avoid races in
1171*4882a593Smuzhiyun 		 * rcar_du_crtc_disable_planes().
1172*4882a593Smuzhiyun 		 */
1173*4882a593Smuzhiyun 		if (rcrtc->vblank_count) {
1174*4882a593Smuzhiyun 			if (--rcrtc->vblank_count == 0)
1175*4882a593Smuzhiyun 				wake_up(&rcrtc->vblank_wait);
1176*4882a593Smuzhiyun 		}
1177*4882a593Smuzhiyun 	}
1178*4882a593Smuzhiyun 
1179*4882a593Smuzhiyun 	spin_unlock(&rcrtc->vblank_lock);
1180*4882a593Smuzhiyun 
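	/*
	 * On Gen3 the vblank and page flip events are signalled through the
	 * VSP frame end handler, so only report them from the DU interrupt
	 * on Gen1 and Gen2.
	 */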
1181*4882a593Smuzhiyun 	if (status & DSSR_VBK) {
1182*4882a593Smuzhiyun 		if (rcdu->info->gen < 3) {
1183*4882a593Smuzhiyun 			drm_crtc_handle_vblank(&rcrtc->crtc);
1184*4882a593Smuzhiyun 			rcar_du_crtc_finish_page_flip(rcrtc);
1185*4882a593Smuzhiyun 		}
1186*4882a593Smuzhiyun 
1187*4882a593Smuzhiyun 		ret = IRQ_HANDLED;
1188*4882a593Smuzhiyun 	}
1189*4882a593Smuzhiyun 
1190*4882a593Smuzhiyun 	return ret;
1191*4882a593Smuzhiyun }
1192*4882a593Smuzhiyun 
1193*4882a593Smuzhiyun /* -----------------------------------------------------------------------------
1194*4882a593Smuzhiyun  * Initialization
1195*4882a593Smuzhiyun  */
1196*4882a593Smuzhiyun 
1197*4882a593Smuzhiyun int rcar_du_crtc_create(struct rcar_du_group *rgrp, unsigned int swindex,
1198*4882a593Smuzhiyun 			unsigned int hwindex)
1199*4882a593Smuzhiyun {
1200*4882a593Smuzhiyun 	static const unsigned int mmio_offsets[] = {
1201*4882a593Smuzhiyun 		DU0_REG_OFFSET, DU1_REG_OFFSET, DU2_REG_OFFSET, DU3_REG_OFFSET
1202*4882a593Smuzhiyun 	};
1203*4882a593Smuzhiyun 
1204*4882a593Smuzhiyun 	struct rcar_du_device *rcdu = rgrp->dev;
1205*4882a593Smuzhiyun 	struct platform_device *pdev = to_platform_device(rcdu->dev);
1206*4882a593Smuzhiyun 	struct rcar_du_crtc *rcrtc = &rcdu->crtcs[swindex];
1207*4882a593Smuzhiyun 	struct drm_crtc *crtc = &rcrtc->crtc;
1208*4882a593Smuzhiyun 	struct drm_plane *primary;
1209*4882a593Smuzhiyun 	unsigned int irqflags;
1210*4882a593Smuzhiyun 	struct clk *clk;
1211*4882a593Smuzhiyun 	char clk_name[9]; /* Sized for "dclkin.%u" with a single-digit index. */
1212*4882a593Smuzhiyun 	char *name;
1213*4882a593Smuzhiyun 	int irq;
1214*4882a593Smuzhiyun 	int ret;
1215*4882a593Smuzhiyun 
1216*4882a593Smuzhiyun 	/* Get the CRTC clock and the optional external clock. */
1217*4882a593Smuzhiyun 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_IRQ_CLOCK)) {
1218*4882a593Smuzhiyun 		sprintf(clk_name, "du.%u", hwindex);
1219*4882a593Smuzhiyun 		name = clk_name;
1220*4882a593Smuzhiyun 	} else {
1221*4882a593Smuzhiyun 		name = NULL;
1222*4882a593Smuzhiyun 	}
1223*4882a593Smuzhiyun 
1224*4882a593Smuzhiyun 	rcrtc->clock = devm_clk_get(rcdu->dev, name);
1225*4882a593Smuzhiyun 	if (IS_ERR(rcrtc->clock)) {
1226*4882a593Smuzhiyun 		dev_err(rcdu->dev, "no clock for DU channel %u\n", hwindex);
1227*4882a593Smuzhiyun 		return PTR_ERR(rcrtc->clock);
1228*4882a593Smuzhiyun 	}
1229*4882a593Smuzhiyun 
1230*4882a593Smuzhiyun 	sprintf(clk_name, "dclkin.%u", hwindex);
1231*4882a593Smuzhiyun 	clk = devm_clk_get(rcdu->dev, clk_name);
1232*4882a593Smuzhiyun 	if (!IS_ERR(clk)) {
1233*4882a593Smuzhiyun 		rcrtc->extclock = clk;
1234*4882a593Smuzhiyun 	} else if (PTR_ERR(clk) == -EPROBE_DEFER) {
1235*4882a593Smuzhiyun 		return -EPROBE_DEFER;
1236*4882a593Smuzhiyun 	} else if (rcdu->info->dpll_mask & BIT(hwindex)) {
1237*4882a593Smuzhiyun 		/*
1238*4882a593Smuzhiyun 		 * DU channels that have a display PLL can't use the internal
1239*4882a593Smuzhiyun 		 * system clock and thus require an external clock.
1240*4882a593Smuzhiyun 		 */
1241*4882a593Smuzhiyun 		ret = PTR_ERR(clk);
1242*4882a593Smuzhiyun 		dev_err(rcdu->dev, "can't get dclkin.%u: %d\n", hwindex, ret);
1243*4882a593Smuzhiyun 		return ret;
1244*4882a593Smuzhiyun 	}
1245*4882a593Smuzhiyun 
1246*4882a593Smuzhiyun 	init_waitqueue_head(&rcrtc->flip_wait);
1247*4882a593Smuzhiyun 	init_waitqueue_head(&rcrtc->vblank_wait);
1248*4882a593Smuzhiyun 	spin_lock_init(&rcrtc->vblank_lock);
1249*4882a593Smuzhiyun 
1250*4882a593Smuzhiyun 	rcrtc->dev = rcdu;
1251*4882a593Smuzhiyun 	rcrtc->group = rgrp;
1252*4882a593Smuzhiyun 	rcrtc->mmio_offset = mmio_offsets[hwindex];
1253*4882a593Smuzhiyun 	rcrtc->index = hwindex;
1254*4882a593Smuzhiyun 	rcrtc->dsysr = (rcrtc->index % 2 ? 0 : DSYSR_DRES) | DSYSR_TVM_TVSYNC;
1255*4882a593Smuzhiyun 
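	/*
	 * When the VSP feeds the DU the primary plane belongs to the VSP
	 * pipeline; otherwise the two CRTCs of a group share the group's
	 * hardware planes, hence the swindex % 2 selection.
	 */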
1256*4882a593Smuzhiyun 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE))
1257*4882a593Smuzhiyun 		primary = &rcrtc->vsp->planes[rcrtc->vsp_pipe].plane;
1258*4882a593Smuzhiyun 	else
1259*4882a593Smuzhiyun 		primary = &rgrp->planes[swindex % 2].plane;
1260*4882a593Smuzhiyun 
1261*4882a593Smuzhiyun 	ret = drm_crtc_init_with_planes(rcdu->ddev, crtc, primary, NULL,
1262*4882a593Smuzhiyun 					rcdu->info->gen <= 2 ?
1263*4882a593Smuzhiyun 					&crtc_funcs_gen2 : &crtc_funcs_gen3,
1264*4882a593Smuzhiyun 					NULL);
1265*4882a593Smuzhiyun 	if (ret < 0)
1266*4882a593Smuzhiyun 		return ret;
1267*4882a593Smuzhiyun 
1268*4882a593Smuzhiyun 	/* CMM might be disabled for this CRTC. */
1269*4882a593Smuzhiyun 	if (rcdu->cmms[swindex]) {
1270*4882a593Smuzhiyun 		rcrtc->cmm = rcdu->cmms[swindex];
1271*4882a593Smuzhiyun 		rgrp->cmms_mask |= BIT(hwindex % 2);
1272*4882a593Smuzhiyun 
1273*4882a593Smuzhiyun 		drm_mode_crtc_set_gamma_size(crtc, CM2_LUT_SIZE);
1274*4882a593Smuzhiyun 		drm_crtc_enable_color_mgmt(crtc, 0, false, CM2_LUT_SIZE);
1275*4882a593Smuzhiyun 	}
1276*4882a593Smuzhiyun 
1277*4882a593Smuzhiyun 	drm_crtc_helper_add(crtc, &crtc_helper_funcs);
1278*4882a593Smuzhiyun 
1279*4882a593Smuzhiyun 	/* Register the interrupt handler. */
1280*4882a593Smuzhiyun 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_IRQ_CLOCK)) {
1281*4882a593Smuzhiyun 		/* The IRQs are associated with the CRTC (sw) index. */
1282*4882a593Smuzhiyun 		irq = platform_get_irq(pdev, swindex);
1283*4882a593Smuzhiyun 		irqflags = 0;
1284*4882a593Smuzhiyun 	} else {
1285*4882a593Smuzhiyun 		irq = platform_get_irq(pdev, 0);
1286*4882a593Smuzhiyun 		irqflags = IRQF_SHARED;
1287*4882a593Smuzhiyun 	}
1288*4882a593Smuzhiyun 
1289*4882a593Smuzhiyun 	if (irq < 0) {
1290*4882a593Smuzhiyun 		dev_err(rcdu->dev, "no IRQ for CRTC %u\n", swindex);
1291*4882a593Smuzhiyun 		return irq;
1292*4882a593Smuzhiyun 	}
1293*4882a593Smuzhiyun 
1294*4882a593Smuzhiyun 	ret = devm_request_irq(rcdu->dev, irq, rcar_du_crtc_irq, irqflags,
1295*4882a593Smuzhiyun 			       dev_name(rcdu->dev), rcrtc);
1296*4882a593Smuzhiyun 	if (ret < 0) {
1297*4882a593Smuzhiyun 		dev_err(rcdu->dev,
1298*4882a593Smuzhiyun 			"failed to register IRQ for CRTC %u\n", swindex);
1299*4882a593Smuzhiyun 		return ret;
1300*4882a593Smuzhiyun 	}
1301*4882a593Smuzhiyun 
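	/*
	 * Build the list of CRC sources exposed through debugfs (assumed to
	 * return early on hardware generations without CRC support).
	 */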
1302*4882a593Smuzhiyun 	rcar_du_crtc_crc_init(rcrtc);
1303*4882a593Smuzhiyun 
1304*4882a593Smuzhiyun 	return 0;
1305*4882a593Smuzhiyun }
1306