// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015 MediaTek Inc.
 */

#include <linux/clk.h>
#include <linux/pm_runtime.h>
#include <linux/soc/mediatek/mtk-cmdq.h>
#include <linux/soc/mediatek/mtk-mmsys.h>

#include <asm/barrier.h>
#include <soc/mediatek/smi.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#include "mtk_drm_drv.h"
#include "mtk_drm_crtc.h"
#include "mtk_drm_ddp.h"
#include "mtk_drm_ddp_comp.h"
#include "mtk_drm_gem.h"
#include "mtk_drm_plane.h"

/**
 * struct mtk_drm_crtc - MediaTek specific crtc structure.
 * @base: crtc object.
 * @enabled: records whether crtc_enable succeeded
 * @pending_needs_vblank: whether a page flip event must be sent on the next vblank
 * @event: pending vblank event, sent once the new frame has been applied
 * @planes: array of drm_plane structures, one for each hardware layer in the path
 * @layer_nr: number of planes in @planes
 * @pending_planes: whether any plane has pending changes to be applied
 * @pending_async_planes: whether any plane has pending asynchronous changes
 * @cmdq_client: mailbox client used to flush register updates through CMDQ
 * @cmdq_event: GCE event this crtc waits for before applying an update
 * @mmsys_dev: pointer to the mmsys device for configuration registers
 * @mutex: handle to one of the ten disp_mutex streams
 * @ddp_comp_nr: number of components in ddp_comp
 * @ddp_comp: array of pointers to the mtk_ddp_comp structures used by this crtc
 * @hw_lock: lock serializing access to the display hardware
 */
struct mtk_drm_crtc {
	struct drm_crtc base;
	bool enabled;

	bool pending_needs_vblank;
	struct drm_pending_vblank_event *event;

	struct drm_plane *planes;
	unsigned int layer_nr;
	bool pending_planes;
	bool pending_async_planes;

#if IS_REACHABLE(CONFIG_MTK_CMDQ)
	struct cmdq_client *cmdq_client;
	u32 cmdq_event;
#endif

	struct device *mmsys_dev;
	struct mtk_disp_mutex *mutex;
	unsigned int ddp_comp_nr;
	struct mtk_ddp_comp **ddp_comp;

	/* lock for display hardware access */
	struct mutex hw_lock;
};

struct mtk_crtc_state {
	struct drm_crtc_state base;

	bool pending_config;
	unsigned int pending_width;
	unsigned int pending_height;
	unsigned int pending_vrefresh;
};

static inline struct mtk_drm_crtc *to_mtk_crtc(struct drm_crtc *c)
{
	return container_of(c, struct mtk_drm_crtc, base);
}

static inline struct mtk_crtc_state *to_mtk_crtc_state(struct drm_crtc_state *s)
{
	return container_of(s, struct mtk_crtc_state, base);
}

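/*
 * Send the page flip completion event that was armed in atomic_begin and
 * drop the vblank reference taken there. Only called while mtk_crtc->event
 * is pending, i.e. when pending_needs_vblank is set.
 */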
static void mtk_drm_crtc_finish_page_flip(struct mtk_drm_crtc *mtk_crtc)
{
	struct drm_crtc *crtc = &mtk_crtc->base;
	unsigned long flags;

	spin_lock_irqsave(&crtc->dev->event_lock, flags);
	drm_crtc_send_vblank_event(crtc, mtk_crtc->event);
	drm_crtc_vblank_put(crtc);
	mtk_crtc->event = NULL;
	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
}

static void mtk_drm_finish_page_flip(struct mtk_drm_crtc *mtk_crtc)
{
	drm_crtc_handle_vblank(&mtk_crtc->base);
	if (mtk_crtc->pending_needs_vblank) {
		mtk_drm_crtc_finish_page_flip(mtk_crtc);
		mtk_crtc->pending_needs_vblank = false;
	}
}

static void mtk_drm_crtc_destroy(struct drm_crtc *crtc)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);

	mtk_disp_mutex_put(mtk_crtc->mutex);

	drm_crtc_cleanup(crtc);
}

static void mtk_drm_crtc_reset(struct drm_crtc *crtc)
{
	struct mtk_crtc_state *state;

	if (crtc->state)
		__drm_atomic_helper_crtc_destroy_state(crtc->state);

	kfree(to_mtk_crtc_state(crtc->state));
	crtc->state = NULL;

	state = kzalloc(sizeof(*state), GFP_KERNEL);
	if (state)
		__drm_atomic_helper_crtc_reset(crtc, &state->base);
}

static struct drm_crtc_state *mtk_drm_crtc_duplicate_state(struct drm_crtc *crtc)
{
	struct mtk_crtc_state *state;

	state = kzalloc(sizeof(*state), GFP_KERNEL);
	if (!state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &state->base);

	WARN_ON(state->base.crtc != crtc);
	state->base.crtc = crtc;

	return &state->base;
}

static void mtk_drm_crtc_destroy_state(struct drm_crtc *crtc,
				       struct drm_crtc_state *state)
{
	__drm_atomic_helper_crtc_destroy_state(state);
	kfree(to_mtk_crtc_state(state));
}

static bool mtk_drm_crtc_mode_fixup(struct drm_crtc *crtc,
				    const struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	/* Nothing to do here, but this callback is mandatory. */
	return true;
}

static void mtk_drm_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct mtk_crtc_state *state = to_mtk_crtc_state(crtc->state);

	state->pending_width = crtc->mode.hdisplay;
	state->pending_height = crtc->mode.vdisplay;
	state->pending_vrefresh = drm_mode_vrefresh(&crtc->mode);
	wmb();	/* Make sure the above parameters are set before update */
	state->pending_config = true;
}

static int mtk_drm_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0];

	mtk_ddp_comp_enable_vblank(comp, &mtk_crtc->base);

	return 0;
}

static void mtk_drm_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0];

	mtk_ddp_comp_disable_vblank(comp);
}

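/*
 * Enable the clocks of every component in the display path. On failure the
 * clocks that were already enabled are rolled back in reverse order.
 */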
static int mtk_crtc_ddp_clk_enable(struct mtk_drm_crtc *mtk_crtc)
{
	int ret;
	int i;

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) {
		ret = clk_prepare_enable(mtk_crtc->ddp_comp[i]->clk);
		if (ret) {
			DRM_ERROR("Failed to enable clock %d: %d\n", i, ret);
			goto err;
		}
	}

	return 0;
err:
	while (--i >= 0)
		clk_disable_unprepare(mtk_crtc->ddp_comp[i]->clk);
	return ret;
}

static void mtk_crtc_ddp_clk_disable(struct mtk_drm_crtc *mtk_crtc)
{
	int i;

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++)
		clk_disable_unprepare(mtk_crtc->ddp_comp[i]->clk);
}

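/*
 * Map a crtc-wide plane index to the DDP component that owns it. Planes are
 * assigned to components in path order, so walk the path and subtract each
 * component's layer count until the index falls within one of them;
 * *local_layer then holds the layer index inside that component.
 */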
static
struct mtk_ddp_comp *mtk_drm_ddp_comp_for_plane(struct drm_crtc *crtc,
						struct drm_plane *plane,
						unsigned int *local_layer)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	struct mtk_ddp_comp *comp;
	int i, count = 0;
	unsigned int local_index = plane - mtk_crtc->planes;

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) {
		comp = mtk_crtc->ddp_comp[i];
		if (local_index < (count + mtk_ddp_comp_layer_nr(comp))) {
			*local_layer = local_index - count;
			return comp;
		}
		count += mtk_ddp_comp_layer_nr(comp);
	}

	WARN(1, "Failed to find component for plane %d\n", plane->index);
	return NULL;
}

#if IS_REACHABLE(CONFIG_MTK_CMDQ)
static void ddp_cmdq_cb(struct cmdq_cb_data data)
{
	cmdq_pkt_destroy(data.data);
}
#endif

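/*
 * Power up and configure the whole display path: take the power domain and
 * the mutex/component clocks, connect the components in mmsys, add them to
 * the disp_mutex, then program each component with the adjusted mode and
 * push the initial plane configuration.
 */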
static int mtk_crtc_ddp_hw_init(struct mtk_drm_crtc *mtk_crtc)
{
	struct drm_crtc *crtc = &mtk_crtc->base;
	struct drm_connector *connector;
	struct drm_encoder *encoder;
	struct drm_connector_list_iter conn_iter;
	unsigned int width, height, vrefresh, bpc = MTK_MAX_BPC;
	int ret;
	int i;

	if (WARN_ON(!crtc->state))
		return -EINVAL;

	width = crtc->state->adjusted_mode.hdisplay;
	height = crtc->state->adjusted_mode.vdisplay;
	vrefresh = drm_mode_vrefresh(&crtc->state->adjusted_mode);

	drm_for_each_encoder(encoder, crtc->dev) {
		if (encoder->crtc != crtc)
			continue;

		drm_connector_list_iter_begin(crtc->dev, &conn_iter);
		drm_for_each_connector_iter(connector, &conn_iter) {
			if (connector->encoder != encoder)
				continue;
			if (connector->display_info.bpc != 0 &&
			    bpc > connector->display_info.bpc)
				bpc = connector->display_info.bpc;
		}
		drm_connector_list_iter_end(&conn_iter);
	}

	ret = pm_runtime_resume_and_get(crtc->dev->dev);
	if (ret < 0) {
		DRM_ERROR("Failed to enable power domain: %d\n", ret);
		return ret;
	}

	ret = mtk_disp_mutex_prepare(mtk_crtc->mutex);
	if (ret < 0) {
		DRM_ERROR("Failed to enable mutex clock: %d\n", ret);
		goto err_pm_runtime_put;
	}

	ret = mtk_crtc_ddp_clk_enable(mtk_crtc);
	if (ret < 0) {
		DRM_ERROR("Failed to enable component clocks: %d\n", ret);
		goto err_mutex_unprepare;
	}

	for (i = 0; i < mtk_crtc->ddp_comp_nr - 1; i++) {
		mtk_mmsys_ddp_connect(mtk_crtc->mmsys_dev,
				      mtk_crtc->ddp_comp[i]->id,
				      mtk_crtc->ddp_comp[i + 1]->id);
		mtk_disp_mutex_add_comp(mtk_crtc->mutex,
					mtk_crtc->ddp_comp[i]->id);
	}
	mtk_disp_mutex_add_comp(mtk_crtc->mutex, mtk_crtc->ddp_comp[i]->id);
	mtk_disp_mutex_enable(mtk_crtc->mutex);

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) {
		struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[i];

		if (i == 1)
			mtk_ddp_comp_bgclr_in_on(comp);

		mtk_ddp_comp_config(comp, width, height, vrefresh, bpc, NULL);
		mtk_ddp_comp_start(comp);
	}

	/* Initially configure all planes */
	for (i = 0; i < mtk_crtc->layer_nr; i++) {
		struct drm_plane *plane = &mtk_crtc->planes[i];
		struct mtk_plane_state *plane_state;
		struct mtk_ddp_comp *comp;
		unsigned int local_layer;

		plane_state = to_mtk_plane_state(plane->state);
		comp = mtk_drm_ddp_comp_for_plane(crtc, plane, &local_layer);
		if (comp)
			mtk_ddp_comp_layer_config(comp, local_layer,
						  plane_state, NULL);
	}

	return 0;

err_mutex_unprepare:
	mtk_disp_mutex_unprepare(mtk_crtc->mutex);
err_pm_runtime_put:
	pm_runtime_put(crtc->dev->dev);
	return ret;
}

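/*
 * Tear the path down in the reverse order of mtk_crtc_ddp_hw_init(): stop
 * the components, remove them from the disp_mutex, disconnect them in
 * mmsys, release clocks and the power domain, and send any leftover event
 * if the crtc is being turned off.
 */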
static void mtk_crtc_ddp_hw_fini(struct mtk_drm_crtc *mtk_crtc)
{
	struct drm_device *drm = mtk_crtc->base.dev;
	struct drm_crtc *crtc = &mtk_crtc->base;
	int i;

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) {
		mtk_ddp_comp_stop(mtk_crtc->ddp_comp[i]);
		if (i == 1)
			mtk_ddp_comp_bgclr_in_off(mtk_crtc->ddp_comp[i]);
	}

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++)
		mtk_disp_mutex_remove_comp(mtk_crtc->mutex,
					   mtk_crtc->ddp_comp[i]->id);
	mtk_disp_mutex_disable(mtk_crtc->mutex);
	for (i = 0; i < mtk_crtc->ddp_comp_nr - 1; i++) {
		mtk_mmsys_ddp_disconnect(mtk_crtc->mmsys_dev,
					 mtk_crtc->ddp_comp[i]->id,
					 mtk_crtc->ddp_comp[i + 1]->id);
		mtk_disp_mutex_remove_comp(mtk_crtc->mutex,
					   mtk_crtc->ddp_comp[i]->id);
	}
	mtk_disp_mutex_remove_comp(mtk_crtc->mutex, mtk_crtc->ddp_comp[i]->id);
	mtk_crtc_ddp_clk_disable(mtk_crtc);
	mtk_disp_mutex_unprepare(mtk_crtc->mutex);

	pm_runtime_put(drm->dev);

	if (crtc->state->event && !crtc->state->active) {
		spin_lock_irq(&crtc->dev->event_lock);
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

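/*
 * Write the pending crtc and plane configuration to the hardware. When
 * cmdq_handle is NULL the registers are written directly by the CPU;
 * otherwise the writes are appended to the CMDQ packet and applied by the
 * GCE when the packet is executed.
 */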
static void mtk_crtc_ddp_config(struct drm_crtc *crtc,
				struct cmdq_pkt *cmdq_handle)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	struct mtk_crtc_state *state = to_mtk_crtc_state(mtk_crtc->base.state);
	struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0];
	unsigned int i;
	unsigned int local_layer;

	/*
	 * TODO: instead of updating the registers here, we should prepare
	 * working registers in atomic_commit and let the hardware command
	 * queue update module registers on vblank.
	 */
	if (state->pending_config) {
		mtk_ddp_comp_config(comp, state->pending_width,
				    state->pending_height,
				    state->pending_vrefresh, 0,
				    cmdq_handle);

		state->pending_config = false;
	}

	if (mtk_crtc->pending_planes) {
		for (i = 0; i < mtk_crtc->layer_nr; i++) {
			struct drm_plane *plane = &mtk_crtc->planes[i];
			struct mtk_plane_state *plane_state;

			plane_state = to_mtk_plane_state(plane->state);

			if (!plane_state->pending.config)
				continue;

			comp = mtk_drm_ddp_comp_for_plane(crtc, plane,
							  &local_layer);

			if (comp)
				mtk_ddp_comp_layer_config(comp, local_layer,
							  plane_state,
							  cmdq_handle);
			plane_state->pending.config = false;
		}
		mtk_crtc->pending_planes = false;
	}

	if (mtk_crtc->pending_async_planes) {
		for (i = 0; i < mtk_crtc->layer_nr; i++) {
			struct drm_plane *plane = &mtk_crtc->planes[i];
			struct mtk_plane_state *plane_state;

			plane_state = to_mtk_plane_state(plane->state);

			if (!plane_state->pending.async_config)
				continue;

			comp = mtk_drm_ddp_comp_for_plane(crtc, plane,
							  &local_layer);

			if (comp)
				mtk_ddp_comp_layer_config(comp, local_layer,
							  plane_state,
							  cmdq_handle);
			plane_state->pending.async_config = false;
		}
		mtk_crtc->pending_async_planes = false;
	}
}

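/*
 * Latch dirty plane state into pending flags and flush it to the hardware,
 * either immediately under the disp_mutex shadow registers or by queueing a
 * CMDQ packet that waits for the configured GCE event. hw_lock serializes
 * this against concurrent asynchronous (cursor) updates.
 */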
static void mtk_drm_crtc_hw_config(struct mtk_drm_crtc *mtk_crtc)
{
#if IS_REACHABLE(CONFIG_MTK_CMDQ)
	struct cmdq_pkt *cmdq_handle;
#endif
	struct drm_crtc *crtc = &mtk_crtc->base;
	struct mtk_drm_private *priv = crtc->dev->dev_private;
	unsigned int pending_planes = 0, pending_async_planes = 0;
	int i;

	mutex_lock(&mtk_crtc->hw_lock);
	for (i = 0; i < mtk_crtc->layer_nr; i++) {
		struct drm_plane *plane = &mtk_crtc->planes[i];
		struct mtk_plane_state *plane_state;

		plane_state = to_mtk_plane_state(plane->state);
		if (plane_state->pending.dirty) {
			plane_state->pending.config = true;
			plane_state->pending.dirty = false;
			pending_planes |= BIT(i);
		} else if (plane_state->pending.async_dirty) {
			plane_state->pending.async_config = true;
			plane_state->pending.async_dirty = false;
			pending_async_planes |= BIT(i);
		}
	}
	if (pending_planes)
		mtk_crtc->pending_planes = true;
	if (pending_async_planes)
		mtk_crtc->pending_async_planes = true;

	if (priv->data->shadow_register) {
		mtk_disp_mutex_acquire(mtk_crtc->mutex);
		mtk_crtc_ddp_config(crtc, NULL);
		mtk_disp_mutex_release(mtk_crtc->mutex);
	}
#if IS_REACHABLE(CONFIG_MTK_CMDQ)
	if (mtk_crtc->cmdq_client) {
		mbox_flush(mtk_crtc->cmdq_client->chan, 2000);
		cmdq_handle = cmdq_pkt_create(mtk_crtc->cmdq_client, PAGE_SIZE);
		cmdq_pkt_clear_event(cmdq_handle, mtk_crtc->cmdq_event);
		cmdq_pkt_wfe(cmdq_handle, mtk_crtc->cmdq_event, false);
		mtk_crtc_ddp_config(crtc, cmdq_handle);
		cmdq_pkt_finalize(cmdq_handle);
		cmdq_pkt_flush_async(cmdq_handle, ddp_cmdq_cb, cmdq_handle);
	}
#endif
	mutex_unlock(&mtk_crtc->hw_lock);
}

int mtk_drm_crtc_plane_check(struct drm_crtc *crtc, struct drm_plane *plane,
			     struct mtk_plane_state *state)
{
	unsigned int local_layer;
	struct mtk_ddp_comp *comp;

	comp = mtk_drm_ddp_comp_for_plane(crtc, plane, &local_layer);
	if (comp)
		return mtk_ddp_comp_layer_check(comp, local_layer, state);
	return 0;
}

void mtk_drm_crtc_async_update(struct drm_crtc *crtc, struct drm_plane *plane,
			       struct drm_plane_state *new_state)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	const struct drm_plane_helper_funcs *plane_helper_funcs =
			plane->helper_private;

	if (!mtk_crtc->enabled)
		return;

	plane_helper_funcs->atomic_update(plane, new_state);
	mtk_drm_crtc_hw_config(mtk_crtc);
}

static void mtk_drm_crtc_atomic_enable(struct drm_crtc *crtc,
				       struct drm_crtc_state *old_state)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0];
	int ret;

	DRM_DEBUG_DRIVER("%s %d\n", __func__, crtc->base.id);

	ret = mtk_smi_larb_get(comp->larb_dev);
	if (ret) {
		DRM_ERROR("Failed to get larb: %d\n", ret);
		return;
	}

	ret = mtk_crtc_ddp_hw_init(mtk_crtc);
	if (ret) {
		mtk_smi_larb_put(comp->larb_dev);
		return;
	}

	drm_crtc_vblank_on(crtc);
	mtk_crtc->enabled = true;
}

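/*
 * Disable all planes, wait one vblank so the final frame without them has
 * actually been scanned out, and only then shut the hardware path down.
 */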
static void mtk_drm_crtc_atomic_disable(struct drm_crtc *crtc,
					struct drm_crtc_state *old_state)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0];
	int i;

	DRM_DEBUG_DRIVER("%s %d\n", __func__, crtc->base.id);
	if (!mtk_crtc->enabled)
		return;

	/* Set all pending plane state to disabled */
	for (i = 0; i < mtk_crtc->layer_nr; i++) {
		struct drm_plane *plane = &mtk_crtc->planes[i];
		struct mtk_plane_state *plane_state;

		plane_state = to_mtk_plane_state(plane->state);
		plane_state->pending.enable = false;
		plane_state->pending.config = true;
	}
	mtk_crtc->pending_planes = true;

	mtk_drm_crtc_hw_config(mtk_crtc);
	/* Wait for planes to be disabled */
	drm_crtc_wait_one_vblank(crtc);

	drm_crtc_vblank_off(crtc);
	mtk_crtc_ddp_hw_fini(mtk_crtc);
	mtk_smi_larb_put(comp->larb_dev);

	mtk_crtc->enabled = false;
}

static void mtk_drm_crtc_atomic_begin(struct drm_crtc *crtc,
				      struct drm_crtc_state *old_crtc_state)
{
	struct mtk_crtc_state *state = to_mtk_crtc_state(crtc->state);
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);

	if (mtk_crtc->event && state->base.event)
		DRM_ERROR("new event while there is still a pending event\n");

	if (state->base.event) {
		state->base.event->pipe = drm_crtc_index(crtc);
		WARN_ON(drm_crtc_vblank_get(crtc) != 0);
		mtk_crtc->event = state->base.event;
		state->base.event = NULL;
	}
}

static void mtk_drm_crtc_atomic_flush(struct drm_crtc *crtc,
				      struct drm_crtc_state *old_crtc_state)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	int i;

	if (mtk_crtc->event)
		mtk_crtc->pending_needs_vblank = true;
	if (crtc->state->color_mgmt_changed)
		for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) {
			mtk_ddp_gamma_set(mtk_crtc->ddp_comp[i], crtc->state);
			mtk_ddp_ctm_set(mtk_crtc->ddp_comp[i], crtc->state);
		}
	mtk_drm_crtc_hw_config(mtk_crtc);
}

static const struct drm_crtc_funcs mtk_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.destroy = mtk_drm_crtc_destroy,
	.reset = mtk_drm_crtc_reset,
	.atomic_duplicate_state = mtk_drm_crtc_duplicate_state,
	.atomic_destroy_state = mtk_drm_crtc_destroy_state,
	.gamma_set = drm_atomic_helper_legacy_gamma_set,
	.enable_vblank = mtk_drm_crtc_enable_vblank,
	.disable_vblank = mtk_drm_crtc_disable_vblank,
};

static const struct drm_crtc_helper_funcs mtk_crtc_helper_funcs = {
	.mode_fixup = mtk_drm_crtc_mode_fixup,
	.mode_set_nofb = mtk_drm_crtc_mode_set_nofb,
	.atomic_begin = mtk_drm_crtc_atomic_begin,
	.atomic_flush = mtk_drm_crtc_atomic_flush,
	.atomic_enable = mtk_drm_crtc_atomic_enable,
	.atomic_disable = mtk_drm_crtc_atomic_disable,
};

static int mtk_drm_crtc_init(struct drm_device *drm,
			     struct mtk_drm_crtc *mtk_crtc,
			     unsigned int pipe)
{
	struct drm_plane *primary = NULL;
	struct drm_plane *cursor = NULL;
	int i, ret;

	for (i = 0; i < mtk_crtc->layer_nr; i++) {
		if (mtk_crtc->planes[i].type == DRM_PLANE_TYPE_PRIMARY)
			primary = &mtk_crtc->planes[i];
		else if (mtk_crtc->planes[i].type == DRM_PLANE_TYPE_CURSOR)
			cursor = &mtk_crtc->planes[i];
	}

	ret = drm_crtc_init_with_planes(drm, &mtk_crtc->base, primary, cursor,
					&mtk_crtc_funcs, NULL);
	if (ret)
		goto err_cleanup_crtc;

	drm_crtc_helper_add(&mtk_crtc->base, &mtk_crtc_helper_funcs);

	return 0;

err_cleanup_crtc:
	drm_crtc_cleanup(&mtk_crtc->base);
	return ret;
}

void mtk_crtc_ddp_irq(struct drm_crtc *crtc, struct mtk_ddp_comp *comp)
{
	struct mtk_drm_crtc *mtk_crtc = to_mtk_crtc(crtc);
	struct mtk_drm_private *priv = crtc->dev->dev_private;

#if IS_REACHABLE(CONFIG_MTK_CMDQ)
	if (!priv->data->shadow_register && !mtk_crtc->cmdq_client)
#else
	if (!priv->data->shadow_register)
#endif
		mtk_crtc_ddp_config(crtc, NULL);

	mtk_drm_finish_page_flip(mtk_crtc);
}

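/*
 * Only the first two components in a path may contribute planes: the first
 * component always does, and the second one only if it can take background
 * color input, i.e. it can be cascaded behind the first one.
 */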
static int mtk_drm_crtc_num_comp_planes(struct mtk_drm_crtc *mtk_crtc,
					int comp_idx)
{
	struct mtk_ddp_comp *comp;

	if (comp_idx > 1)
		return 0;

	comp = mtk_crtc->ddp_comp[comp_idx];
	if (!comp->funcs)
		return 0;

	if (comp_idx == 1 && !comp->funcs->bgclr_in_on)
		return 0;

	return mtk_ddp_comp_layer_nr(comp);
}

static inline
enum drm_plane_type mtk_drm_crtc_plane_type(unsigned int plane_idx,
					    unsigned int num_planes)
{
	if (plane_idx == 0)
		return DRM_PLANE_TYPE_PRIMARY;
	else if (plane_idx == (num_planes - 1))
		return DRM_PLANE_TYPE_CURSOR;
	else
		return DRM_PLANE_TYPE_OVERLAY;
}

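/*
 * Create one drm_plane for every hardware layer of the given component and
 * append it to mtk_crtc->planes; mtk_crtc->layer_nr tracks how many planes
 * have been created for this crtc so far.
 */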
static int mtk_drm_crtc_init_comp_planes(struct drm_device *drm_dev,
					 struct mtk_drm_crtc *mtk_crtc,
					 int comp_idx, int pipe)
{
	int num_planes = mtk_drm_crtc_num_comp_planes(mtk_crtc, comp_idx);
	struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[comp_idx];
	int i, ret;

	for (i = 0; i < num_planes; i++) {
		ret = mtk_plane_init(drm_dev,
				     &mtk_crtc->planes[mtk_crtc->layer_nr],
				     BIT(pipe),
				     mtk_drm_crtc_plane_type(mtk_crtc->layer_nr,
							     num_planes),
				     mtk_ddp_comp_supported_rotations(comp));
		if (ret)
			return ret;

		mtk_crtc->layer_nr++;
	}
	return 0;
}

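/*
 * Create one crtc, its planes and its CMDQ client for the given component
 * path. @path lists the DDP components in processing order; if any of them
 * is missing or disabled in the device tree, no crtc is created and 0 is
 * returned so the driver can still bind with the remaining paths.
 */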
int mtk_drm_crtc_create(struct drm_device *drm_dev,
			const enum mtk_ddp_comp_id *path, unsigned int path_len)
{
	struct mtk_drm_private *priv = drm_dev->dev_private;
	struct device *dev = drm_dev->dev;
	struct mtk_drm_crtc *mtk_crtc;
	unsigned int num_comp_planes = 0;
	int pipe = priv->num_pipes;
	int ret;
	int i;
	bool has_ctm = false;
	uint gamma_lut_size = 0;

	if (!path)
		return 0;

	for (i = 0; i < path_len; i++) {
		enum mtk_ddp_comp_id comp_id = path[i];
		struct device_node *node;

		node = priv->comp_node[comp_id];
		if (!node) {
			dev_info(dev,
				 "Not creating crtc %d because component %d is disabled or missing\n",
				 pipe, comp_id);
			return 0;
		}
	}

	mtk_crtc = devm_kzalloc(dev, sizeof(*mtk_crtc), GFP_KERNEL);
	if (!mtk_crtc)
		return -ENOMEM;

	mtk_crtc->mmsys_dev = priv->mmsys_dev;
	mtk_crtc->ddp_comp_nr = path_len;
	mtk_crtc->ddp_comp = devm_kmalloc_array(dev, mtk_crtc->ddp_comp_nr,
						sizeof(*mtk_crtc->ddp_comp),
						GFP_KERNEL);
	if (!mtk_crtc->ddp_comp)
		return -ENOMEM;

	mtk_crtc->mutex = mtk_disp_mutex_get(priv->mutex_dev, pipe);
	if (IS_ERR(mtk_crtc->mutex)) {
		ret = PTR_ERR(mtk_crtc->mutex);
		dev_err(dev, "Failed to get mutex: %d\n", ret);
		return ret;
	}

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) {
		enum mtk_ddp_comp_id comp_id = path[i];
		struct mtk_ddp_comp *comp;
		struct device_node *node;

		node = priv->comp_node[comp_id];
		comp = priv->ddp_comp[comp_id];
		if (!comp) {
			dev_err(dev, "Component %pOF not initialized\n", node);
			ret = -ENODEV;
			return ret;
		}

		mtk_crtc->ddp_comp[i] = comp;

		if (comp->funcs) {
			if (comp->funcs->gamma_set)
				gamma_lut_size = MTK_LUT_SIZE;

			if (comp->funcs->ctm_set)
				has_ctm = true;
		}
	}

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++)
		num_comp_planes += mtk_drm_crtc_num_comp_planes(mtk_crtc, i);

	mtk_crtc->planes = devm_kcalloc(dev, num_comp_planes,
					sizeof(struct drm_plane), GFP_KERNEL);
	if (!mtk_crtc->planes)
		return -ENOMEM;

	for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) {
		ret = mtk_drm_crtc_init_comp_planes(drm_dev, mtk_crtc, i,
						    pipe);
		if (ret)
			return ret;
	}

	ret = mtk_drm_crtc_init(drm_dev, mtk_crtc, pipe);
	if (ret < 0)
		return ret;

	if (gamma_lut_size)
		drm_mode_crtc_set_gamma_size(&mtk_crtc->base, gamma_lut_size);
	drm_crtc_enable_color_mgmt(&mtk_crtc->base, 0, has_ctm, gamma_lut_size);
	priv->num_pipes++;
	mutex_init(&mtk_crtc->hw_lock);

#if IS_REACHABLE(CONFIG_MTK_CMDQ)
	mtk_crtc->cmdq_client =
			cmdq_mbox_create(mtk_crtc->mmsys_dev,
					 drm_crtc_index(&mtk_crtc->base),
					 2000);
	if (IS_ERR(mtk_crtc->cmdq_client)) {
		dev_dbg(dev, "mtk_crtc %d failed to create mailbox client, writing register by CPU now\n",
			drm_crtc_index(&mtk_crtc->base));
		mtk_crtc->cmdq_client = NULL;
	}

	if (mtk_crtc->cmdq_client) {
		ret = of_property_read_u32_index(priv->mutex_node,
						 "mediatek,gce-events",
						 drm_crtc_index(&mtk_crtc->base),
						 &mtk_crtc->cmdq_event);
		if (ret) {
			dev_dbg(dev, "mtk_crtc %d failed to get mediatek,gce-events property\n",
				drm_crtc_index(&mtk_crtc->base));
			cmdq_mbox_destroy(mtk_crtc->cmdq_client);
			mtk_crtc->cmdq_client = NULL;
		}
	}
#endif
	return 0;
}