// SPDX-License-Identifier: GPL-2.0-only
/*
 * Hisilicon Kirin SoC DPE (Display Processing Engine)'s crtc & plane driver
 *
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2014-2016 Hisilicon Limited.
 *
 * Authors:
 *	Xinliang Liu <z.liuxinliang@hisilicon.com>
 *	Xinliang Liu <xinliang.liu@linaro.org>
 *	Xinwei Kong <kong.kongxinwei@hisilicon.com>
 */

#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>
#include <linux/reset.h>
#include <linux/of_address.h>
#include <linux/of.h>
#include <linux/of_irq.h>
#include <linux/platform_device.h>

#include <video/display_timing.h>

#include <drm/drm_drv.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_vblank.h>
#include <drm/drm_fourcc.h>

#include "kirin_drm_drv.h"
#include "kirin_dpe_reg.h"

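/*
 * The DPE timing/size registers use a "value minus one" encoding;
 * these helpers keep that convention in one place.
 */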
#define DPE_WIDTH(width)	((width) - 1)
#define DPE_HEIGHT(height)	((height) - 1)

#define GET_FLUX_REQ_IN(max_depth)	((max_depth) * 50 / 100)
#define GET_FLUX_REQ_OUT(max_depth)	((max_depth) * 90 / 100)

#define DEFAULT_DPE_CORE_CLK_07V_RATE	(400000000UL)
#define DPE_MAX_PXL0_CLK_144M		(144000000UL)

#define DPE_UNSUPPORT	(800)
#define RES_4K_PHONE	(3840 * 2160)

enum dpe_ovl { DPE_OVL0 = 0, DPE_OVL_NUM };

enum dpe_channel {
	DPE_CH0 = 0,	/* channel 0 is the primary plane */
	DPE_CH_NUM
};

struct dpe_hw_ctx {
	void __iomem *base;
	void __iomem *noc_base;

	struct clk *dpe_axi_clk;
	struct clk *dpe_pclk_clk;
	struct clk *dpe_pri_clk;
	struct clk *dpe_pxl0_clk;
	struct clk *dpe_mmbuf_clk;

	bool power_on;
	int irq;

	struct drm_crtc *crtc;

	u32 hdisplay;
	u32 vdisplay;
};

static const struct kirin_format dpe_formats[] = {
	{ DRM_FORMAT_RGB565, DPE_RGB_565 },
	{ DRM_FORMAT_BGR565, DPE_BGR_565 },
	{ DRM_FORMAT_XRGB8888, DPE_RGBX_8888 },
	{ DRM_FORMAT_XBGR8888, DPE_BGRX_8888 },
	{ DRM_FORMAT_RGBA8888, DPE_RGBA_8888 },
	{ DRM_FORMAT_BGRA8888, DPE_BGRA_8888 },
	{ DRM_FORMAT_ARGB8888, DPE_BGRA_8888 },
	{ DRM_FORMAT_ABGR8888, DPE_RGBA_8888 },
};

static const u32 dpe_channel_formats[] = {
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
};

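/*
 * Per-format programming values for the read-DMA and DFC stages.  Both
 * tables are indexed by the DPE_* hardware format code that
 * dpe_get_format() returns, not by the DRM fourcc itself.
 */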
static u32 dpe_pixel_dma_format_map[] = {
	DMA_PIXEL_FORMAT_RGB_565,
	DMA_PIXEL_FORMAT_XRGB_4444,
	DMA_PIXEL_FORMAT_ARGB_4444,
	DMA_PIXEL_FORMAT_XRGB_5551,
	DMA_PIXEL_FORMAT_ARGB_5551,
	DMA_PIXEL_FORMAT_XRGB_8888,
	DMA_PIXEL_FORMAT_ARGB_8888,
	DMA_PIXEL_FORMAT_RGB_565,
	DMA_PIXEL_FORMAT_XRGB_4444,
	DMA_PIXEL_FORMAT_ARGB_4444,
	DMA_PIXEL_FORMAT_XRGB_5551,
	DMA_PIXEL_FORMAT_ARGB_5551,
	DMA_PIXEL_FORMAT_XRGB_8888,
	DMA_PIXEL_FORMAT_ARGB_8888,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUV_422_SP_HP,
	DMA_PIXEL_FORMAT_YUV_422_SP_HP,
	DMA_PIXEL_FORMAT_YUV_420_SP_HP,
	DMA_PIXEL_FORMAT_YUV_420_SP_HP,
	DMA_PIXEL_FORMAT_YUV_422_P_HP,
	DMA_PIXEL_FORMAT_YUV_422_P_HP,
	DMA_PIXEL_FORMAT_YUV_420_P_HP,
	DMA_PIXEL_FORMAT_YUV_420_P_HP,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
};

static u32 dpe_pixel_dfc_format_map[] = {
	DFC_PIXEL_FORMAT_RGB_565,
	DFC_PIXEL_FORMAT_XBGR_4444,
	DFC_PIXEL_FORMAT_ABGR_4444,
	DFC_PIXEL_FORMAT_XBGR_5551,
	DFC_PIXEL_FORMAT_ABGR_5551,
	DFC_PIXEL_FORMAT_XBGR_8888,
	DFC_PIXEL_FORMAT_ABGR_8888,
	DFC_PIXEL_FORMAT_BGR_565,
	DFC_PIXEL_FORMAT_XRGB_4444,
	DFC_PIXEL_FORMAT_ARGB_4444,
	DFC_PIXEL_FORMAT_XRGB_5551,
	DFC_PIXEL_FORMAT_ARGB_5551,
	DFC_PIXEL_FORMAT_XRGB_8888,
	DFC_PIXEL_FORMAT_ARGB_8888,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_UYVY422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_VYUY422,
};

static u32 mid_array[DPE_CH_NUM] = {0xb};
static u32 aif_offset[DPE_CH_NUM] = {AIF0_CH0_OFFSET};
static u32 mif_offset[DPE_CH_NUM] = {MIF_CH0_OFFSET};
static u32 rdma_offset[DPE_CH_NUM] = {DPE_RCH_D0_DMA_OFFSET};
static u32 rdfc_offset[DPE_CH_NUM] = {DPE_RCH_D0_DFC_OFFSET};
static u32 dpe_smmu_chn_sid_num[DPE_CH_NUM] = {4};
static u32 dpe_smmu_smrx_idx[DPE_CH_NUM] = {0};
static u32 mctl_offset[DPE_OVL_NUM] = {DPE_MCTRL_CTL0_OFFSET};
static u32 ovl_offset[DPE_OVL_NUM] = {DPE_OVL0_OFFSET};

static u32 dpe_get_format(u32 pixel_format)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(dpe_formats); i++)
		if (dpe_formats[i].pixel_format == pixel_format)
			return dpe_formats[i].hw_format;

	DRM_ERROR("unsupported pixel format, fourcc = 0x%08x\n", pixel_format);
	return DPE_UNSUPPORT;
}

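/*
 * Read-modify-write helper: write the low @bw bits of @val into the
 * @bw-bit-wide field starting at bit @bs of the register at @addr,
 * leaving all other bits untouched.
 */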
static void dpe_set_reg(void __iomem *addr, u32 val, u8 bw, u8 bs)
{
	u32 mask = (bw == 32) ? ~0U : ((1U << bw) - 1);
	u32 tmp;

	tmp = readl(addr);
	tmp &= ~(mask << bs);

	writel(tmp | ((val & mask) << bs), addr);
}

/* dpe mctl utils */
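/*
 * MCTL_CTL_MUTEX appears to work as a configuration lock: registers
 * written between lock and unlock are latched and applied together
 * (see dpe_init() and dpe_update_channel()).
 */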
static void dpe_mctl_lock(struct dpe_hw_ctx *ctx)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX, 0x1, 1, 0);
}

static void dpe_mctl_unlock(struct dpe_hw_ctx *ctx)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX, 0x0, 1, 0);
}

static void dpe_mctl_init(struct dpe_hw_ctx *ctx)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_EN, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_ITF, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_DBG, 0xB13A00, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_TOP, 0x2, 32, 0);
}

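/*
 * Raw QoS writes into the NoC service-target space; the 0x80 stride
 * and the value 0x2 are carried over from the vendor code and are
 * assumed to set priority 2 for the four DSS masters.
 */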
static void dpe_qos_init(struct dpe_hw_ctx *ctx)
{
	void __iomem *noc_base = ctx->noc_base;

	writel(0x2, noc_base + 0x000c);
	writel(0x2, noc_base + 0x008c);
	writel(0x2, noc_base + 0x010c);
	writel(0x2, noc_base + 0x018c);
}

/* dpe ldi utils */
static void dpe_enable_ldi(struct dpe_hw_ctx *ctx)
{
	void __iomem *ldi_base = ctx->base + DPE_LDI0_OFFSET;

	dpe_set_reg(ldi_base + LDI_CTRL, 0x1, 1, 0);
}

/* interrupts utils */
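/*
 * Interrupt scheme: power-up masks everything and clears stale status,
 * then unmasks only what the driver handles: vsync and underflow at
 * the LDI, plus the DPP/ITF0/MMU summary bits at the global level.
 */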
static void dpe_interrupt_mask(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 mask = ~0;

	writel(mask, base + GLB_CPU_PDP_INT_MSK);
	writel(mask, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
	writel(mask, base + DPE_DPP_OFFSET + DPP_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_DPE_GLB_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_MCTL_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_WCH0_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_WCH1_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH0_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH1_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH2_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH3_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH4_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH5_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH6_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH7_INT_MSK);
}

static void dpe_interrupt_unmask(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 unmask;

	unmask = ~0;
	unmask &= ~(BIT_DPP_INTS | BIT_ITF0_INTS | BIT_MMU_IRPT_NS);
	writel(unmask, base + GLB_CPU_PDP_INT_MSK);

	unmask = ~0;
	unmask &= ~(BIT_VSYNC | BIT_LDI_UNFLOW);
	writel(unmask, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
}

static void dpe_interrupt_clear(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 clear = ~0;

	writel(clear, base + GLB_CPU_PDP_INTS);
	writel(clear, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INTS);
	writel(clear, base + DPE_DPP_OFFSET + DPP_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_MCTL_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_WCH0_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_WCH1_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH0_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH1_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH2_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH3_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH4_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH5_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH6_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH7_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_DPE_GLB_INTS);
}

static void dpe_irq_enable(struct dpe_hw_ctx *ctx)
{
	enable_irq(ctx->irq);
}

static void dpe_clk_enable(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;

	writel(0x00000088, base + DPE_IFBC_OFFSET + IFBC_MEM_CTRL);
	writel(0x00000888, base + DPE_DSC_OFFSET + DSC_MEM_CTRL);
	writel(0x00000008, base + DPE_LDI0_OFFSET + LDI_MEM_CTRL);
	writel(0x00000008, base + DPE_DBUF0_OFFSET + DBUF_MEM_CTRL);
	writel(0x00000008, base + DPE_DPP_DITHER_OFFSET + DITHER_MEM_CTRL);
	writel(0x00000008, base + DPE_CMDLIST_OFFSET + CMD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_VG0_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_ARSR_OFFSET + ARSR2P_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_DMA_OFFSET + VPP_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_VG0_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_VG1_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG1_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_VG1_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_VG2_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG2_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG2_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_G0_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G0_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_G0_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_G1_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G1_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_G1_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_D0_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D2_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D3_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000888, base + DPE_WCH0_DMA_OFFSET + AFBCE_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH0_DMA_OFFSET + ROT_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000888, base + DPE_WCH1_DMA_OFFSET + AFBCE_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH1_DMA_OFFSET + ROT_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH2_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH2_DMA_OFFSET + ROT_MEM_CTRL);
}

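/*
 * Power the DPE up: peripheral clocks first, then the memory low-power
 * overrides in dpe_clk_enable(), then the interrupt setup described
 * above.  A no-op once ctx->power_on is set, so callers may invoke it
 * freely.
 */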
static int dpe_power_up(struct dpe_hw_ctx *ctx)
{
	int ret;

	if (ctx->power_on)
		return 0;

	/* peri clk enable */
	ret = clk_prepare_enable(ctx->dpe_pxl0_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_pxl0_clk (%d)\n", ret);
		return ret;
	}

	ret = clk_prepare_enable(ctx->dpe_pri_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_pri_clk (%d)\n", ret);
		goto err_pri_clk;
	}

	ret = clk_prepare_enable(ctx->dpe_pclk_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_pclk_clk (%d)\n", ret);
		goto err_pclk_clk;
	}

	ret = clk_prepare_enable(ctx->dpe_axi_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_axi_clk (%d)\n", ret);
		goto err_axi_clk;
	}

	ret = clk_prepare_enable(ctx->dpe_mmbuf_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_mmbuf_clk (%d)\n", ret);
		goto err_mmbuf_clk;
	}

	dpe_clk_enable(ctx);
	dpe_interrupt_mask(ctx);
	dpe_interrupt_clear(ctx);
	dpe_irq_enable(ctx);
	dpe_interrupt_unmask(ctx);

	ctx->power_on = true;
	return 0;

err_mmbuf_clk:
	clk_disable_unprepare(ctx->dpe_axi_clk);
err_axi_clk:
	clk_disable_unprepare(ctx->dpe_pclk_clk);
err_pclk_clk:
	clk_disable_unprepare(ctx->dpe_pri_clk);
err_pri_clk:
	clk_disable_unprepare(ctx->dpe_pxl0_clk);
	return ret;
}

static void dpe_dpp_init(struct dpe_hw_ctx *ctx, struct drm_display_mode *mode,
			 struct drm_display_mode *adj_mode)
{
	void __iomem *dpp_base = ctx->base + DPE_DPP_OFFSET;

	writel((DPE_HEIGHT(mode->vdisplay) << 16) | DPE_WIDTH(mode->hdisplay),
	       dpp_base + DPP_IMG_SIZE_BEF_SR);
	writel((DPE_HEIGHT(mode->vdisplay) << 16) | DPE_WIDTH(mode->hdisplay),
	       dpp_base + DPP_IMG_SIZE_AFT_SR);
}

static void dpe_ovl_init(struct dpe_hw_ctx *ctx, u32 xres, u32 yres)
{
	void __iomem *mctl_sys_base = ctx->base + DPE_MCTRL_SYS_OFFSET;
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];
	void __iomem *ovl0_base = ctx->base + ovl_offset[DPE_OVL0];

	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x1, 32, 0);
	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_SIZE, (xres - 1) | ((yres - 1) << 16),
		    32, 0);
	dpe_set_reg(ovl0_base + OVL_BG_COLOR, 0xFF000000, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_STARTPOS, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_ENDPOS, (xres - 1) | ((yres - 1) << 16),
		    32, 0);
	dpe_set_reg(ovl0_base + OVL_GCFG, 0x10001, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_ITF, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_DBUF, 0x1, 2, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_OV, 1 << DPE_OVL0, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH_OV0_SEL, 0x8, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_OV0_FLUSH_EN, 0xd, 4, 0);
}

static void dpe_vesa_init(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;

	dpe_set_reg(base + DPE_LDI0_OFFSET + LDI_VESA_CLK_SEL, 0, 1, 0);
}

static int dpe_mipi_ifbc_get_rect(struct drm_rect *rect)
{
	u32 xres_div = XRES_DIV_1;
	u32 yres_div = YRES_DIV_1;

	if ((rect->x2 % xres_div) > 0)
		DRM_ERROR("xres(%d) is not aligned to division_h(%d) pixels!\n",
			  rect->x2, xres_div);

	if ((rect->y2 % yres_div) > 0)
		DRM_ERROR("yres(%d) is not aligned to division_v(%d) pixels!\n",
			  rect->y2, yres_div);

	rect->x2 /= xres_div;
	rect->y2 /= yres_div;

	return 0;
}

static void dpe_init_ldi_pxl_div(struct dpe_hw_ctx *ctx)
{
	void __iomem *ldi_base = ctx->base + DPE_LDI0_OFFSET;

	dpe_set_reg(ldi_base + LDI_PXL0_DIV2_GT_EN, PXL0_DIV2_GT_EN_CLOSE,
		    1, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_DIV4_GT_EN, PXL0_DIV4_GT_EN_CLOSE,
		    1, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_GT_EN, 0x1, 1, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_DSI_GT_EN, PXL0_DSI_GT_EN_1, 2, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_DIVXCFG, PXL0_DIVCFG_0, 3, 0);
}

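/*
 * DBUF threshold setup, carried over from the vendor driver.  As far
 * as can be read from the formula, thd_cg_out estimates how many DBUF
 * entries drain during the DFS window: pixel clock * DFS_TIME, scaled
 * by the active fraction of a line and divided by six bytes per entry.
 * The remaining thresholds are fixed fractions of the resulting SRAM
 * depth.
 */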
static void dpe_dbuf_init(struct dpe_hw_ctx *ctx, struct drm_display_mode *mode,
			  struct drm_display_mode *adj_mode)
{
	void __iomem *dbuf_base = ctx->base + DPE_DBUF0_OFFSET;

	int sram_valid_num = 0;
	int sram_max_mem_depth = 0;
	int sram_min_support_depth = 0;

	u32 thd_rqos_in = 0;
	u32 thd_rqos_out = 0;
	u32 thd_wqos_in = 0;
	u32 thd_wqos_out = 0;
	u32 thd_cg_in = 0;
	u32 thd_cg_out = 0;
	u32 thd_wr_wait = 0;
	u32 thd_cg_hold = 0;
	u32 thd_flux_req_befdfs_in = 0;
	u32 thd_flux_req_befdfs_out = 0;
	u32 thd_flux_req_aftdfs_in = 0;
	u32 thd_flux_req_aftdfs_out = 0;
	u32 thd_dfs_ok = 0;
	u32 dfs_ok_mask = 0;
	u32 thd_flux_req_sw_en = 1;
	u32 hfp, hbp, hsw, vfp, vbp, vsw;

	int dfs_time_min = 0;
	int depth = 0;

	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;

	if (mode->hdisplay * mode->vdisplay >= RES_4K_PHONE)
		dfs_time_min = DFS_TIME_MIN_4K;
	else
		dfs_time_min = DFS_TIME_MIN;

	depth = DBUF0_DEPTH;

	thd_cg_out = (DFS_TIME * adj_mode->clock * 1000UL * mode->hdisplay) /
		     (((hsw + hbp + hfp) + mode->hdisplay) * 6 * 1000000UL);

	sram_valid_num = thd_cg_out / depth;
	thd_cg_in = (sram_valid_num + 1) * depth - 1;
	sram_max_mem_depth = (sram_valid_num + 1) * depth;

	thd_rqos_in = thd_cg_out * 85 / 100;
	thd_rqos_out = thd_cg_out;
	thd_flux_req_befdfs_in = GET_FLUX_REQ_IN(sram_max_mem_depth);
	thd_flux_req_befdfs_out = GET_FLUX_REQ_OUT(sram_max_mem_depth);

	sram_min_support_depth =
		dfs_time_min * mode->hdisplay /
		(1000000 / 60 / (mode->vdisplay + vbp + vfp + vsw) *
		 (DBUF_WIDTH_BIT / 3 / BITS_PER_BYTE));

	thd_flux_req_aftdfs_in = (sram_max_mem_depth - sram_min_support_depth);
	thd_flux_req_aftdfs_in = thd_flux_req_aftdfs_in / 3;
	thd_flux_req_aftdfs_out = 2 * thd_flux_req_aftdfs_in;
	thd_dfs_ok = thd_flux_req_befdfs_in;

	writel(mode->hdisplay * mode->vdisplay, dbuf_base + DBUF_FRM_SIZE);
	writel(DPE_WIDTH(mode->hdisplay), dbuf_base + DBUF_FRM_HSIZE);
	writel(sram_valid_num, dbuf_base + DBUF_SRAM_VALID_NUM);

	writel((thd_rqos_out << 16) | thd_rqos_in, dbuf_base + DBUF_THD_RQOS);
	writel((thd_wqos_out << 16) | thd_wqos_in, dbuf_base + DBUF_THD_WQOS);
	writel((thd_cg_out << 16) | thd_cg_in, dbuf_base + DBUF_THD_CG);
	writel((thd_cg_hold << 16) | thd_wr_wait, dbuf_base + DBUF_THD_OTHER);
	writel((thd_flux_req_befdfs_out << 16) | thd_flux_req_befdfs_in,
	       dbuf_base + DBUF_THD_FLUX_REQ_BEF);
	writel((thd_flux_req_aftdfs_out << 16) | thd_flux_req_aftdfs_in,
	       dbuf_base + DBUF_THD_FLUX_REQ_AFT);
	writel(thd_dfs_ok, dbuf_base + DBUF_THD_DFS_OK);
	writel((dfs_ok_mask << 1) | thd_flux_req_sw_en,
	       dbuf_base + DBUF_FLUX_REQ_CTRL);

	writel(0x1, dbuf_base + DBUF_DFS_LP_CTRL);
}

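/*
 * Program the LDI (LCD interface) timings.  Blanking intervals are
 * derived from the DRM mode; sizes use the usual minus-one register
 * encoding via DPE_WIDTH()/DPE_HEIGHT().
 */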
static void dpe_ldi_init(struct dpe_hw_ctx *ctx, struct drm_display_mode *mode,
			 struct drm_display_mode *adj_mode)
{
	void __iomem *ldi_base = ctx->base + DPE_LDI0_OFFSET;
	struct drm_rect rect = { 0, 0, 0, 0 };
	u32 hfp, hbp, hsw, vfp, vbp, vsw;
	u32 vsync_plr = 0;
	u32 hsync_plr = 0;
	u32 pixelclk_plr = 0;
	u32 data_en_plr = 0;

	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;

	rect.x1 = 0;
	rect.y1 = 0;
	rect.x2 = mode->hdisplay;
	rect.y2 = mode->vdisplay;
	dpe_mipi_ifbc_get_rect(&rect);
	dpe_init_ldi_pxl_div(ctx);

	writel(hfp | ((hbp + DPE_WIDTH(hsw)) << 16),
	       ldi_base + LDI_DPI0_HRZ_CTRL0);
	writel(0, ldi_base + LDI_DPI0_HRZ_CTRL1);
	writel(DPE_WIDTH(rect.x2), ldi_base + LDI_DPI0_HRZ_CTRL2);
	writel(vfp | (vbp << 16), ldi_base + LDI_VRT_CTRL0);
	writel(DPE_HEIGHT(vsw), ldi_base + LDI_VRT_CTRL1);
	writel(DPE_HEIGHT(rect.y2), ldi_base + LDI_VRT_CTRL2);
	writel(vsync_plr | (hsync_plr << 1) | (pixelclk_plr << 2) |
	       (data_en_plr << 3),
	       ldi_base + LDI_PLR_CTRL);

	dpe_set_reg(ldi_base + LDI_CTRL, LCD_RGB888, 2, 3);
	dpe_set_reg(ldi_base + LDI_CTRL, LCD_RGB, 1, 13);

	writel(vfp, ldi_base + LDI_VINACT_MSK_LEN);
	writel(0x1, ldi_base + LDI_CMD_EVENT_SEL);

	dpe_set_reg(ldi_base + LDI_DSI_CMD_MOD_CTRL, 0x1, 1, 1);
	dpe_set_reg(ldi_base + LDI_WORK_MODE, 0x1, 1, 0);
	dpe_set_reg(ldi_base + LDI_CTRL, 0x0, 1, 0);
}

static void dpe_init(struct dpe_hw_ctx *ctx, struct drm_display_mode *mode,
		     struct drm_display_mode *adj_mode)
{
	dpe_dbuf_init(ctx, mode, adj_mode);
	dpe_dpp_init(ctx, mode, adj_mode);
	dpe_vesa_init(ctx);
	dpe_ldi_init(ctx, mode, adj_mode);
	dpe_qos_init(ctx);
	dpe_mctl_init(ctx);

	dpe_mctl_lock(ctx);
	dpe_ovl_init(ctx, mode->hdisplay, mode->vdisplay);
	dpe_mctl_unlock(ctx);

	/* dpe_enable_ldi() is deliberately deferred to dpe_update_channel() */

	ctx->hdisplay = mode->hdisplay;
	ctx->vdisplay = mode->vdisplay;
	mdelay(60);
}

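/*
 * The pixel PLL tops out at DPE_MAX_PXL0_CLK_144M and cannot hit every
 * standard dot clock exactly, so a few common rates are quantized to
 * the nearest achievable one; adjusted_mode->clock is then rewritten
 * with whatever rate the clock framework actually delivered.
 */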
static void dpe_ldi_set_mode(struct dpe_hw_ctx *ctx,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adj_mode)
{
	int ret;
	u32 clk_Hz;

	switch (mode->clock) {
	case 148500:
		clk_Hz = 144000 * 1000UL;
		break;
	case 83496:
		clk_Hz = 80000 * 1000UL;
		break;
	case 74440:
	case 74250:
		clk_Hz = 72000 * 1000UL;
		break;
	default:
		clk_Hz = mode->clock * 1000UL;
	}

	ret = clk_set_rate(ctx->dpe_pxl0_clk, clk_Hz);
	if (ret)
		DRM_ERROR("failed to set pixel clk %uHz (%d)\n", clk_Hz, ret);

	adj_mode->clock = clk_get_rate(ctx->dpe_pxl0_clk) / 1000;
}

static int dpe_enable_vblank(struct drm_crtc *crtc)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;

	dpe_power_up(ctx);

	return 0;
}

static void dpe_disable_vblank(struct drm_crtc *crtc)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;

	if (!ctx->power_on) {
		DRM_ERROR("power is down! vblank disable fail\n");
		return;
	}
}

static void dpe_crtc_atomic_enable(struct drm_crtc *crtc,
				   struct drm_crtc_state *old_state)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;
	int ret;

	if (kcrtc->enable)
		return;

	ret = dpe_power_up(ctx);
	if (ret)
		return;

	kcrtc->enable = true;
	drm_crtc_vblank_on(crtc);
}

static void dpe_crtc_atomic_disable(struct drm_crtc *crtc,
				    struct drm_crtc_state *old_state)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);

	if (!kcrtc->enable)
		return;

	drm_crtc_vblank_off(crtc);
	kcrtc->enable = false;
}

static void dpe_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;
	struct drm_display_mode *mode = &crtc->state->mode;
	struct drm_display_mode *adj_mode = &crtc->state->adjusted_mode;

	dpe_power_up(ctx);
	dpe_ldi_set_mode(ctx, mode, adj_mode);
	dpe_init(ctx, mode, adj_mode);
}

static void dpe_crtc_atomic_begin(struct drm_crtc *crtc,
				  struct drm_crtc_state *old_state)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;

	dpe_power_up(ctx);
}

static void dpe_crtc_atomic_flush(struct drm_crtc *crtc,
				  struct drm_crtc_state *old_state)
{
	struct drm_pending_vblank_event *event = crtc->state->event;

	if (event) {
		crtc->state->event = NULL;

		spin_lock_irq(&crtc->dev->event_lock);
		if (drm_crtc_vblank_get(crtc) == 0)
			drm_crtc_arm_vblank_event(crtc, event);
		else
			drm_crtc_send_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

const struct drm_crtc_helper_funcs dpe_crtc_helper_funcs = {
	.atomic_enable = dpe_crtc_atomic_enable,
	.atomic_disable = dpe_crtc_atomic_disable,
	.mode_set_nofb = dpe_crtc_mode_set_nofb,
	.atomic_begin = dpe_crtc_atomic_begin,
	.atomic_flush = dpe_crtc_atomic_flush,
};

const struct drm_crtc_funcs dpe_crtc_funcs = {
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank = dpe_enable_vblank,
	.disable_vblank = dpe_disable_vblank,
};

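/*
 * Re-arm the underflow interrupt: dpe_irq_handler() masks
 * BIT_LDI_UNFLOW when an underflow fires, and this clears the mask
 * again when the next frame is committed.
 */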
static void dpe_unflow_handler(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 tmp;

	tmp = readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
	tmp &= ~BIT_LDI_UNFLOW;

	writel(tmp, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
}

static void dpe_mctl_ov_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_EN, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_TOP, 0x2, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_DBG, 0xB13A00, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_RCH0 + ch * 4, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_ITF, 0x1, 2, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_DBUF, 0x1, 2, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_OV, 1 << DPE_OVL0, 4, 0);
}

static void dpe_mctl_sys_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *mctl_sys_base = ctx->base + DPE_MCTRL_SYS_OFFSET;

	dpe_set_reg(mctl_sys_base + MCTL_RCH0_OV_OEN + ch * 4, (1 << 1) | 0x100,
		    32, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH_OV0_SEL, 0x8, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH_OV0_SEL, ch, 4,
		    (DPE_OVL0 + 1) * 4);
	dpe_set_reg(mctl_sys_base + MCTL_OV0_FLUSH_EN, 0xd, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH0_FLUSH_EN + ch * 4, 0x1, 32, 0);
}

static void dpe_ovl_config(struct dpe_hw_ctx *ctx, const struct drm_rect *rect,
			   u32 xres, u32 yres)
{
	void __iomem *ovl0_base = ctx->base + ovl_offset[DPE_OVL0];

	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x1, 32, 0);
	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_SIZE, (xres - 1) | ((yres - 1) << 16),
		    32, 0);
	dpe_set_reg(ovl0_base + OVL_BG_COLOR, 0xFF000000, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_STARTPOS, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_ENDPOS, (xres - 1) | ((yres - 1) << 16),
		    32, 0);
	dpe_set_reg(ovl0_base + OVL_GCFG, 0x10001, 32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_POS, (rect->x1) | ((rect->y1) << 16),
		    32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_SIZE,
		    (rect->x2) | ((rect->y2) << 16), 32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_ALPHA, 0x00ff40ff, 32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_CFG, 0x1, 1, 0);
}

static void dpe_rdma_config(struct dpe_hw_ctx *ctx, const struct drm_rect *rect,
			    u32 display_addr, u32 hal_format, u32 bpp, int ch)
{
	void __iomem *rdma_base = ctx->base + rdma_offset[ch];

	u32 aligned_pixel;
	u32 rdma_oft_x0, rdma_oft_y0, rdma_oft_x1, rdma_oft_y1;
	u32 rdma_stride, rdma_format;
	u32 stretch_size_vrt;
	u32 h_display;

	aligned_pixel = DMA_ALIGN_BYTES / bpp;
	rdma_oft_x0 = rect->x1 / aligned_pixel;
	rdma_oft_y0 = rect->y1;
	rdma_oft_x1 = rect->x2 / aligned_pixel;
	rdma_oft_y1 = rect->y2;

	rdma_format = dpe_pixel_dma_format_map[hal_format];
	stretch_size_vrt = rdma_oft_y1 - rdma_oft_y0;

	h_display = (rect->x2 - rect->x1) + 1;
	rdma_stride = (h_display * bpp) / DMA_ALIGN_BYTES;

	dpe_set_reg(rdma_base + DMA_CH_REG_DEFAULT, 0x1, 32, 0);
	dpe_set_reg(rdma_base + DMA_CH_REG_DEFAULT, 0x0, 32, 0);

	dpe_set_reg(rdma_base + DMA_OFT_X0, rdma_oft_x0, 12, 0);
	dpe_set_reg(rdma_base + DMA_OFT_Y0, rdma_oft_y0, 16, 0);
	dpe_set_reg(rdma_base + DMA_OFT_X1, rdma_oft_x1, 12, 0);
	dpe_set_reg(rdma_base + DMA_OFT_Y1, rdma_oft_y1, 16, 0);
	dpe_set_reg(rdma_base + DMA_CTRL, rdma_format, 5, 3);
	dpe_set_reg(rdma_base + DMA_CTRL, 0x0, 1, 8);
	dpe_set_reg(rdma_base + DMA_STRETCH_SIZE_VRT, stretch_size_vrt, 32, 0);
	dpe_set_reg(rdma_base + DMA_DATA_ADDR0, display_addr, 32, 0);
	dpe_set_reg(rdma_base + DMA_STRIDE0, rdma_stride, 13, 0);
	dpe_set_reg(rdma_base + DMA_CH_CTL, 0x1, 1, 0);
}

static void dpe_rdfc_config(struct dpe_hw_ctx *ctx, const struct drm_rect *rect,
			    u32 hal_format, u32 bpp, int ch)
{
	void __iomem *rdfc_base = ctx->base + rdfc_offset[ch];

	u32 dfc_pix_in_num;
	u32 size_hrz;
	u32 size_vrt;
	u32 dfc_fmt;

	dfc_pix_in_num = (bpp <= 2) ? 0x1 : 0x0;
	size_hrz = rect->x2 - rect->x1;
	size_vrt = rect->y2 - rect->y1;

	dfc_fmt = dpe_pixel_dfc_format_map[hal_format];

	dpe_set_reg(rdfc_base + DFC_DISP_SIZE, (size_vrt | (size_hrz << 16)),
		    29, 0);
	dpe_set_reg(rdfc_base + DFC_PIX_IN_NUM, dfc_pix_in_num, 1, 0);
	dpe_set_reg(rdfc_base + DFC_DISP_FMT, dfc_fmt, 5, 1);
	dpe_set_reg(rdfc_base + DFC_CTL_CLIP_EN, 0x1, 1, 0);
	dpe_set_reg(rdfc_base + DFC_ICG_MODULE, 0x1, 1, 0);
}

static void dpe_aif_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *aif_ch_base = ctx->base + aif_offset[ch];

	dpe_set_reg(aif_ch_base, 0x0, 1, 0);
	dpe_set_reg(aif_ch_base, mid_array[ch], 4, 4);
}

static void dpe_mif_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *mif_ch_base = ctx->base + mif_offset[ch];

	dpe_set_reg(mif_ch_base + MIF_CTRL1, 0x1, 1, 5);
}

static void dpe_smmu_config_off(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *smmu_base = ctx->base + DPE_SMMU_OFFSET;
	int i, index;

	for (i = 0; i < dpe_smmu_chn_sid_num[ch]; i++) {
		index = dpe_smmu_smrx_idx[ch] + i;
		dpe_set_reg(smmu_base + SMMU_SMRx_NS + index * 0x4, 1, 32, 0);
	}
}

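/*
 * Program one read channel end to end under the MCTL mutex: bus
 * interface (AIF/MIF), SMMU bypass, read DMA and format conversion,
 * then the overlay and the MCTL routing that ties the channel to OV0.
 * The LDI is only enabled once the first frame has been configured.
 */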
static void dpe_update_channel(struct kirin_plane *kplane,
			       struct drm_framebuffer *fb, int crtc_x,
			       int crtc_y, unsigned int crtc_w,
			       unsigned int crtc_h, u32 src_x, u32 src_y,
			       u32 src_w, u32 src_h)
{
	struct dpe_hw_ctx *ctx = kplane->hw_ctx;
	struct drm_gem_cma_object *obj = drm_fb_cma_get_gem_obj(fb, 0);
	struct drm_rect rect;
	u32 bpp;
	u32 stride;
	u32 display_addr;
	u32 hal_fmt;
	u32 ch = DPE_CH0;

	bpp = fb->format->cpp[0];
	stride = fb->pitches[0];

	display_addr = (u32)obj->paddr + src_y * stride;

	rect.x1 = 0;
	rect.x2 = src_w - 1;
	rect.y1 = 0;
	rect.y2 = src_h - 1;
	hal_fmt = dpe_get_format(fb->format->format);

	dpe_mctl_lock(ctx);
	dpe_aif_config(ctx, ch);
	dpe_mif_config(ctx, ch);
	dpe_smmu_config_off(ctx, ch);

	dpe_rdma_config(ctx, &rect, display_addr, hal_fmt, bpp, ch);
	dpe_rdfc_config(ctx, &rect, hal_fmt, bpp, ch);
	dpe_ovl_config(ctx, &rect, ctx->hdisplay, ctx->vdisplay);

	dpe_mctl_ov_config(ctx, ch);
	dpe_mctl_sys_config(ctx, ch);
	dpe_mctl_unlock(ctx);
	dpe_unflow_handler(ctx);

	dpe_enable_ldi(ctx);
}

static void dpe_plane_atomic_update(struct drm_plane *plane,
				    struct drm_plane_state *old_state)
{
	struct drm_plane_state *state = plane->state;
	struct kirin_plane *kplane = to_kirin_plane(plane);

	if (!state->fb) {
		state->visible = false;
		return;
	}

	dpe_update_channel(kplane, state->fb, state->crtc_x, state->crtc_y,
			   state->crtc_w, state->crtc_h, state->src_x >> 16,
			   state->src_y >> 16, state->src_w >> 16,
			   state->src_h >> 16);
}

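/*
 * The read channel drives the overlay 1:1 (no scaler in this path),
 * so reject any state where source and CRTC rectangles differ in
 * size, read outside the framebuffer, or exceed the adjusted mode.
 */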
static int dpe_plane_atomic_check(struct drm_plane *plane,
				  struct drm_plane_state *state)
{
	struct drm_framebuffer *fb = state->fb;
	struct drm_crtc *crtc = state->crtc;
	struct drm_crtc_state *crtc_state;
	u32 src_x = state->src_x >> 16;
	u32 src_y = state->src_y >> 16;
	u32 src_w = state->src_w >> 16;
	u32 src_h = state->src_h >> 16;
	int crtc_x = state->crtc_x;
	int crtc_y = state->crtc_y;
	u32 crtc_w = state->crtc_w;
	u32 crtc_h = state->crtc_h;
	u32 fmt;

	if (!crtc || !fb)
		return 0;

	fmt = dpe_get_format(fb->format->format);
	if (fmt == DPE_UNSUPPORT)
		return -EINVAL;

	crtc_state = drm_atomic_get_crtc_state(state->state, crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	if (src_w != crtc_w || src_h != crtc_h) {
		DRM_ERROR("scaling is not supported!\n");
		return -EINVAL;
	}

	if (src_x + src_w > fb->width || src_y + src_h > fb->height)
		return -EINVAL;

	if (crtc_x < 0 || crtc_y < 0)
		return -EINVAL;

	if (crtc_x + crtc_w > crtc_state->adjusted_mode.hdisplay ||
	    crtc_y + crtc_h > crtc_state->adjusted_mode.vdisplay)
		return -EINVAL;

	return 0;
}

const struct drm_plane_helper_funcs dpe_plane_helper_funcs = {
	.atomic_check = dpe_plane_atomic_check,
	.atomic_update = dpe_plane_atomic_update,
};

const struct drm_plane_funcs dpe_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

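/*
 * IRQ flow: read all status registers, ack them immediately, then
 * filter against the current masks before acting, since the line is
 * shared (IRQF_SHARED) and masked sources can still latch status bits.
 */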
static irqreturn_t dpe_irq_handler(int irq, void *data)
{
	struct dpe_hw_ctx *ctx = data;
	struct drm_crtc *crtc = ctx->crtc;
	void __iomem *base = ctx->base;

	u32 isr_s1;
	u32 isr_s2;
	u32 isr_s2_dpp;
	u32 isr_s2_smmu;
	u32 mask;

	isr_s1 = readl(base + GLB_CPU_PDP_INTS);
	isr_s2 = readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INTS);
	isr_s2_dpp = readl(base + DPE_DPP_OFFSET + DPP_INTS);
	isr_s2_smmu = readl(base + DPE_SMMU_OFFSET + SMMU_INTSTAT_NS);

	writel(isr_s2_smmu, base + DPE_SMMU_OFFSET + SMMU_INTCLR_NS);
	writel(isr_s2_dpp, base + DPE_DPP_OFFSET + DPP_INTS);
	writel(isr_s2, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INTS);
	writel(isr_s1, base + GLB_CPU_PDP_INTS);

	isr_s1 &= ~(readl(base + GLB_CPU_PDP_INT_MSK));
	isr_s2 &= ~(readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK));
	isr_s2_dpp &= ~(readl(base + DPE_DPP_OFFSET + DPP_INT_MSK));

	if (isr_s2 & BIT_VSYNC)
		drm_crtc_handle_vblank(crtc);

	if (isr_s2 & BIT_LDI_UNFLOW) {
		mask = readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
		mask |= BIT_LDI_UNFLOW;
		writel(mask, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);

		DRM_ERROR("ldi underflow!\n");
	}

	return IRQ_HANDLED;
}

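/*
 * Map the DPE register spaces, look up the clocks, set the default
 * core and pixel rates, and install the shared IRQ handler, which
 * stays disabled until dpe_power_up() runs.  The returned context is
 * passed back into every crtc/plane callback by the kirin core.
 */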
static void *dpe_hw_ctx_alloc(struct platform_device *pdev,
			      struct drm_crtc *crtc)
{
	struct dpe_hw_ctx *ctx = NULL;
	struct device *dev = &pdev->dev;
	struct device_node *np = pdev->dev.of_node;
	int ret = 0;

	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		DRM_ERROR("failed to alloc dpe_hw_ctx\n");
		return ERR_PTR(-ENOMEM);
	}

	ctx->base = of_iomap(np, 0);
	if (!ctx->base) {
		DRM_ERROR("failed to get dpe base resource.\n");
		return ERR_PTR(-ENXIO);
	}

	ctx->noc_base = of_iomap(np, 4);
	if (!ctx->noc_base) {
		DRM_ERROR("failed to get noc_base resource.\n");
		return ERR_PTR(-ENXIO);
	}

	ctx->irq = irq_of_parse_and_map(np, 0);
	if (ctx->irq <= 0) {
		DRM_ERROR("failed to get irq_pdp resource.\n");
		return ERR_PTR(-ENXIO);
	}

	DRM_INFO("dpe irq = %d.\n", ctx->irq);

	ctx->dpe_mmbuf_clk = devm_clk_get(dev, "clk_dss_axi_mm");
	if (IS_ERR(ctx->dpe_mmbuf_clk)) {
		DRM_ERROR("failed to parse dpe_mmbuf_clk\n");
		return ERR_CAST(ctx->dpe_mmbuf_clk);
	}

	ctx->dpe_axi_clk = devm_clk_get(dev, "aclk_dss");
	if (IS_ERR(ctx->dpe_axi_clk)) {
		DRM_ERROR("failed to parse dpe_axi_clk\n");
		return ERR_CAST(ctx->dpe_axi_clk);
	}

	ctx->dpe_pclk_clk = devm_clk_get(dev, "pclk_dss");
	if (IS_ERR(ctx->dpe_pclk_clk)) {
		DRM_ERROR("failed to parse dpe_pclk_clk\n");
		return ERR_CAST(ctx->dpe_pclk_clk);
	}

	ctx->dpe_pri_clk = devm_clk_get(dev, "clk_edc0");
	if (IS_ERR(ctx->dpe_pri_clk)) {
		DRM_ERROR("failed to parse dpe_pri_clk\n");
		return ERR_CAST(ctx->dpe_pri_clk);
	}

	ret = clk_set_rate(ctx->dpe_pri_clk, DEFAULT_DPE_CORE_CLK_07V_RATE);
	if (ret < 0) {
		DRM_ERROR("dpe_pri_clk clk_set_rate(%lu) failed, error=%d!\n",
			  DEFAULT_DPE_CORE_CLK_07V_RATE, ret);
		return ERR_PTR(-EINVAL);
	}

	ctx->dpe_pxl0_clk = devm_clk_get(dev, "clk_ldi0");
	if (IS_ERR(ctx->dpe_pxl0_clk)) {
		DRM_ERROR("failed to parse dpe_pxl0_clk\n");
		return ERR_CAST(ctx->dpe_pxl0_clk);
	}

	ret = clk_set_rate(ctx->dpe_pxl0_clk, DPE_MAX_PXL0_CLK_144M);
	if (ret < 0) {
		DRM_ERROR("dpe_pxl0_clk clk_set_rate(%lu) failed, error=%d!\n",
			  DPE_MAX_PXL0_CLK_144M, ret);
		return ERR_PTR(-EINVAL);
	}

	ctx->crtc = crtc;
	ret = devm_request_irq(dev, ctx->irq, dpe_irq_handler, IRQF_SHARED,
			       dev->driver->name, ctx);
	if (ret)
		return ERR_PTR(-EIO);

	disable_irq(ctx->irq);

	return ctx;
}

static void dpe_hw_ctx_cleanup(void *hw_ctx)
{
}

extern void dsi_set_output_client(struct drm_device *dev);

static void kirin_fbdev_output_poll_changed(struct drm_device *dev)
{
	dsi_set_output_client(dev);
}

static const struct drm_mode_config_funcs dpe_mode_config_funcs = {
	.fb_create = drm_gem_fb_create,
	.output_poll_changed = kirin_fbdev_output_poll_changed,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

DEFINE_DRM_GEM_CMA_FOPS(kirin_drm_fops);

static struct drm_driver dpe_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET |
			   DRIVER_ATOMIC | DRIVER_RENDER,

	.date = "20170309",
	.fops = &kirin_drm_fops,
	.gem_free_object_unlocked = drm_gem_cma_free_object,
	.gem_vm_ops = &drm_gem_cma_vm_ops,
	.dumb_create = drm_gem_cma_dumb_create_internal,
	.prime_handle_to_fd = drm_gem_prime_handle_to_fd,
	.prime_fd_to_handle = drm_gem_prime_fd_to_handle,
	.gem_prime_export = drm_gem_prime_export,
	.gem_prime_import = drm_gem_prime_import,
	.gem_prime_get_sg_table = drm_gem_cma_prime_get_sg_table,
	.gem_prime_import_sg_table = drm_gem_cma_prime_import_sg_table,
	.gem_prime_vmap = drm_gem_cma_prime_vmap,
	.gem_prime_vunmap = drm_gem_cma_prime_vunmap,
	.gem_prime_mmap = drm_gem_cma_prime_mmap,

	.name = "kirin",
	.desc = "Hisilicon Kirin SoCs' DRM Driver",
	.major = 1,
	.minor = 0,
};

const struct kirin_drm_data dpe_driver_data = {
	.num_planes = DPE_CH_NUM,
	.prim_plane = DPE_CH0,

	.channel_formats = dpe_channel_formats,
	.channel_formats_cnt = ARRAY_SIZE(dpe_channel_formats),
	.config_max_width = 4096,
	.config_max_height = 4096,

	.driver = &dpe_driver,

	.crtc_helper_funcs = &dpe_crtc_helper_funcs,
	.crtc_funcs = &dpe_crtc_funcs,
	.plane_helper_funcs = &dpe_plane_helper_funcs,
	.plane_funcs = &dpe_plane_funcs,
	.mode_config_funcs = &dpe_mode_config_funcs,

	.alloc_hw_ctx = dpe_hw_ctx_alloc,
	.cleanup_hw_ctx = dpe_hw_ctx_cleanup,
};