1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd. */
3
4 #include <linux/delay.h>
5 #include <linux/iopoll.h>
6 #include <linux/pm_runtime.h>
7 #include <linux/rk-camera-module.h>
8 #include <media/v4l2-common.h>
9 #include <media/v4l2-event.h>
10 #include <media/v4l2-fh.h>
11 #include <media/v4l2-ioctl.h>
12 #include <media/v4l2-subdev.h>
13 #include <media/videobuf2-dma-contig.h>
14 #include "dev.h"
15 #include "isp_external.h"
16 #include "regs.h"
17
/*
 * get_remote_mipi_sensor - locate a remote entity of the given media
 * @function reachable from the ISP subdev in the media graph.
 * @dev: ISP device whose isp_sdev entity is the walk start point
 * @sensor_sd: out parameter; set to the matching v4l2 subdev, or NULL
 *             when the walk cannot be initialized or nothing matches
 * @function: MEDIA_ENT_F_* value to match against entity->function
 */
static void get_remote_mipi_sensor(struct rkisp_device *dev,
				  struct v4l2_subdev **sensor_sd, u32 function)
{
	struct media_graph graph;
	struct media_entity *entity = &dev->isp_sdev.sd.entity;
	struct media_device *mdev = entity->graph_obj.mdev;
	int ret;

	/* Walk the graph to locate sensor nodes. */
	mutex_lock(&mdev->graph_mutex);
	ret = media_graph_walk_init(&graph, mdev);
	if (ret) {
		/* walk init failed: report "no sensor" rather than an error */
		mutex_unlock(&mdev->graph_mutex);
		*sensor_sd = NULL;
		return;
	}

	media_graph_walk_start(&graph, entity);
	/* entity is left NULL when the walk is exhausted without a match */
	while ((entity = media_graph_walk_next(&graph))) {
		if (entity->function == function)
			break;
	}
	mutex_unlock(&mdev->graph_mutex);
	media_graph_walk_cleanup(&graph);

	if (entity)
		*sensor_sd = media_entity_to_v4l2_subdev(entity);
	else
		*sensor_sd = NULL;
}
48
get_remote_subdev(struct v4l2_subdev * sd)49 static struct v4l2_subdev *get_remote_subdev(struct v4l2_subdev *sd)
50 {
51 struct media_pad *local, *remote;
52 struct v4l2_subdev *remote_sd = NULL;
53
54 local = &sd->entity.pads[CSI_SINK];
55 if (!local)
56 goto end;
57 remote = media_entity_remote_pad(local);
58 if (!remote)
59 goto end;
60
61 remote_sd = media_entity_to_v4l2_subdev(remote->entity);
62 end:
63 return remote_sd;
64 }
65
/*
 * rkisp_csi_link_setup - media_entity link_setup callback for the CSI subdev.
 *
 * Tracks link state of the source pads in csi->sink[] (indexed by
 * pad index - 1) and mirrors that state onto the associated DMA-TX
 * capture stream. Enabling a pad that is already linked fails with
 * -EBUSY. Sink-pad changes are ignored.
 */
static int rkisp_csi_link_setup(struct media_entity *entity,
				const struct media_pad *local,
				const struct media_pad *remote,
				u32 flags)
{
	struct v4l2_subdev *sd = media_entity_to_v4l2_subdev(entity);
	struct rkisp_csi_device *csi;
	struct rkisp_stream *stream = NULL;
	int ret = 0;
	u8 id;

	if (!sd)
		return -ENODEV;

	csi = v4l2_get_subdevdata(sd);
	if (local->flags & MEDIA_PAD_FL_SOURCE) {
		/* pad 0 is the sink, so source pad N maps to sink[N - 1] */
		id = local->index - 1;
		/*
		 * ids 1..RKISP_STREAM_DMATX3-1 select a DMA-TX stream at
		 * stream[id + 1]; id 0 has no stream here.
		 * NOTE(review): mapping inferred from the indexing only —
		 * confirm against cap_dev.stream[] layout.
		 */
		if (id && id < RKISP_STREAM_DMATX3)
			stream = &csi->ispdev->cap_dev.stream[id + 1];
		if (flags & MEDIA_LNK_FL_ENABLED) {
			if (csi->sink[id].linked) {
				ret = -EBUSY;
				goto out;
			}
			csi->sink[id].linked = true;
			csi->sink[id].index = 1 << id;
		} else {
			csi->sink[id].linked = false;
			csi->sink[id].index = 0;
		}
		if (stream)
			stream->linked = csi->sink[id].linked;
	}

	return 0;
out:
	v4l2_err(sd, "pad%d is already linked\n", local->index);
	return ret;
}
105
rkisp_csi_g_mbus_config(struct v4l2_subdev * sd,unsigned int pad_id,struct v4l2_mbus_config * config)106 static int rkisp_csi_g_mbus_config(struct v4l2_subdev *sd,
107 unsigned int pad_id,
108 struct v4l2_mbus_config *config)
109 {
110 struct v4l2_subdev *remote_sd;
111
112 if (!sd)
113 return -ENODEV;
114 remote_sd = get_remote_subdev(sd);
115 return v4l2_subdev_call(remote_sd, pad, get_mbus_config, pad_id, config);
116 }
117
/*
 * rkisp_csi_get_set_fmt - shared get_fmt/set_fmt pad op for the CSI subdev.
 *
 * Source pad indices are shifted down by one to match the remote subdev's
 * pad numbering, then the request is forwarded as a get_fmt call.
 * NOTE: fmt->pad is modified in place, and this happens before the NULL
 * check on @sd — callers see the adjusted pad even on -ENODEV.
 */
static int rkisp_csi_get_set_fmt(struct v4l2_subdev *sd,
				 struct v4l2_subdev_pad_config *cfg,
				 struct v4l2_subdev_format *fmt)
{
	if (fmt->pad != CSI_SINK)
		fmt->pad -= 1;

	if (!sd)
		return -ENODEV;
	remote_sd = get_remote_subdev(sd);
	return v4l2_subdev_call(remote_sd, pad, get_fmt, NULL, fmt);
}
132
rkisp_csi_s_stream(struct v4l2_subdev * sd,int on)133 static int rkisp_csi_s_stream(struct v4l2_subdev *sd, int on)
134 {
135 struct rkisp_csi_device *csi = v4l2_get_subdevdata(sd);
136 struct rkisp_device *dev = csi->ispdev;
137
138 csi->err_cnt = 0;
139 csi->irq_cnt = 0;
140 memset(csi->tx_first, 0, sizeof(csi->tx_first));
141
142 if (!IS_HDR_RDBK(dev->hdr.op_mode))
143 return 0;
144 if (on)
145 rkisp_write(dev, CSI2RX_Y_STAT_CTRL, SW_Y_STAT_EN, true);
146 else
147 rkisp_write(dev, CSI2RX_Y_STAT_CTRL, 0, true);
148 return 0;
149 }
150
/*
 * rkisp_csi_s_power - core s_power stub; always succeeds.
 * Power is presumably handled via runtime PM elsewhere — TODO confirm.
 */
static int rkisp_csi_s_power(struct v4l2_subdev *sd, int on)
{
	return 0;
}
155
/* Media entity ops: custom link bookkeeping, standard link validation. */
static const struct media_entity_operations rkisp_csi_media_ops = {
	.link_setup = rkisp_csi_link_setup,
	.link_validate = v4l2_subdev_link_validate,
};
160
/* Pad ops: formats and bus config are all forwarded to the remote sensor. */
static const struct v4l2_subdev_pad_ops rkisp_csi_pad_ops = {
	.set_fmt = rkisp_csi_get_set_fmt,
	.get_fmt = rkisp_csi_get_set_fmt,
	.get_mbus_config = rkisp_csi_g_mbus_config,
};
166
/* Video ops: only stream on/off is handled locally. */
static const struct v4l2_subdev_video_ops rkisp_csi_video_ops = {
	.s_stream = rkisp_csi_s_stream,
};
170
/* Core ops: s_power is a stub (see rkisp_csi_s_power). */
static const struct v4l2_subdev_core_ops rkisp_csi_core_ops = {
	.s_power = rkisp_csi_s_power,
};
174
175 static struct v4l2_subdev_ops rkisp_csi_ops = {
176 .core = &rkisp_csi_core_ops,
177 .video = &rkisp_csi_video_ops,
178 .pad = &rkisp_csi_pad_ops,
179 };
180
csi_config(struct rkisp_csi_device * csi)181 static int csi_config(struct rkisp_csi_device *csi)
182 {
183 struct rkisp_device *dev = csi->ispdev;
184 struct rkisp_sensor_info *sensor = dev->active_sensor;
185 struct v4l2_subdev *mipi_sensor;
186 struct v4l2_ctrl *ctrl;
187 u32 emd_vc, emd_dt, mipi_ctrl;
188 int lanes, ret, i;
189
190 /*
191 * sensor->mbus is set in isp or d-phy notifier_bound function
192 */
193 switch (sensor->mbus.flags & V4L2_MBUS_CSI2_LANES) {
194 case V4L2_MBUS_CSI2_4_LANE:
195 lanes = 4;
196 break;
197 case V4L2_MBUS_CSI2_3_LANE:
198 lanes = 3;
199 break;
200 case V4L2_MBUS_CSI2_2_LANE:
201 lanes = 2;
202 break;
203 case V4L2_MBUS_CSI2_1_LANE:
204 lanes = 1;
205 break;
206 default:
207 return -EINVAL;
208 }
209
210 emd_vc = 0xFF;
211 emd_dt = 0;
212 dev->hdr.sensor = NULL;
213 get_remote_mipi_sensor(dev, &mipi_sensor, MEDIA_ENT_F_CAM_SENSOR);
214 if (mipi_sensor) {
215 ctrl = v4l2_ctrl_find(mipi_sensor->ctrl_handler,
216 CIFISP_CID_EMB_VC);
217 if (ctrl)
218 emd_vc = v4l2_ctrl_g_ctrl(ctrl);
219
220 ctrl = v4l2_ctrl_find(mipi_sensor->ctrl_handler,
221 CIFISP_CID_EMB_DT);
222 if (ctrl)
223 emd_dt = v4l2_ctrl_g_ctrl(ctrl);
224 dev->hdr.sensor = mipi_sensor;
225 }
226
227 dev->emd_dt = emd_dt;
228 dev->emd_vc = emd_vc;
229 dev->emd_data_idx = 0;
230 if (emd_vc <= CIF_ISP_ADD_DATA_VC_MAX) {
231 for (i = 0; i < RKISP_EMDDATA_FIFO_MAX; i++) {
232 ret = kfifo_alloc(&dev->emd_data_fifo[i].mipi_kfifo,
233 CIFISP_ADD_DATA_FIFO_SIZE,
234 GFP_ATOMIC);
235 if (ret) {
236 v4l2_err(&dev->v4l2_dev,
237 "kfifo_alloc failed with error %d\n",
238 ret);
239 return ret;
240 }
241 }
242 }
243
244 if (dev->isp_ver == ISP_V13 ||
245 dev->isp_ver == ISP_V12) {
246 /* lanes */
247 rkisp_write(dev, CIF_ISP_CSI0_CTRL1, lanes - 1, true);
248
249 /* linecnt */
250 rkisp_write(dev, CIF_ISP_CSI0_CTRL2, 0x3FFF, true);
251
252 /* Configure Data Type and Virtual Channel */
253 rkisp_write(dev, CIF_ISP_CSI0_DATA_IDS_1,
254 csi->mipi_di[0] | csi->mipi_di[1] << 8, true);
255
256 /* clear interrupts state */
257 rkisp_read(dev, CIF_ISP_CSI0_ERR1, true);
258 rkisp_read(dev, CIF_ISP_CSI0_ERR2, true);
259 rkisp_read(dev, CIF_ISP_CSI0_ERR3, true);
260 /* set interrupts mask */
261 rkisp_write(dev, CIF_ISP_CSI0_MASK1, 0x1FFFFFF0, true);
262 rkisp_write(dev, CIF_ISP_CSI0_MASK2, 0x03FFFFFF, true);
263 rkisp_write(dev, CIF_ISP_CSI0_MASK3,
264 CIF_ISP_CSI0_IMASK_FRAME_END(0x3F) |
265 CIF_ISP_CSI0_IMASK_RAW0_OUT_V_END |
266 CIF_ISP_CSI0_IMASK_RAW1_OUT_V_END |
267 CIF_ISP_CSI0_IMASK_LINECNT, true);
268
269 v4l2_dbg(1, rkisp_debug, &dev->v4l2_dev,
270 "CSI0_CTRL1 0x%08x\n"
271 "CSI0_IDS 0x%08x\n"
272 "CSI0_MASK3 0x%08x\n",
273 rkisp_read(dev, CIF_ISP_CSI0_CTRL1, true),
274 rkisp_read(dev, CIF_ISP_CSI0_DATA_IDS_1, true),
275 rkisp_read(dev, CIF_ISP_CSI0_MASK3, true));
276 } else if (dev->isp_ver == ISP_V20 || dev->isp_ver == ISP_V21) {
277 bool is_feature_on = dev->hw_dev->is_feature_on;
278 u64 iq_feature = dev->hw_dev->iq_feature;
279 struct rkmodule_hdr_cfg hdr_cfg;
280 u32 val, mask;
281
282 dev->hdr.op_mode = HDR_NORMAL;
283 dev->hdr.esp_mode = HDR_NORMAL_VC;
284 memset(&hdr_cfg, 0, sizeof(hdr_cfg));
285 if (rkisp_csi_get_hdr_cfg(dev, &hdr_cfg) == 0) {
286 dev->hdr.op_mode = hdr_cfg.hdr_mode;
287 dev->hdr.esp_mode = hdr_cfg.esp.mode;
288 }
289
290 /* normal read back mode */
291 if (dev->hdr.op_mode == HDR_NORMAL &&
292 (dev->isp_inp & INP_RAWRD2 || !dev->hw_dev->is_single))
293 dev->hdr.op_mode = HDR_RDBK_FRAME1;
294 /* HDR on the fly for isp21 */
295 if (dev->isp_ver == ISP_V21 && !(dev->isp_inp & INP_RAWRD2))
296 if (dev->hdr.op_mode == HDR_RDBK_FRAME2)
297 dev->hdr.op_mode = HDR_LINEX2_DDR;
298
299 /* op_mode update by mi_cfg_upd */
300 if (!dev->hw_dev->is_mi_update)
301 rkisp_write(dev, CSI2RX_CTRL0,
302 SW_IBUF_OP_MODE(dev->hdr.op_mode) |
303 SW_HDR_ESP_MODE(dev->hdr.esp_mode), true);
304 rkisp_write(dev, CSI2RX_CTRL1, lanes - 1, true);
305 rkisp_write(dev, CSI2RX_CTRL2, 0x3FFF, true);
306 val = SW_CSI_ID1(csi->mipi_di[1]) |
307 SW_CSI_ID2(csi->mipi_di[2]) |
308 SW_CSI_ID3(csi->mipi_di[3]);
309 mask = SW_CSI_ID1(0xff) | SW_CSI_ID2(0xff) | SW_CSI_ID3(0xff);
310 /* CSI_ID0 is for dmarx when read back mode */
311 if (dev->hw_dev->is_single) {
312 val |= SW_CSI_ID0(csi->mipi_di[0]);
313 rkisp_write(dev, CSI2RX_DATA_IDS_1, val, true);
314 } else {
315 rkisp_set_bits(dev, CSI2RX_DATA_IDS_1, mask, val, true);
316 for (i = 0; i < dev->hw_dev->dev_num; i++) {
317 if (dev->hw_dev->isp[i] &&
318 !dev->hw_dev->isp[i]->is_hw_link)
319 continue;
320 rkisp_set_bits(dev->hw_dev->isp[i],
321 CSI2RX_DATA_IDS_1, mask, val, false);
322 }
323 }
324 val = SW_CSI_ID4(csi->mipi_di[4]);
325 rkisp_write(dev, CSI2RX_DATA_IDS_2, val, true);
326 /* clear interrupts state */
327 rkisp_read(dev, CSI2RX_ERR_PHY, true);
328 /* set interrupts mask */
329 val = PHY_ERR_SOTHS | PHY_ERR_SOTSYNCHS |
330 PHY_ERR_EOTSYNCHS | PHY_ERR_ESC | PHY_ERR_CTL;
331 rkisp_write(dev, CSI2RX_MASK_PHY, val, true);
332 val = PACKET_ERR_F_BNDRY_MATCG | PACKET_ERR_F_SEQ |
333 PACKET_ERR_FRAME_DATA | PACKET_ERR_ECC_1BIT |
334 PACKET_ERR_ECC_2BIT | PACKET_ERR_CHECKSUM;
335 rkisp_write(dev, CSI2RX_MASK_PACKET, val, true);
336 val = AFIFO0_OVERFLOW | AFIFO1X_OVERFLOW |
337 LAFIFO1X_OVERFLOW | AFIFO2X_OVERFLOW |
338 IBUFX3_OVERFLOW | IBUF3R_OVERFLOW |
339 Y_STAT_AFIFOX3_OVERFLOW;
340 rkisp_write(dev, CSI2RX_MASK_OVERFLOW, val, true);
341 val = RAW0_WR_FRAME | RAW1_WR_FRAME | RAW2_WR_FRAME |
342 RAW_WR_SIZE_ERR | MIPI_LINECNT |
343 RAW_RD_SIZE_ERR | RAW0_Y_STATE |
344 RAW1_Y_STATE | RAW2_Y_STATE;
345 if (dev->isp_ver == ISP_V20)
346 val |= MIPI_DROP_FRM;
347 else
348 val |= ISP21_MIPI_DROP_FRM;
349 rkisp_write(dev, CSI2RX_MASK_STAT, val, true);
350
351 /* hdr merge */
352 switch (dev->hdr.op_mode) {
353 case HDR_RDBK_FRAME2:
354 case HDR_FRAMEX2_DDR:
355 case HDR_LINEX2_DDR:
356 case HDR_LINEX2_NO_DDR:
357 val = SW_HDRMGE_EN |
358 SW_HDRMGE_MODE_FRAMEX2;
359 break;
360 case HDR_RDBK_FRAME3:
361 case HDR_FRAMEX3_DDR:
362 case HDR_LINEX3_DDR:
363 val = SW_HDRMGE_EN |
364 SW_HDRMGE_MODE_FRAMEX3;
365 break;
366 default:
367 val = 0;
368 }
369 if (is_feature_on) {
370 if ((ISP2X_MODULE_HDRMGE & ~iq_feature) && (val & SW_HDRMGE_EN)) {
371 v4l2_err(&dev->v4l2_dev, "hdrmge is not supported\n");
372 return -EINVAL;
373 }
374 }
375 rkisp_write(dev, ISP_HDRMGE_BASE, val, false);
376
377 v4l2_dbg(1, rkisp_debug, &dev->v4l2_dev,
378 "CSI2RX_IDS 0x%08x 0x%08x\n",
379 rkisp_read(dev, CSI2RX_DATA_IDS_1, true),
380 rkisp_read(dev, CSI2RX_DATA_IDS_2, true));
381 } else {
382 mipi_ctrl = CIF_MIPI_CTRL_NUM_LANES(lanes - 1) |
383 CIF_MIPI_CTRL_SHUTDOWNLANES(0xf) |
384 CIF_MIPI_CTRL_ERR_SOT_SYNC_HS_SKIP |
385 CIF_MIPI_CTRL_CLOCKLANE_ENA;
386
387 rkisp_write(dev, CIF_MIPI_CTRL, mipi_ctrl, true);
388
389 /* Configure Data Type and Virtual Channel */
390 rkisp_write(dev, CIF_MIPI_IMG_DATA_SEL,
391 csi->mipi_di[0], true);
392
393 rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_1,
394 CIF_MIPI_DATA_SEL_DT(emd_dt) |
395 CIF_MIPI_DATA_SEL_VC(emd_vc), true);
396 rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_2,
397 CIF_MIPI_DATA_SEL_DT(emd_dt) |
398 CIF_MIPI_DATA_SEL_VC(emd_vc), true);
399 rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_3,
400 CIF_MIPI_DATA_SEL_DT(emd_dt) |
401 CIF_MIPI_DATA_SEL_VC(emd_vc), true);
402 rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_4,
403 CIF_MIPI_DATA_SEL_DT(emd_dt) |
404 CIF_MIPI_DATA_SEL_VC(emd_vc), true);
405
406 /* Clear MIPI interrupts */
407 rkisp_write(dev, CIF_MIPI_ICR, ~0, true);
408 /*
409 * Disable CIF_MIPI_ERR_DPHY interrupt here temporary for
410 * isp bus may be dead when switch isp.
411 */
412 rkisp_write(dev, CIF_MIPI_IMSC,
413 CIF_MIPI_FRAME_END | CIF_MIPI_ERR_CSI |
414 CIF_MIPI_ERR_DPHY | CIF_MIPI_SYNC_FIFO_OVFLW(0x0F) |
415 CIF_MIPI_ADD_DATA_OVFLW, true);
416
417 v4l2_dbg(1, rkisp_debug, &dev->v4l2_dev,
418 "\n MIPI_CTRL 0x%08x\n"
419 " MIPI_IMG_DATA_SEL 0x%08x\n"
420 " MIPI_STATUS 0x%08x\n"
421 " MIPI_IMSC 0x%08x\n",
422 rkisp_read(dev, CIF_MIPI_CTRL, true),
423 rkisp_read(dev, CIF_MIPI_IMG_DATA_SEL, true),
424 rkisp_read(dev, CIF_MIPI_STATUS, true),
425 rkisp_read(dev, CIF_MIPI_IMSC, true));
426 }
427
428 return 0;
429 }
430
/*
 * rkisp_expander_config - program the ISP32 expander block, which
 * decompresses HDR-compressed sensor data (HDR_COMPR mode).
 * @dev: ISP device; a no-op (returns 0) on anything other than ISP_V32
 * @cfg: HDR configuration; when NULL it is queried from the remote subdev
 * @on:  false disables the expander and returns immediately
 *
 * Returns 0 on success (including the no-op cases), -EINVAL when no HDR
 * configuration could be obtained.
 */
int rkisp_expander_config(struct rkisp_device *dev,
			  struct rkmodule_hdr_cfg *cfg, bool on)
{
	struct rkmodule_hdr_cfg hdr_cfg;
	u32 i, val, num, d0, d1, drop_bit = 0;

	if (dev->isp_ver != ISP_V32)
		return 0;

	if (!on) {
		rkisp_write(dev, ISP32_EXPD_CTRL, 0, false);
		return 0;
	}

	if (!cfg) {
		if (rkisp_csi_get_hdr_cfg(dev, &hdr_cfg) != 0)
			goto err;
		cfg = &hdr_cfg;
	}

	/* nothing to program unless the sensor sends compressed HDR data */
	if (cfg->hdr_mode != HDR_COMPR)
		return 0;

	/* compressed data max 12bit and src data max 20bit */
	if (cfg->compr.bit > 20)
		drop_bit = cfg->compr.bit - 20;
	dev->hdr.compr_bit = cfg->compr.bit - drop_bit;

	/* per-segment slopes, one 32-bit register each */
	num = cfg->compr.segment;
	for (i = 0; i < num; i++) {
		val = cfg->compr.slope_k[i];
		rkisp_write(dev, ISP32_EXPD_K0 + i * 4, val, false);
	}

	/* first knee point: x clamped to 12 bit, y shifted by drop_bit */
	d0 = 0;
	d1 = cfg->compr.data_compr[0];
	val = ISP32_EXPD_DATA(d0, d1 > 0xfff ? 0xfff : d1);
	rkisp_write(dev, ISP32_EXPD_X00_01, val, false);

	d1 = cfg->compr.data_src_shitf[0];
	val = ISP32_EXPD_DATA(d0, drop_bit ? d1 >> drop_bit : d1);
	rkisp_write(dev, ISP32_EXPD_Y00_01, val, false);

	/*
	 * Middle points, packed two per register.
	 * NOTE(review): assumes cfg->compr.segment >= 2 (num is u32, so
	 * num - 1 would wrap for segment == 0) — confirm against callers.
	 */
	for (i = 1; i < num - 1; i += 2) {
		d0 = cfg->compr.data_compr[i];
		d1 = cfg->compr.data_compr[i + 1];
		val = ISP32_EXPD_DATA(d0 > 0xfff ? 0xfff : d0,
				      d1 > 0xfff ? 0xfff : d1);
		rkisp_write(dev, ISP32_EXPD_X00_01 + (i + 1) * 2, val, false);

		d0 = cfg->compr.data_src_shitf[i];
		d1 = cfg->compr.data_src_shitf[i + 1];
		if (drop_bit) {
			d0 = d0 >> drop_bit;
			d1 = d1 >> drop_bit;
		}
		val = ISP32_EXPD_DATA(d0, d1);
		rkisp_write(dev, ISP32_EXPD_Y00_01 + (i + 1) * 2, val, false);
	}

	/* the last valid point */
	val = cfg->compr.data_compr[i];
	val = val > 0xfff ? 0xfff : val;
	d0 = ISP32_EXPD_DATA(val, val);

	val = cfg->compr.data_src_shitf[i];
	val = drop_bit ? val >> drop_bit : val;
	d1 = ISP32_EXPD_DATA(val, val);

	/* replicate the last point into the unused slots up to 16 */
	num = HDR_COMPR_SEGMENT_16;
	for (; i < num - 1; i += 2) {
		rkisp_write(dev, ISP32_EXPD_X00_01 + (i + 1) * 2, d0, false);
		rkisp_write(dev, ISP32_EXPD_Y00_01 + (i + 1) * 2, d1, false);
	}
	rkisp_write(dev, ISP32_EXPD_Y16, val, false);

	/* the mode field encodes the configured segment count */
	switch (cfg->compr.segment) {
	case HDR_COMPR_SEGMENT_12:
		num = 1;
		break;
	case HDR_COMPR_SEGMENT_16:
		num = 2;
		break;
	default:
		num = 0;
	}
	val = ISP32_EXPD_EN |
	      ISP32_EXPD_MODE(num) |
	      ISP32_EXPD_K_SHIFT(cfg->compr.k_shift);
	rkisp_write(dev, ISP32_EXPD_CTRL, val, false);
	return 0;
err:
	return -EINVAL;
}
525
rkisp_csi_get_hdr_cfg(struct rkisp_device * dev,void * arg)526 int rkisp_csi_get_hdr_cfg(struct rkisp_device *dev, void *arg)
527 {
528 struct rkmodule_hdr_cfg *cfg = arg;
529 struct v4l2_subdev *sd = NULL;
530 u32 type;
531
532 if (dev->isp_inp & INP_CSI) {
533 type = MEDIA_ENT_F_CAM_SENSOR;
534 } else if (dev->isp_inp & INP_CIF) {
535 type = MEDIA_ENT_F_PROC_VIDEO_COMPOSER;
536 } else {
537 switch (dev->isp_inp & 0x7) {
538 case INP_RAWRD2 | INP_RAWRD0:
539 cfg->hdr_mode = HDR_RDBK_FRAME2;
540 break;
541 case INP_RAWRD2 | INP_RAWRD1 | INP_RAWRD0:
542 cfg->hdr_mode = HDR_RDBK_FRAME3;
543 break;
544 default: //INP_RAWRD2
545 cfg->hdr_mode = HDR_RDBK_FRAME1;
546 }
547 return 0;
548 }
549 get_remote_mipi_sensor(dev, &sd, type);
550 if (!sd) {
551 v4l2_err(&dev->v4l2_dev, "%s don't find subdev\n", __func__);
552 return -EINVAL;
553 }
554
555 return v4l2_subdev_call(sd, core, ioctl, RKMODULE_GET_HDR_CFG, cfg);
556 }
557
/*
 * rkisp_csi_config_patch - configure the ISP input path for the current
 * input source (CSI sensor, vicap/CIF, or raw read-back).
 *
 * For CSI input this defers to csi_config(); otherwise it derives the
 * HDR operating mode, negotiates the vicap read-back mode via subdev
 * ioctls, and programs the input buffer op mode, HDR merge mode and
 * the status interrupt masks. Also resets the read-back counters and
 * caches the resulting op mode in dev->rd_mode.
 *
 * Returns 0 on success or a negative error code.
 */
int rkisp_csi_config_patch(struct rkisp_device *dev)
{
	int val = 0, ret = 0;
	struct v4l2_subdev *mipi_sensor;
	bool is_feature_on = dev->hw_dev->is_feature_on;
	u64 iq_feature = dev->hw_dev->iq_feature;

	if (dev->isp_inp & INP_CSI) {
		/* direct MIPI CSI input: full receiver configuration */
		dev->hw_dev->mipi_dev_id = dev->dev_id;
		ret = csi_config(&dev->csi_dev);
	} else {
		struct rkmodule_hdr_cfg hdr_cfg;

		memset(&hdr_cfg, 0, sizeof(hdr_cfg));
		ret = rkisp_csi_get_hdr_cfg(dev, &hdr_cfg);
		if (dev->isp_inp & INP_CIF) {
			struct rkisp_vicap_mode mode;
			int buf_cnt;

			memset(&mode, 0, sizeof(mode));
			mode.name = dev->name;

			/* the vicap composer acts as the remote "sensor" */
			get_remote_mipi_sensor(dev, &mipi_sensor, MEDIA_ENT_F_PROC_VIDEO_COMPOSER);
			if (!mipi_sensor)
				return -EINVAL;
			dev->hdr.op_mode = HDR_NORMAL;
			dev->hdr.esp_mode = HDR_NORMAL_VC;
			if (!ret) {
				dev->hdr.op_mode = hdr_cfg.hdr_mode;
				dev->hdr.esp_mode = hdr_cfg.esp.mode;
				rkisp_expander_config(dev, &hdr_cfg, true);
			}

			/* normal read back mode default */
			if (dev->hdr.op_mode == HDR_NORMAL || dev->hdr.op_mode == HDR_COMPR)
				dev->hdr.op_mode = HDR_RDBK_FRAME1;

			if (dev->isp_inp == INP_CIF && dev->isp_ver > ISP_V21)
				mode.rdbk_mode = dev->is_rdbk_auto ? RKISP_VICAP_RDBK_AUTO : RKISP_VICAP_ONLINE;
			else
				mode.rdbk_mode = RKISP_VICAP_RDBK_AIQ;
			v4l2_subdev_call(mipi_sensor, core, ioctl, RKISP_VICAP_CMD_MODE, &mode);
			dev->vicap_in = mode.input;
			/* vicap direct to isp */
			if (dev->isp_ver >= ISP_V30 && !mode.rdbk_mode) {
				/* online mode: map read-back HDR modes to
				 * their line-interleaved DDR equivalents
				 */
				switch (dev->hdr.op_mode) {
				case HDR_RDBK_FRAME3:
					dev->hdr.op_mode = HDR_LINEX3_DDR;
					break;
				case HDR_RDBK_FRAME2:
					dev->hdr.op_mode = HDR_LINEX2_DDR;
					break;
				default:
					dev->hdr.op_mode = HDR_NORMAL;
				}
				if (dev->hdr.op_mode != HDR_NORMAL) {
					buf_cnt = 1;
					v4l2_subdev_call(mipi_sensor, core, ioctl,
							 RKISP_VICAP_CMD_INIT_BUF, &buf_cnt);
				}
			} else if (mode.rdbk_mode == RKISP_VICAP_RDBK_AUTO) {
				buf_cnt = RKISP_VICAP_BUF_CNT;
				v4l2_subdev_call(mipi_sensor, core, ioctl,
						 RKISP_VICAP_CMD_INIT_BUF, &buf_cnt);
			}
		} else {
			/* raw read-back: mode was derived from isp_inp */
			dev->hdr.op_mode = hdr_cfg.hdr_mode;
		}

		/* op_mode is otherwise latched by the mi config update */
		if (!dev->hw_dev->is_mi_update)
			rkisp_unite_write(dev, CSI2RX_CTRL0,
					  SW_IBUF_OP_MODE(dev->hdr.op_mode),
					  true, dev->hw_dev->is_unite);

		/* hdr merge */
		switch (dev->hdr.op_mode) {
		case HDR_RDBK_FRAME2:
		case HDR_FRAMEX2_DDR:
		case HDR_LINEX2_DDR:
		case HDR_LINEX2_NO_DDR:
			val = SW_HDRMGE_EN | SW_HDRMGE_MODE_FRAMEX2;
			break;
		case HDR_RDBK_FRAME3:
		case HDR_FRAMEX3_DDR:
		case HDR_LINEX3_DDR:
			val = SW_HDRMGE_EN | SW_HDRMGE_MODE_FRAMEX3;
			break;
		default:
			val = 0;
		}
		if (is_feature_on) {
			/* reject HDR merge when the IQ feature bit is absent */
			if ((ISP2X_MODULE_HDRMGE & ~iq_feature) && (val & SW_HDRMGE_EN)) {
				v4l2_err(&dev->v4l2_dev, "hdrmge is not supported\n");
				return -EINVAL;
			}
		}
		rkisp_unite_write(dev, ISP_HDRMGE_BASE, val, false, dev->hw_dev->is_unite);

		val = RAW_RD_SIZE_ERR;
		if (!IS_HDR_RDBK(dev->hdr.op_mode))
			val |= ISP21_MIPI_DROP_FRM;
		rkisp_unite_set_bits(dev, CSI2RX_MASK_STAT, 0, val, true, dev->hw_dev->is_unite);
	}

	if (IS_HDR_RDBK(dev->hdr.op_mode))
		rkisp_unite_set_bits(dev, CTRL_SWS_CFG, 0, SW_MPIP_DROP_FRM_DIS,
				     true, dev->hw_dev->is_unite);

	if (dev->isp_ver >= ISP_V30)
		rkisp_unite_set_bits(dev, CTRL_SWS_CFG, 0, ISP3X_SW_ACK_FRM_PRO_DIS,
				     true, dev->hw_dev->is_unite);
	/* line counter from isp out, default from mp out */
	if (dev->isp_ver == ISP_V32_L)
		rkisp_unite_set_bits(dev, CTRL_SWS_CFG, 0, ISP32L_ISP2ENC_CNT_MUX,
				     true, dev->hw_dev->is_unite);
	/* reset read-back frame counters */
	dev->rdbk_cnt = -1;
	dev->rdbk_cnt_x1 = -1;
	dev->rdbk_cnt_x2 = -1;
	dev->rdbk_cnt_x3 = -1;
	dev->rd_mode = dev->hdr.op_mode;

	return ret;
}
681
/*
 * rkisp_csi_sof - start-of-frame hook for the CSI DMA channels.
 *
 * Only the DMA channel carrying the long frame should raise the ISP
 * start-of-frame event; which channel that is depends on the HDR
 * operating mode. All other channels (and modes) are ignored.
 */
void rkisp_csi_sof(struct rkisp_device *dev, u8 id)
{
	u8 long_frame_id;

	/* to get long frame vc_start */
	switch (dev->hdr.op_mode) {
	case HDR_RDBK_FRAME1:
		long_frame_id = HDR_DMA2;
		break;
	case HDR_RDBK_FRAME2:
	case HDR_FRAMEX2_DDR:
	case HDR_LINEX2_DDR:
		long_frame_id = HDR_DMA0;
		break;
	case HDR_RDBK_FRAME3:
	case HDR_FRAMEX3_DDR:
	case HDR_LINEX3_DDR:
		long_frame_id = HDR_DMA1;
		break;
	default:
		return;
	}

	if (id == long_frame_id)
		rkisp_isp_queue_event_sof(&dev->isp_sdev);
}
708
/*
 * rkisp_register_csi_subdev - initialize and register the CSI v4l2 subdev.
 *
 * Sets up the pads according to the ISP revision: base sink + CH0
 * source for old ISPs, the full pad set for ISP_V20/V21. For
 * ISP_V30 and newer this returns 0 early WITHOUT registering any
 * entity or subdev (the CSI block is not used as a subdev there).
 *
 * Returns 0 on success or a negative error code.
 */
int rkisp_register_csi_subdev(struct rkisp_device *dev,
			      struct v4l2_device *v4l2_dev)
{
	struct rkisp_csi_device *csi_dev = &dev->csi_dev;
	struct v4l2_subdev *sd;
	int ret;

	memset(csi_dev, 0, sizeof(*csi_dev));
	csi_dev->ispdev = dev;
	sd = &csi_dev->sd;

	v4l2_subdev_init(sd, &rkisp_csi_ops);
	sd->flags |= V4L2_SUBDEV_FL_HAS_DEVNODE;
	sd->entity.ops = &rkisp_csi_media_ops;
	sd->entity.function = MEDIA_ENT_F_V4L2_SUBDEV_UNKNOWN;
	snprintf(sd->name, sizeof(sd->name), CSI_DEV_NAME);

	csi_dev->pads[CSI_SINK].flags =
		MEDIA_PAD_FL_SINK | MEDIA_PAD_FL_MUST_CONNECT;
	csi_dev->pads[CSI_SRC_CH0].flags =
		MEDIA_PAD_FL_SOURCE | MEDIA_PAD_FL_MUST_CONNECT;

	csi_dev->max_pad = CSI_SRC_CH0 + 1;
	if (dev->isp_ver == ISP_V20 || dev->isp_ver == ISP_V21) {
		/* V20/V21 expose the extra DMA-TX source channels */
		csi_dev->max_pad = CSI_PAD_MAX;
		csi_dev->pads[CSI_SRC_CH1].flags = MEDIA_PAD_FL_SOURCE;
		csi_dev->pads[CSI_SRC_CH2].flags = MEDIA_PAD_FL_SOURCE;
		csi_dev->pads[CSI_SRC_CH3].flags = MEDIA_PAD_FL_SOURCE;
		csi_dev->pads[CSI_SRC_CH4].flags = MEDIA_PAD_FL_SOURCE;
	} else if (dev->isp_ver >= ISP_V30) {
		/* no CSI subdev on V30+: bail out before registration */
		return 0;
	}

	ret = media_entity_pads_init(&sd->entity, csi_dev->max_pad,
				     csi_dev->pads);
	if (ret < 0)
		return ret;

	sd->owner = THIS_MODULE;
	v4l2_set_subdevdata(sd, csi_dev);
	sd->grp_id = GRP_ID_CSI;
	ret = v4l2_device_register_subdev(v4l2_dev, sd);
	if (ret < 0) {
		v4l2_err(v4l2_dev, "Failed to register csi subdev\n");
		goto free_media;
	}

	return 0;
free_media:
	media_entity_cleanup(&sd->entity);
	return ret;
}
761
/*
 * rkisp_unregister_csi_subdev - tear down the CSI subdev registered by
 * rkisp_register_csi_subdev(): unregister from v4l2 and release the
 * media entity resources.
 */
void rkisp_unregister_csi_subdev(struct rkisp_device *dev)
{
	struct v4l2_subdev *sd = &dev->csi_dev.sd;

	v4l2_device_unregister_subdev(sd);
	media_entity_cleanup(&sd->entity);
}
769