// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd. */

#include <linux/delay.h>
#include <linux/iopoll.h>
#include <linux/pm_runtime.h>
#include <linux/rk-camera-module.h>
#include <media/v4l2-common.h>
#include <media/v4l2-event.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-dma-contig.h>
#include "dev.h"
#include "isp_external.h"
#include "regs.h"

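/*
 * Walk the media graph from the ISP subdev entity and return, via
 * *sensor_sd, the first remote entity whose function matches @function
 * (e.g. MEDIA_ENT_F_CAM_SENSOR), or NULL if none is connected.
 */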
static void get_remote_mipi_sensor(struct rkisp_device *dev,
				  struct v4l2_subdev **sensor_sd, u32 function)
{
	struct media_graph graph;
	struct media_entity *entity = &dev->isp_sdev.sd.entity;
	struct media_device *mdev = entity->graph_obj.mdev;
	int ret;

	/* Walk the graph to locate sensor nodes. */
	mutex_lock(&mdev->graph_mutex);
	ret = media_graph_walk_init(&graph, mdev);
	if (ret) {
		mutex_unlock(&mdev->graph_mutex);
		*sensor_sd = NULL;
		return;
	}

	media_graph_walk_start(&graph, entity);
	while ((entity = media_graph_walk_next(&graph))) {
		if (entity->function == function)
			break;
	}
	mutex_unlock(&mdev->graph_mutex);
	media_graph_walk_cleanup(&graph);

	if (entity)
		*sensor_sd = media_entity_to_v4l2_subdev(entity);
	else
		*sensor_sd = NULL;
}

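/*
 * Return the subdev connected to the CSI sink pad, or NULL if the
 * sink pad has no remote link.
 */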
static struct v4l2_subdev *get_remote_subdev(struct v4l2_subdev *sd)
{
	struct media_pad *local, *remote;
	struct v4l2_subdev *remote_sd = NULL;

	local = &sd->entity.pads[CSI_SINK];
	if (!local)
		goto end;
	remote = media_entity_remote_pad(local);
	if (!remote)
		goto end;

	remote_sd = media_entity_to_v4l2_subdev(remote->entity);
end:
	return remote_sd;
}

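/*
 * Media link setup: record which CSI source pads are linked and mirror
 * the link state into the matching DMA-TX capture stream. Enabling a
 * source pad that is already linked fails with -EBUSY.
 */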
static int rkisp_csi_link_setup(struct media_entity *entity,
				 const struct media_pad *local,
				 const struct media_pad *remote,
				 u32 flags)
{
	struct v4l2_subdev *sd = media_entity_to_v4l2_subdev(entity);
	struct rkisp_csi_device *csi;
	struct rkisp_stream *stream = NULL;
	int ret = 0;
	u8 id;

	if (!sd)
		return -ENODEV;

	csi = v4l2_get_subdevdata(sd);
	if (local->flags & MEDIA_PAD_FL_SOURCE) {
		id = local->index - 1;
		if (id && id < RKISP_STREAM_DMATX3)
			stream = &csi->ispdev->cap_dev.stream[id + 1];
		if (flags & MEDIA_LNK_FL_ENABLED) {
			if (csi->sink[id].linked) {
				ret = -EBUSY;
				goto out;
			}
			csi->sink[id].linked = true;
			csi->sink[id].index = 1 << id;
		} else {
			csi->sink[id].linked = false;
			csi->sink[id].index = 0;
		}
		if (stream)
			stream->linked = csi->sink[id].linked;
	}

	return 0;
out:
	v4l2_err(sd, "pad%d is already linked\n", local->index);
	return ret;
}

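/* Forward the mbus config query to the remote subdev on the sink pad. */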
static int rkisp_csi_g_mbus_config(struct v4l2_subdev *sd,
				   unsigned int pad_id,
				   struct v4l2_mbus_config *config)
{
	struct v4l2_subdev *remote_sd;

	if (!sd)
		return -ENODEV;
	remote_sd = get_remote_subdev(sd);
	return v4l2_subdev_call(remote_sd, pad, get_mbus_config, pad_id, config);
}

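/*
 * Both get_fmt and set_fmt are answered by the remote subdev on the sink
 * pad; source pad indices are shifted down by one before forwarding.
 */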
static int rkisp_csi_get_set_fmt(struct v4l2_subdev *sd,
				  struct v4l2_subdev_pad_config *cfg,
				  struct v4l2_subdev_format *fmt)
{
	struct v4l2_subdev *remote_sd;

	if (fmt->pad != CSI_SINK)
		fmt->pad -= 1;

	if (!sd)
		return -ENODEV;
	remote_sd = get_remote_subdev(sd);
	return v4l2_subdev_call(remote_sd, pad, get_fmt, NULL, fmt);
}

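/*
 * Stream on/off: reset the per-channel error/irq counters and, in HDR
 * read-back mode, enable or disable the CSI2RX Y statistics block.
 */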
static int rkisp_csi_s_stream(struct v4l2_subdev *sd, int on)
{
	struct rkisp_csi_device *csi = v4l2_get_subdevdata(sd);
	struct rkisp_device *dev = csi->ispdev;

	csi->err_cnt = 0;
	csi->irq_cnt = 0;
	memset(csi->tx_first, 0, sizeof(csi->tx_first));

	if (!IS_HDR_RDBK(dev->hdr.op_mode))
		return 0;
	if (on)
		rkisp_write(dev, CSI2RX_Y_STAT_CTRL, SW_Y_STAT_EN, true);
	else
		rkisp_write(dev, CSI2RX_Y_STAT_CTRL, 0, true);
	return 0;
}

static int rkisp_csi_s_power(struct v4l2_subdev *sd, int on)
{
	return 0;
}

static const struct media_entity_operations rkisp_csi_media_ops = {
	.link_setup = rkisp_csi_link_setup,
	.link_validate = v4l2_subdev_link_validate,
};

static const struct v4l2_subdev_pad_ops rkisp_csi_pad_ops = {
	.set_fmt = rkisp_csi_get_set_fmt,
	.get_fmt = rkisp_csi_get_set_fmt,
	.get_mbus_config = rkisp_csi_g_mbus_config,
};

static const struct v4l2_subdev_video_ops rkisp_csi_video_ops = {
	.s_stream = rkisp_csi_s_stream,
};

static const struct v4l2_subdev_core_ops rkisp_csi_core_ops = {
	.s_power = rkisp_csi_s_power,
};

static struct v4l2_subdev_ops rkisp_csi_ops = {
	.core = &rkisp_csi_core_ops,
	.video = &rkisp_csi_video_ops,
	.pad = &rkisp_csi_pad_ops,
};

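/*
 * Program the MIPI CSI-2 receiver: derive the lane count from the sensor
 * mbus flags, pick up the embedded-data VC/DT controls from the sensor if
 * present, then set up data IDs, interrupt masks and (for ISP_V20/V21)
 * the HDR merge mode according to the ISP version.
 */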
static int csi_config(struct rkisp_csi_device *csi)
{
	struct rkisp_device *dev = csi->ispdev;
	struct rkisp_sensor_info *sensor = dev->active_sensor;
	struct v4l2_subdev *mipi_sensor;
	struct v4l2_ctrl *ctrl;
	u32 emd_vc, emd_dt, mipi_ctrl;
	int lanes, ret, i;

	/*
	 * sensor->mbus is set in isp or d-phy notifier_bound function
	 */
	switch (sensor->mbus.flags & V4L2_MBUS_CSI2_LANES) {
	case V4L2_MBUS_CSI2_4_LANE:
		lanes = 4;
		break;
	case V4L2_MBUS_CSI2_3_LANE:
		lanes = 3;
		break;
	case V4L2_MBUS_CSI2_2_LANE:
		lanes = 2;
		break;
	case V4L2_MBUS_CSI2_1_LANE:
		lanes = 1;
		break;
	default:
		return -EINVAL;
	}

	emd_vc = 0xFF;
	emd_dt = 0;
	dev->hdr.sensor = NULL;
	get_remote_mipi_sensor(dev, &mipi_sensor, MEDIA_ENT_F_CAM_SENSOR);
	if (mipi_sensor) {
		ctrl = v4l2_ctrl_find(mipi_sensor->ctrl_handler,
				      CIFISP_CID_EMB_VC);
		if (ctrl)
			emd_vc = v4l2_ctrl_g_ctrl(ctrl);

		ctrl = v4l2_ctrl_find(mipi_sensor->ctrl_handler,
				      CIFISP_CID_EMB_DT);
		if (ctrl)
			emd_dt = v4l2_ctrl_g_ctrl(ctrl);
		dev->hdr.sensor = mipi_sensor;
	}

	dev->emd_dt = emd_dt;
	dev->emd_vc = emd_vc;
	dev->emd_data_idx = 0;
	if (emd_vc <= CIF_ISP_ADD_DATA_VC_MAX) {
		for (i = 0; i < RKISP_EMDDATA_FIFO_MAX; i++) {
			ret = kfifo_alloc(&dev->emd_data_fifo[i].mipi_kfifo,
					  CIFISP_ADD_DATA_FIFO_SIZE,
					  GFP_ATOMIC);
			if (ret) {
				v4l2_err(&dev->v4l2_dev,
					 "kfifo_alloc failed with error %d\n",
					 ret);
				return ret;
			}
		}
	}

	if (dev->isp_ver == ISP_V13 ||
	    dev->isp_ver == ISP_V12) {
		/* lanes */
		rkisp_write(dev, CIF_ISP_CSI0_CTRL1, lanes - 1, true);

		/* linecnt */
		rkisp_write(dev, CIF_ISP_CSI0_CTRL2, 0x3FFF, true);

		/* Configure Data Type and Virtual Channel */
		rkisp_write(dev, CIF_ISP_CSI0_DATA_IDS_1,
			    csi->mipi_di[0] | csi->mipi_di[1] << 8, true);

		/* clear interrupts state */
		rkisp_read(dev, CIF_ISP_CSI0_ERR1, true);
		rkisp_read(dev, CIF_ISP_CSI0_ERR2, true);
		rkisp_read(dev, CIF_ISP_CSI0_ERR3, true);
		/* set interrupts mask */
		rkisp_write(dev, CIF_ISP_CSI0_MASK1, 0x1FFFFFF0, true);
		rkisp_write(dev, CIF_ISP_CSI0_MASK2, 0x03FFFFFF, true);
		rkisp_write(dev, CIF_ISP_CSI0_MASK3,
			    CIF_ISP_CSI0_IMASK_FRAME_END(0x3F) |
			    CIF_ISP_CSI0_IMASK_RAW0_OUT_V_END |
			    CIF_ISP_CSI0_IMASK_RAW1_OUT_V_END |
			    CIF_ISP_CSI0_IMASK_LINECNT, true);

		v4l2_dbg(1, rkisp_debug, &dev->v4l2_dev,
			 "CSI0_CTRL1 0x%08x\n"
			 "CSI0_IDS 0x%08x\n"
			 "CSI0_MASK3 0x%08x\n",
			 rkisp_read(dev, CIF_ISP_CSI0_CTRL1, true),
			 rkisp_read(dev, CIF_ISP_CSI0_DATA_IDS_1, true),
			 rkisp_read(dev, CIF_ISP_CSI0_MASK3, true));
	} else if (dev->isp_ver == ISP_V20 || dev->isp_ver == ISP_V21) {
		bool is_feature_on = dev->hw_dev->is_feature_on;
		u64 iq_feature = dev->hw_dev->iq_feature;
		struct rkmodule_hdr_cfg hdr_cfg;
		u32 val, mask;

		dev->hdr.op_mode = HDR_NORMAL;
		dev->hdr.esp_mode = HDR_NORMAL_VC;
		if (mipi_sensor) {
			ret = v4l2_subdev_call(mipi_sensor,
					       core, ioctl,
					       RKMODULE_GET_HDR_CFG,
					       &hdr_cfg);
			if (!ret) {
				dev->hdr.op_mode = hdr_cfg.hdr_mode;
				dev->hdr.esp_mode = hdr_cfg.esp.mode;
			}
		}

		/* normal read back mode */
		if (dev->hdr.op_mode == HDR_NORMAL &&
		    (dev->isp_inp & INP_RAWRD2 || !dev->hw_dev->is_single))
			dev->hdr.op_mode = HDR_RDBK_FRAME1;
		/* HDR on the fly for isp21 */
		if (dev->isp_ver == ISP_V21 && !(dev->isp_inp & INP_RAWRD2))
			if (dev->hdr.op_mode == HDR_RDBK_FRAME2)
				dev->hdr.op_mode = HDR_LINEX2_DDR;

		/* op_mode update by mi_cfg_upd */
		if (!dev->hw_dev->is_mi_update)
			rkisp_write(dev, CSI2RX_CTRL0,
				    SW_IBUF_OP_MODE(dev->hdr.op_mode) |
				    SW_HDR_ESP_MODE(dev->hdr.esp_mode), true);
		rkisp_write(dev, CSI2RX_CTRL1, lanes - 1, true);
		rkisp_write(dev, CSI2RX_CTRL2, 0x3FFF, true);
		val = SW_CSI_ID1(csi->mipi_di[1]) |
		      SW_CSI_ID2(csi->mipi_di[2]) |
		      SW_CSI_ID3(csi->mipi_di[3]);
		mask = SW_CSI_ID1(0xff) | SW_CSI_ID2(0xff) | SW_CSI_ID3(0xff);
		/* CSI_ID0 is for dmarx when read back mode */
		if (dev->hw_dev->is_single) {
			val |= SW_CSI_ID0(csi->mipi_di[0]);
			rkisp_write(dev, CSI2RX_DATA_IDS_1, val, true);
		} else {
			rkisp_set_bits(dev, CSI2RX_DATA_IDS_1, mask, val, true);
			for (i = 0; i < dev->hw_dev->dev_num; i++)
				rkisp_set_bits(dev->hw_dev->isp[i],
					CSI2RX_DATA_IDS_1, mask, val, false);
		}
		val = SW_CSI_ID4(csi->mipi_di[4]);
		rkisp_write(dev, CSI2RX_DATA_IDS_2, val, true);
		/* clear interrupts state */
		rkisp_read(dev, CSI2RX_ERR_PHY, true);
		/* set interrupts mask */
		val = PHY_ERR_SOTHS | PHY_ERR_SOTSYNCHS |
			PHY_ERR_EOTSYNCHS | PHY_ERR_ESC | PHY_ERR_CTL;
		rkisp_write(dev, CSI2RX_MASK_PHY, val, true);
		val = PACKET_ERR_F_BNDRY_MATCG | PACKET_ERR_F_SEQ |
			PACKET_ERR_FRAME_DATA | PACKET_ERR_ECC_1BIT |
			PACKET_ERR_ECC_2BIT | PACKET_ERR_CHECKSUM;
		rkisp_write(dev, CSI2RX_MASK_PACKET, val, true);
		val = AFIFO0_OVERFLOW | AFIFO1X_OVERFLOW |
			LAFIFO1X_OVERFLOW | AFIFO2X_OVERFLOW |
			IBUFX3_OVERFLOW | IBUF3R_OVERFLOW |
			Y_STAT_AFIFOX3_OVERFLOW;
		rkisp_write(dev, CSI2RX_MASK_OVERFLOW, val, true);
		val = RAW0_WR_FRAME | RAW1_WR_FRAME | RAW2_WR_FRAME |
			MIPI_DROP_FRM | RAW_WR_SIZE_ERR | MIPI_LINECNT |
			RAW_RD_SIZE_ERR | RAW0_Y_STATE |
			RAW1_Y_STATE | RAW2_Y_STATE;
		rkisp_write(dev, CSI2RX_MASK_STAT, val, true);

		/* hdr merge */
		switch (dev->hdr.op_mode) {
		case HDR_RDBK_FRAME2:
		case HDR_FRAMEX2_DDR:
		case HDR_LINEX2_DDR:
		case HDR_LINEX2_NO_DDR:
			val = SW_HDRMGE_EN |
			      SW_HDRMGE_MODE_FRAMEX2;
			break;
		case HDR_RDBK_FRAME3:
		case HDR_FRAMEX3_DDR:
		case HDR_LINEX3_DDR:
			val = SW_HDRMGE_EN |
			      SW_HDRMGE_MODE_FRAMEX3;
			break;
		default:
			val = 0;
		}
		if (is_feature_on) {
			if ((ISP2X_MODULE_HDRMGE & ~iq_feature) && (val & SW_HDRMGE_EN)) {
				v4l2_err(&dev->v4l2_dev, "hdrmge is not supported\n");
				return -EINVAL;
			}
		}
		rkisp_write(dev, ISP_HDRMGE_BASE, val, false);

		v4l2_dbg(1, rkisp_debug, &dev->v4l2_dev,
			 "CSI2RX_IDS 0x%08x 0x%08x\n",
			 rkisp_read(dev, CSI2RX_DATA_IDS_1, true),
			 rkisp_read(dev, CSI2RX_DATA_IDS_2, true));
	} else {
		mipi_ctrl = CIF_MIPI_CTRL_NUM_LANES(lanes - 1) |
			    CIF_MIPI_CTRL_SHUTDOWNLANES(0xf) |
			    CIF_MIPI_CTRL_ERR_SOT_SYNC_HS_SKIP |
			    CIF_MIPI_CTRL_CLOCKLANE_ENA;

		rkisp_write(dev, CIF_MIPI_CTRL, mipi_ctrl, true);

		/* Configure Data Type and Virtual Channel */
		rkisp_write(dev, CIF_MIPI_IMG_DATA_SEL,
			    csi->mipi_di[0], true);

		rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_1,
			    CIF_MIPI_DATA_SEL_DT(emd_dt) |
			    CIF_MIPI_DATA_SEL_VC(emd_vc), true);
		rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_2,
			    CIF_MIPI_DATA_SEL_DT(emd_dt) |
			    CIF_MIPI_DATA_SEL_VC(emd_vc), true);
		rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_3,
			    CIF_MIPI_DATA_SEL_DT(emd_dt) |
			    CIF_MIPI_DATA_SEL_VC(emd_vc), true);
		rkisp_write(dev, CIF_MIPI_ADD_DATA_SEL_4,
			    CIF_MIPI_DATA_SEL_DT(emd_dt) |
			    CIF_MIPI_DATA_SEL_VC(emd_vc), true);

		/* Clear MIPI interrupts */
		rkisp_write(dev, CIF_MIPI_ICR, ~0, true);
		/*
		 * Disable the CIF_MIPI_ERR_DPHY interrupt here temporarily:
		 * the isp bus may be dead when switching isp.
		 */
		rkisp_write(dev, CIF_MIPI_IMSC,
			    CIF_MIPI_FRAME_END | CIF_MIPI_ERR_CSI |
			    CIF_MIPI_ERR_DPHY | CIF_MIPI_SYNC_FIFO_OVFLW(0x0F) |
			    CIF_MIPI_ADD_DATA_OVFLW, true);

		v4l2_dbg(1, rkisp_debug, &dev->v4l2_dev,
			 "\n  MIPI_CTRL 0x%08x\n"
			 "  MIPI_IMG_DATA_SEL 0x%08x\n"
			 "  MIPI_STATUS 0x%08x\n"
			 "  MIPI_IMSC 0x%08x\n",
			 rkisp_read(dev, CIF_MIPI_CTRL, true),
			 rkisp_read(dev, CIF_MIPI_IMG_DATA_SEL, true),
			 rkisp_read(dev, CIF_MIPI_STATUS, true),
			 rkisp_read(dev, CIF_MIPI_IMSC, true));
	}

	return 0;
}

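/*
 * Configure the CSI path for the selected input: full receiver setup for
 * INP_CSI, HDR/read-back mode negotiation with the VICAP subdev for
 * INP_CIF, or a read-back mode derived from the enabled RAWRD inputs.
 * Read-back bookkeeping counters are reset in all cases.
 */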
int rkisp_csi_config_patch(struct rkisp_device *dev)
{
	int val = 0, ret = 0;
	struct v4l2_subdev *mipi_sensor;
	bool is_feature_on = dev->hw_dev->is_feature_on;
	u64 iq_feature = dev->hw_dev->iq_feature;

	if (dev->isp_inp & INP_CSI) {
		dev->hw_dev->mipi_dev_id = dev->dev_id;
		ret = csi_config(&dev->csi_dev);
	} else {
		if (dev->isp_inp & INP_CIF) {
			struct rkmodule_hdr_cfg hdr_cfg;
			struct rkisp_vicap_mode mode = {
				.name = dev->name,
				.is_rdbk = true,
			};

			get_remote_mipi_sensor(dev, &mipi_sensor, MEDIA_ENT_F_PROC_VIDEO_COMPOSER);
			dev->hdr.op_mode = HDR_NORMAL;
			dev->hdr.esp_mode = HDR_NORMAL_VC;
			if (mipi_sensor) {
				ret = v4l2_subdev_call(mipi_sensor,
						       core, ioctl,
						       RKMODULE_GET_HDR_CFG,
						       &hdr_cfg);
				if (!ret) {
					dev->hdr.op_mode = hdr_cfg.hdr_mode;
					dev->hdr.esp_mode = hdr_cfg.esp.mode;
				}
			}

			/* normal read back mode for V2X */
			if (dev->hdr.op_mode == HDR_NORMAL)
				dev->hdr.op_mode = HDR_RDBK_FRAME1;

			if (dev->isp_inp == INP_CIF && dev->hw_dev->is_single)
				mode.is_rdbk = false;
			v4l2_subdev_call(mipi_sensor, core, ioctl,
					 RKISP_VICAP_CMD_MODE, &mode);
			/* vicap direct to isp */
			if (dev->isp_ver == ISP_V30 && !mode.is_rdbk) {
				switch (dev->hdr.op_mode) {
				case HDR_RDBK_FRAME3:
					dev->hdr.op_mode = HDR_LINEX3_DDR;
					break;
				case HDR_RDBK_FRAME2:
					dev->hdr.op_mode = HDR_LINEX2_DDR;
					break;
				default:
					dev->hdr.op_mode = HDR_NORMAL;
				}
				if (dev->hdr.op_mode != HDR_NORMAL && mipi_sensor) {
					int cnt = RKISP_VICAP_BUF_CNT;

					v4l2_subdev_call(mipi_sensor, core, ioctl,
							 RKISP_VICAP_CMD_INIT_BUF, &cnt);
				}
			}
		} else {
			switch (dev->isp_inp & 0x7) {
			case INP_RAWRD2 | INP_RAWRD0:
				dev->hdr.op_mode = HDR_RDBK_FRAME2;
				break;
			case INP_RAWRD2 | INP_RAWRD1 | INP_RAWRD0:
				dev->hdr.op_mode = HDR_RDBK_FRAME3;
				break;
			default: //INP_RAWRD2
				dev->hdr.op_mode = HDR_RDBK_FRAME1;
			}
		}

		if (!dev->hw_dev->is_mi_update)
			rkisp_unite_write(dev, CSI2RX_CTRL0,
					  SW_IBUF_OP_MODE(dev->hdr.op_mode),
					  true, dev->hw_dev->is_unite);

		/* hdr merge */
		switch (dev->hdr.op_mode) {
		case HDR_RDBK_FRAME2:
		case HDR_FRAMEX2_DDR:
		case HDR_LINEX2_DDR:
		case HDR_LINEX2_NO_DDR:
			val = SW_HDRMGE_EN | SW_HDRMGE_MODE_FRAMEX2;
			break;
		case HDR_RDBK_FRAME3:
		case HDR_FRAMEX3_DDR:
		case HDR_LINEX3_DDR:
			val = SW_HDRMGE_EN | SW_HDRMGE_MODE_FRAMEX3;
			break;
		default:
			val = 0;
		}
		if (is_feature_on) {
			if ((ISP2X_MODULE_HDRMGE & ~iq_feature) && (val & SW_HDRMGE_EN)) {
				v4l2_err(&dev->v4l2_dev, "hdrmge is not supported\n");
				return -EINVAL;
			}
		}
		rkisp_unite_write(dev, ISP_HDRMGE_BASE, val, false, dev->hw_dev->is_unite);

		rkisp_unite_set_bits(dev, CSI2RX_MASK_STAT, 0, RAW_RD_SIZE_ERR,
				     true, dev->hw_dev->is_unite);
	}

	if (IS_HDR_RDBK(dev->hdr.op_mode))
		rkisp_unite_set_bits(dev, CTRL_SWS_CFG, 0, SW_MPIP_DROP_FRM_DIS,
				     true, dev->hw_dev->is_unite);

	if (dev->isp_ver == ISP_V30)
		rkisp_unite_set_bits(dev, CTRL_SWS_CFG, 0, ISP3X_SW_ACK_FRM_PRO_DIS,
				     true, dev->hw_dev->is_unite);

	memset(dev->filt_state, 0, sizeof(dev->filt_state));
	dev->rdbk_cnt = -1;
	dev->rdbk_cnt_x1 = -1;
	dev->rdbk_cnt_x2 = -1;
	dev->rdbk_cnt_x3 = -1;
	dev->rd_mode = dev->hdr.op_mode;

	return ret;
}

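/*
 * Queue the ISP SOF event only for the DMA channel carrying the long
 * frame of the current HDR mode, so one event is generated per frame.
 */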
void rkisp_csi_sof(struct rkisp_device *dev, u8 id)
{
	/* to get long frame vc_start */
	switch (dev->hdr.op_mode) {
	case HDR_RDBK_FRAME1:
		if (id != HDR_DMA2)
			return;
		break;
	case HDR_RDBK_FRAME2:
	case HDR_FRAMEX2_DDR:
	case HDR_LINEX2_DDR:
		if (id != HDR_DMA0)
			return;
		break;
	case HDR_RDBK_FRAME3:
	case HDR_FRAMEX3_DDR:
	case HDR_LINEX3_DDR:
		if (id != HDR_DMA1)
			return;
		break;
	default:
		return;
	}

	rkisp_isp_queue_event_sof(&dev->isp_sdev);
}

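/*
 * Create and register the CSI subdev: a sink pad plus one source pad,
 * with the extra DMA-TX source pads on ISP_V20/V21. ISP_V30 has no CSI
 * subdev and returns early without registering anything.
 */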
int rkisp_register_csi_subdev(struct rkisp_device *dev,
			       struct v4l2_device *v4l2_dev)
{
	struct rkisp_csi_device *csi_dev = &dev->csi_dev;
	struct v4l2_subdev *sd;
	int ret;

	memset(csi_dev, 0, sizeof(*csi_dev));
	csi_dev->ispdev = dev;
	sd = &csi_dev->sd;

	v4l2_subdev_init(sd, &rkisp_csi_ops);
	sd->flags |= V4L2_SUBDEV_FL_HAS_DEVNODE;
	sd->entity.ops = &rkisp_csi_media_ops;
	sd->entity.function = MEDIA_ENT_F_V4L2_SUBDEV_UNKNOWN;
	snprintf(sd->name, sizeof(sd->name), CSI_DEV_NAME);

	csi_dev->pads[CSI_SINK].flags =
		MEDIA_PAD_FL_SINK | MEDIA_PAD_FL_MUST_CONNECT;
	csi_dev->pads[CSI_SRC_CH0].flags =
		MEDIA_PAD_FL_SOURCE | MEDIA_PAD_FL_MUST_CONNECT;

	csi_dev->max_pad = CSI_SRC_CH0 + 1;
	if (dev->isp_ver == ISP_V20 || dev->isp_ver == ISP_V21) {
		csi_dev->max_pad = CSI_PAD_MAX;
		csi_dev->pads[CSI_SRC_CH1].flags = MEDIA_PAD_FL_SOURCE;
		csi_dev->pads[CSI_SRC_CH2].flags = MEDIA_PAD_FL_SOURCE;
		csi_dev->pads[CSI_SRC_CH3].flags = MEDIA_PAD_FL_SOURCE;
		csi_dev->pads[CSI_SRC_CH4].flags = MEDIA_PAD_FL_SOURCE;
	} else if (dev->isp_ver == ISP_V30) {
		return 0;
	}

	ret = media_entity_pads_init(&sd->entity, csi_dev->max_pad,
				     csi_dev->pads);
	if (ret < 0)
		return ret;

	sd->owner = THIS_MODULE;
	v4l2_set_subdevdata(sd, csi_dev);
	sd->grp_id = GRP_ID_CSI;
	ret = v4l2_device_register_subdev(v4l2_dev, sd);
	if (ret < 0) {
		v4l2_err(v4l2_dev, "Failed to register csi subdev\n");
		goto free_media;
	}

	return 0;
free_media:
	media_entity_cleanup(&sd->entity);
	return ret;
}

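/* Tear down the CSI subdev created by rkisp_register_csi_subdev(). */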
void rkisp_unregister_csi_subdev(struct rkisp_device *dev)
{
	struct v4l2_subdev *sd = &dev->csi_dev.sd;

	v4l2_device_unregister_subdev(sd);
	media_entity_cleanup(&sd->entity);
}
638