// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Intel Corporation

#include <linux/module.h>
#include <linux/pm_runtime.h>

#include <media/v4l2-event.h>
#include <media/v4l2-ioctl.h>

#include "ipu3.h"
#include "ipu3-dmamap.h"

/******************** v4l2_subdev_ops ********************/

#define IPU3_RUNNING_MODE_VIDEO		0
#define IPU3_RUNNING_MODE_STILL		1

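/*
 * Initialize the TRY formats and the TRY crop/compose rectangles from the
 * currently configured input node format when the subdev node is opened.
 */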
static int imgu_subdev_open(struct v4l2_subdev *sd, struct v4l2_subdev_fh *fh)
{
	struct imgu_v4l2_subdev *imgu_sd = container_of(sd,
							struct imgu_v4l2_subdev,
							subdev);
	struct imgu_device *imgu = v4l2_get_subdevdata(sd);
	struct imgu_media_pipe *imgu_pipe = &imgu->imgu_pipe[imgu_sd->pipe];
	struct v4l2_rect try_crop = {
		.top = 0,
		.left = 0,
	};
	unsigned int i;

	try_crop.width =
		imgu_pipe->nodes[IMGU_NODE_IN].vdev_fmt.fmt.pix_mp.width;
	try_crop.height =
		imgu_pipe->nodes[IMGU_NODE_IN].vdev_fmt.fmt.pix_mp.height;

	/* Initialize try_fmt */
	for (i = 0; i < IMGU_NODE_NUM; i++) {
		struct v4l2_mbus_framefmt *try_fmt =
			v4l2_subdev_get_try_format(sd, fh->state, i);

		try_fmt->width = try_crop.width;
		try_fmt->height = try_crop.height;
		try_fmt->code = imgu_pipe->nodes[i].pad_fmt.code;
		try_fmt->field = V4L2_FIELD_NONE;
	}

	*v4l2_subdev_get_try_crop(sd, fh->state, IMGU_NODE_IN) = try_crop;
	*v4l2_subdev_get_try_compose(sd, fh->state, IMGU_NODE_IN) = try_crop;

	return 0;
}

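/*
 * Start or stop streaming on the subdev. On start, pick the CSS pipe mode
 * from the "IPU3 Pipe Mode" control, collect the formats of the enabled
 * queues and the effective/BDS/GDC rectangles and program them into the CSS.
 * On stop, only release the grabbed control and mark the subdev inactive.
 */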
static int imgu_subdev_s_stream(struct v4l2_subdev *sd, int enable)
{
	int i;
	unsigned int node;
	int r = 0;
	struct imgu_device *imgu = v4l2_get_subdevdata(sd);
	struct imgu_v4l2_subdev *imgu_sd = container_of(sd,
							struct imgu_v4l2_subdev,
							subdev);
	unsigned int pipe = imgu_sd->pipe;
	struct device *dev = &imgu->pci_dev->dev;
	struct v4l2_pix_format_mplane *fmts[IPU3_CSS_QUEUES] = { NULL };
	struct v4l2_rect *rects[IPU3_CSS_RECTS] = { NULL };
	struct imgu_css_pipe *css_pipe = &imgu->css.pipes[pipe];
	struct imgu_media_pipe *imgu_pipe = &imgu->imgu_pipe[pipe];

	dev_dbg(dev, "%s %d for pipe %u", __func__, enable, pipe);
	/* grab ctrl after streamon and return after off */
	v4l2_ctrl_grab(imgu_sd->ctrl, enable);

	if (!enable) {
		imgu_sd->active = false;
		return 0;
	}

	for (i = 0; i < IMGU_NODE_NUM; i++)
		imgu_pipe->queue_enabled[i] = imgu_pipe->nodes[i].enabled;

	/* This is handled specially */
	imgu_pipe->queue_enabled[IPU3_CSS_QUEUE_PARAMS] = false;

	/* Initialize CSS formats */
	for (i = 0; i < IPU3_CSS_QUEUES; i++) {
		node = imgu_map_node(imgu, i);
		/* No need to reconfig meta nodes */
		if (node == IMGU_NODE_STAT_3A || node == IMGU_NODE_PARAMS)
			continue;
		fmts[i] = imgu_pipe->queue_enabled[node] ?
			&imgu_pipe->nodes[node].vdev_fmt.fmt.pix_mp : NULL;
	}

	/* Enable VF output only when VF queue requested by user */
	css_pipe->vf_output_en = false;
	if (imgu_pipe->nodes[IMGU_NODE_VF].enabled)
		css_pipe->vf_output_en = true;

	if (atomic_read(&imgu_sd->running_mode) == IPU3_RUNNING_MODE_VIDEO)
		css_pipe->pipe_id = IPU3_CSS_PIPE_ID_VIDEO;
	else
		css_pipe->pipe_id = IPU3_CSS_PIPE_ID_CAPTURE;

	dev_dbg(dev, "IPU3 pipe %u pipe_id %u", pipe, css_pipe->pipe_id);

	rects[IPU3_CSS_RECT_EFFECTIVE] = &imgu_sd->rect.eff;
	rects[IPU3_CSS_RECT_BDS] = &imgu_sd->rect.bds;
	rects[IPU3_CSS_RECT_GDC] = &imgu_sd->rect.gdc;

	r = imgu_css_fmt_set(&imgu->css, fmts, rects, pipe);
	if (r) {
		dev_err(dev, "failed to set initial formats pipe %u with (%d)",
			pipe, r);
		return r;
	}

	imgu_sd->active = true;

	return 0;
}

static int imgu_subdev_get_fmt(struct v4l2_subdev *sd,
			       struct v4l2_subdev_state *sd_state,
			       struct v4l2_subdev_format *fmt)
{
	struct imgu_device *imgu = v4l2_get_subdevdata(sd);
	struct v4l2_mbus_framefmt *mf;
	struct imgu_media_pipe *imgu_pipe;
	u32 pad = fmt->pad;
	struct imgu_v4l2_subdev *imgu_sd = container_of(sd,
							struct imgu_v4l2_subdev,
							subdev);
	unsigned int pipe = imgu_sd->pipe;

	imgu_pipe = &imgu->imgu_pipe[pipe];
	if (fmt->which == V4L2_SUBDEV_FORMAT_ACTIVE) {
		fmt->format = imgu_pipe->nodes[pad].pad_fmt;
	} else {
		mf = v4l2_subdev_get_try_format(sd, sd_state, pad);
		fmt->format = *mf;
	}

	return 0;
}

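/*
 * Set the pad format. The media bus code is kept as-is and the width and
 * height are clamped to the IPU3 input or output limits, depending on
 * whether the pad is a sink or a source.
 */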
static int imgu_subdev_set_fmt(struct v4l2_subdev *sd,
			       struct v4l2_subdev_state *sd_state,
			       struct v4l2_subdev_format *fmt)
{
	struct imgu_media_pipe *imgu_pipe;
	struct imgu_device *imgu = v4l2_get_subdevdata(sd);
	struct imgu_v4l2_subdev *imgu_sd = container_of(sd,
							struct imgu_v4l2_subdev,
							subdev);
	struct v4l2_mbus_framefmt *mf;
	u32 pad = fmt->pad;
	unsigned int pipe = imgu_sd->pipe;

	dev_dbg(&imgu->pci_dev->dev, "set subdev %u pad %u fmt to [%ux%u]",
		pipe, pad, fmt->format.width, fmt->format.height);

	imgu_pipe = &imgu->imgu_pipe[pipe];
	if (fmt->which == V4L2_SUBDEV_FORMAT_TRY)
		mf = v4l2_subdev_get_try_format(sd, sd_state, pad);
	else
		mf = &imgu_pipe->nodes[pad].pad_fmt;

	fmt->format.code = mf->code;
	/* Clamp the w and h based on the hardware capabilities */
	if (imgu_sd->subdev_pads[pad].flags & MEDIA_PAD_FL_SOURCE) {
		fmt->format.width = clamp(fmt->format.width,
					  IPU3_OUTPUT_MIN_WIDTH,
					  IPU3_OUTPUT_MAX_WIDTH);
		fmt->format.height = clamp(fmt->format.height,
					   IPU3_OUTPUT_MIN_HEIGHT,
					   IPU3_OUTPUT_MAX_HEIGHT);
	} else {
		fmt->format.width = clamp(fmt->format.width,
					  IPU3_INPUT_MIN_WIDTH,
					  IPU3_INPUT_MAX_WIDTH);
		fmt->format.height = clamp(fmt->format.height,
					   IPU3_INPUT_MIN_HEIGHT,
					   IPU3_INPUT_MAX_HEIGHT);
	}

	*mf = fmt->format;

	return 0;
}

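/*
 * Helpers returning the crop (effective) and compose (BDS) rectangles:
 * TRY rectangles live in the subdev state, ACTIVE ones in the subdev itself.
 */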
static struct v4l2_rect *
imgu_subdev_get_crop(struct imgu_v4l2_subdev *sd,
		     struct v4l2_subdev_state *sd_state, unsigned int pad,
		     enum v4l2_subdev_format_whence which)
{
	if (which == V4L2_SUBDEV_FORMAT_TRY)
		return v4l2_subdev_get_try_crop(&sd->subdev, sd_state, pad);
	else
		return &sd->rect.eff;
}

static struct v4l2_rect *
imgu_subdev_get_compose(struct imgu_v4l2_subdev *sd,
			struct v4l2_subdev_state *sd_state, unsigned int pad,
			enum v4l2_subdev_format_whence which)
{
	if (which == V4L2_SUBDEV_FORMAT_TRY)
		return v4l2_subdev_get_try_compose(&sd->subdev, sd_state, pad);
	else
		return &sd->rect.bds;
}

static int imgu_subdev_get_selection(struct v4l2_subdev *sd,
				     struct v4l2_subdev_state *sd_state,
				     struct v4l2_subdev_selection *sel)
{
	struct imgu_v4l2_subdev *imgu_sd =
		container_of(sd, struct imgu_v4l2_subdev, subdev);

	if (sel->pad != IMGU_NODE_IN)
		return -EINVAL;

	switch (sel->target) {
	case V4L2_SEL_TGT_CROP:
		sel->r = *imgu_subdev_get_crop(imgu_sd, sd_state, sel->pad,
					       sel->which);
		return 0;
	case V4L2_SEL_TGT_COMPOSE:
		sel->r = *imgu_subdev_get_compose(imgu_sd, sd_state, sel->pad,
						  sel->which);
		return 0;
	default:
		return -EINVAL;
	}
}

static int imgu_subdev_set_selection(struct v4l2_subdev *sd,
				     struct v4l2_subdev_state *sd_state,
				     struct v4l2_subdev_selection *sel)
{
	struct imgu_device *imgu = v4l2_get_subdevdata(sd);
	struct imgu_v4l2_subdev *imgu_sd =
		container_of(sd, struct imgu_v4l2_subdev, subdev);
	struct v4l2_rect *rect;

	dev_dbg(&imgu->pci_dev->dev,
		"set subdev %u sel which %u target 0x%4x rect [%ux%u]",
		imgu_sd->pipe, sel->which, sel->target,
		sel->r.width, sel->r.height);

	if (sel->pad != IMGU_NODE_IN)
		return -EINVAL;

	switch (sel->target) {
	case V4L2_SEL_TGT_CROP:
		rect = imgu_subdev_get_crop(imgu_sd, sd_state, sel->pad,
					    sel->which);
		break;
	case V4L2_SEL_TGT_COMPOSE:
		rect = imgu_subdev_get_compose(imgu_sd, sd_state, sel->pad,
					       sel->which);
		break;
	default:
		return -EINVAL;
	}

	*rect = sel->r;
	return 0;
}

/******************** media_entity_operations ********************/

static int imgu_link_setup(struct media_entity *entity,
			   const struct media_pad *local,
			   const struct media_pad *remote, u32 flags)
{
	struct imgu_media_pipe *imgu_pipe;
	struct v4l2_subdev *sd = container_of(entity, struct v4l2_subdev,
					      entity);
	struct imgu_device *imgu = v4l2_get_subdevdata(sd);
	struct imgu_v4l2_subdev *imgu_sd = container_of(sd,
							struct imgu_v4l2_subdev,
							subdev);
	unsigned int pipe = imgu_sd->pipe;
	u32 pad = local->index;

	WARN_ON(pad >= IMGU_NODE_NUM);

	dev_dbg(&imgu->pci_dev->dev, "pipe %u pad %u is %s", pipe, pad,
		flags & MEDIA_LNK_FL_ENABLED ? "enabled" : "disabled");

	imgu_pipe = &imgu->imgu_pipe[pipe];
	imgu_pipe->nodes[pad].enabled = flags & MEDIA_LNK_FL_ENABLED;

	/* enable input node to enable the pipe */
	if (pad != IMGU_NODE_IN)
		return 0;

	if (flags & MEDIA_LNK_FL_ENABLED)
		__set_bit(pipe, imgu->css.enabled_pipes);
	else
		__clear_bit(pipe, imgu->css.enabled_pipes);

	dev_dbg(&imgu->pci_dev->dev, "pipe %u is %s", pipe,
		flags & MEDIA_LNK_FL_ENABLED ? "enabled" : "disabled");

	return 0;
}

/******************** vb2_ops ********************/

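/*
 * Map the buffer's scatter-gather list into IMGU DMA address space when the
 * buffer is first initialized. The parameters queue is not DMA-mapped here.
 */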
static int imgu_vb2_buf_init(struct vb2_buffer *vb)
{
	struct sg_table *sg = vb2_dma_sg_plane_desc(vb, 0);
	struct imgu_device *imgu = vb2_get_drv_priv(vb->vb2_queue);
	struct imgu_buffer *buf = container_of(vb,
		struct imgu_buffer, vid_buf.vbb.vb2_buf);
	struct imgu_video_device *node =
		container_of(vb->vb2_queue, struct imgu_video_device, vbq);
	unsigned int queue = imgu_node_to_queue(node->id);

	if (queue == IPU3_CSS_QUEUE_PARAMS)
		return 0;

	return imgu_dmamap_map_sg(imgu, sg->sgl, sg->nents, &buf->map);
}

/* Called when each buffer is freed */
static void imgu_vb2_buf_cleanup(struct vb2_buffer *vb)
{
	struct imgu_device *imgu = vb2_get_drv_priv(vb->vb2_queue);
	struct imgu_buffer *buf = container_of(vb,
		struct imgu_buffer, vid_buf.vbb.vb2_buf);
	struct imgu_video_device *node =
		container_of(vb->vb2_queue, struct imgu_video_device, vbq);
	unsigned int queue = imgu_node_to_queue(node->id);

	if (queue == IPU3_CSS_QUEUE_PARAMS)
		return;

	imgu_dmamap_unmap(imgu, &buf->map);
}

/* Transfer buffer ownership to me */
static void imgu_vb2_buf_queue(struct vb2_buffer *vb)
{
	struct imgu_device *imgu = vb2_get_drv_priv(vb->vb2_queue);
	struct imgu_video_device *node =
		container_of(vb->vb2_queue, struct imgu_video_device, vbq);
	unsigned int queue = imgu_node_to_queue(node->id);
	struct imgu_buffer *buf = container_of(vb, struct imgu_buffer,
					       vid_buf.vbb.vb2_buf);
	unsigned long need_bytes;
	unsigned long payload = vb2_get_plane_payload(vb, 0);

	if (vb->vb2_queue->type == V4L2_BUF_TYPE_META_CAPTURE ||
	    vb->vb2_queue->type == V4L2_BUF_TYPE_META_OUTPUT)
		need_bytes = node->vdev_fmt.fmt.meta.buffersize;
	else
		need_bytes = node->vdev_fmt.fmt.pix_mp.plane_fmt[0].sizeimage;

	if (queue == IPU3_CSS_QUEUE_PARAMS && payload && payload < need_bytes) {
		dev_err(&imgu->pci_dev->dev, "invalid data size for params.");
		vb2_buffer_done(vb, VB2_BUF_STATE_ERROR);
		return;
	}

	mutex_lock(&imgu->lock);
	if (queue != IPU3_CSS_QUEUE_PARAMS)
		imgu_css_buf_init(&buf->css_buf, queue, buf->map.daddr);

	list_add_tail(&buf->vid_buf.list, &node->buffers);
	mutex_unlock(&imgu->lock);

	vb2_set_plane_payload(vb, 0, need_bytes);

	mutex_lock(&imgu->streaming_lock);
	if (imgu->streaming)
		imgu_queue_buffers(imgu, false, node->pipe);
	mutex_unlock(&imgu->streaming_lock);

	dev_dbg(&imgu->pci_dev->dev, "%s for pipe %u node %u", __func__,
		node->pipe, node->id);
}

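/*
 * Negotiate the buffer count and single-plane size for the queue. The plane
 * size comes from the node's current format (meta buffersize or sizeimage).
 */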
static int imgu_vb2_queue_setup(struct vb2_queue *vq,
				unsigned int *num_buffers,
				unsigned int *num_planes,
				unsigned int sizes[],
				struct device *alloc_devs[])
{
	struct imgu_device *imgu = vb2_get_drv_priv(vq);
	struct imgu_video_device *node =
		container_of(vq, struct imgu_video_device, vbq);
	const struct v4l2_format *fmt = &node->vdev_fmt;
	unsigned int size;

	*num_buffers = clamp_val(*num_buffers, 1, VB2_MAX_FRAME);
	alloc_devs[0] = &imgu->pci_dev->dev;

	if (vq->type == V4L2_BUF_TYPE_META_CAPTURE ||
	    vq->type == V4L2_BUF_TYPE_META_OUTPUT)
		size = fmt->fmt.meta.buffersize;
	else
		size = fmt->fmt.pix_mp.plane_fmt[0].sizeimage;

	if (*num_planes) {
		if (sizes[0] < size)
			return -EINVAL;
		size = sizes[0];
	}

	*num_planes = 1;
	sizes[0] = size;

	/* Initialize buffer queue */
	INIT_LIST_HEAD(&node->buffers);

	return 0;
}

/* Check if all enabled video nodes are streaming, exception ignored */
static bool imgu_all_nodes_streaming(struct imgu_device *imgu,
				     struct imgu_video_device *except)
{
	unsigned int i, pipe, p;
	struct imgu_video_device *node;
	struct device *dev = &imgu->pci_dev->dev;

	pipe = except->pipe;
	if (!test_bit(pipe, imgu->css.enabled_pipes)) {
		dev_warn(&imgu->pci_dev->dev,
			 "pipe %u link is not ready yet", pipe);
		return false;
	}

	for_each_set_bit(p, imgu->css.enabled_pipes, IMGU_MAX_PIPE_NUM) {
		for (i = 0; i < IMGU_NODE_NUM; i++) {
			node = &imgu->imgu_pipe[p].nodes[i];
			dev_dbg(dev, "%s pipe %u queue %u name %s enabled = %u",
				__func__, p, i, node->name, node->enabled);
			if (node == except)
				continue;
			if (node->enabled && !vb2_start_streaming_called(&node->vbq))
				return false;
		}
	}

	return true;
}

static void imgu_return_all_buffers(struct imgu_device *imgu,
				    struct imgu_video_device *node,
				    enum vb2_buffer_state state)
{
	struct imgu_vb2_buffer *b, *b0;

	/* Return all buffers */
	mutex_lock(&imgu->lock);
	list_for_each_entry_safe(b, b0, &node->buffers, list) {
		list_del(&b->list);
		vb2_buffer_done(&b->vbb.vb2_buf, state);
	}
	mutex_unlock(&imgu->lock);
}

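/*
 * Start the media pipeline for this node. Once every enabled node of every
 * enabled pipe is streaming, start the subdevs and then the IMGU hardware;
 * on failure, stop the pipeline and return the queued buffers.
 */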
static int imgu_vb2_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct imgu_media_pipe *imgu_pipe;
	struct imgu_device *imgu = vb2_get_drv_priv(vq);
	struct device *dev = &imgu->pci_dev->dev;
	struct imgu_video_device *node =
		container_of(vq, struct imgu_video_device, vbq);
	int r;
	unsigned int pipe;

	dev_dbg(dev, "%s node name %s pipe %u id %u", __func__,
		node->name, node->pipe, node->id);

	mutex_lock(&imgu->streaming_lock);
	if (imgu->streaming) {
		r = -EBUSY;
		mutex_unlock(&imgu->streaming_lock);
		goto fail_return_bufs;
	}
	mutex_unlock(&imgu->streaming_lock);

	if (!node->enabled) {
		dev_err(dev, "IMGU node is not enabled");
		r = -EINVAL;
		goto fail_return_bufs;
	}

	pipe = node->pipe;
	imgu_pipe = &imgu->imgu_pipe[pipe];
	atomic_set(&node->sequence, 0);
	r = video_device_pipeline_start(&node->vdev, &imgu_pipe->pipeline);
	if (r < 0)
		goto fail_return_bufs;

	if (!imgu_all_nodes_streaming(imgu, node))
		return 0;

	for_each_set_bit(pipe, imgu->css.enabled_pipes, IMGU_MAX_PIPE_NUM) {
		r = v4l2_subdev_call(&imgu->imgu_pipe[pipe].imgu_sd.subdev,
				     video, s_stream, 1);
		if (r < 0)
			goto fail_stop_pipeline;
	}

	/* Start streaming of the whole pipeline now */
	dev_dbg(dev, "IMGU streaming is ready to start");
	mutex_lock(&imgu->streaming_lock);
	r = imgu_s_stream(imgu, true);
	if (!r)
		imgu->streaming = true;
	mutex_unlock(&imgu->streaming_lock);

	return 0;

fail_stop_pipeline:
	video_device_pipeline_stop(&node->vdev);
fail_return_bufs:
	imgu_return_all_buffers(imgu, node, VB2_BUF_STATE_QUEUED);

	return r;
}

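/*
 * Stop streaming on this node: stop the subdev, stop the IMGU hardware when
 * this is the first node to stop while the pipe was streaming, return the
 * remaining buffers as errors and stop the media pipeline.
 */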
static void imgu_vb2_stop_streaming(struct vb2_queue *vq)
{
	struct imgu_media_pipe *imgu_pipe;
	struct imgu_device *imgu = vb2_get_drv_priv(vq);
	struct device *dev = &imgu->pci_dev->dev;
	struct imgu_video_device *node =
		container_of(vq, struct imgu_video_device, vbq);
	int r;
	unsigned int pipe;

	WARN_ON(!node->enabled);

	pipe = node->pipe;
	dev_dbg(dev, "Try to stream off node [%u][%u]", pipe, node->id);
	imgu_pipe = &imgu->imgu_pipe[pipe];
	r = v4l2_subdev_call(&imgu_pipe->imgu_sd.subdev, video, s_stream, 0);
	if (r)
		dev_err(&imgu->pci_dev->dev,
			"failed to stop subdev streaming\n");

	mutex_lock(&imgu->streaming_lock);
	/* Was this the first node with streaming disabled? */
	if (imgu->streaming && imgu_all_nodes_streaming(imgu, node)) {
		/* Yes, really stop streaming now */
		dev_dbg(dev, "IMGU streaming is ready to stop");
		r = imgu_s_stream(imgu, false);
		if (!r)
			imgu->streaming = false;
	}

	imgu_return_all_buffers(imgu, node, VB2_BUF_STATE_ERROR);
	mutex_unlock(&imgu->streaming_lock);

	video_device_pipeline_stop(&node->vdev);
}

/******************** v4l2_ioctl_ops ********************/

#define VID_CAPTURE	0
#define VID_OUTPUT	1
#define DEF_VID_CAPTURE	0
#define DEF_VID_OUTPUT	1

struct imgu_fmt {
	u32	fourcc;
	u16	type;	/* VID_CAPTURE or VID_OUTPUT, not both */
};

/* format descriptions for capture and preview */
static const struct imgu_fmt formats[] = {
	{ V4L2_PIX_FMT_NV12, VID_CAPTURE },
	{ V4L2_PIX_FMT_IPU3_SGRBG10, VID_OUTPUT },
	{ V4L2_PIX_FMT_IPU3_SBGGR10, VID_OUTPUT },
	{ V4L2_PIX_FMT_IPU3_SGBRG10, VID_OUTPUT },
	{ V4L2_PIX_FMT_IPU3_SRGGB10, VID_OUTPUT },
};

/* Find the first matched format, return default if not found */
static const struct imgu_fmt *find_format(struct v4l2_format *f, u32 type)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(formats); i++) {
		if (formats[i].fourcc == f->fmt.pix_mp.pixelformat &&
		    formats[i].type == type)
			return &formats[i];
	}

	return type == VID_CAPTURE ? &formats[DEF_VID_CAPTURE] :
				     &formats[DEF_VID_OUTPUT];
}

static int imgu_vidioc_querycap(struct file *file, void *fh,
				struct v4l2_capability *cap)
{
	struct imgu_device *imgu = video_drvdata(file);

	strscpy(cap->driver, IMGU_NAME, sizeof(cap->driver));
	strscpy(cap->card, IMGU_NAME, sizeof(cap->card));
	snprintf(cap->bus_info, sizeof(cap->bus_info), "PCI:%s",
		 pci_name(imgu->pci_dev));

	return 0;
}

static int enum_fmts(struct v4l2_fmtdesc *f, u32 type)
{
	unsigned int i, j;

	if (f->mbus_code != 0 && f->mbus_code != MEDIA_BUS_FMT_FIXED)
		return -EINVAL;

	for (i = j = 0; i < ARRAY_SIZE(formats); ++i) {
		if (formats[i].type == type) {
			if (j == f->index)
				break;
			++j;
		}
	}

	if (i < ARRAY_SIZE(formats)) {
		f->pixelformat = formats[i].fourcc;
		return 0;
	}

	return -EINVAL;
}

static int vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		return -EINVAL;

	return enum_fmts(f, VID_CAPTURE);
}

static int vidioc_enum_fmt_vid_out(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	if (f->type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		return -EINVAL;

	return enum_fmts(f, VID_OUTPUT);
}

/* Always propagate forward the format from the CIO2 subdev */
static int imgu_vidioc_g_fmt(struct file *file, void *fh,
			     struct v4l2_format *f)
{
	struct imgu_video_device *node = file_to_intel_imgu_node(file);

	f->fmt = node->vdev_fmt.fmt;

	return 0;
}

/*
 * Set input/output format. Unless it is just a try, this also resets
 * selections (i.e. effective and BDS resolutions) to defaults.
 */
static int imgu_fmt(struct imgu_device *imgu, unsigned int pipe, int node,
		    struct v4l2_format *f, bool try)
{
	struct device *dev = &imgu->pci_dev->dev;
	struct v4l2_pix_format_mplane *fmts[IPU3_CSS_QUEUES] = { NULL };
	struct v4l2_rect *rects[IPU3_CSS_RECTS] = { NULL };
	struct v4l2_mbus_framefmt pad_fmt;
	unsigned int i, css_q;
	int ret;
	struct imgu_css_pipe *css_pipe = &imgu->css.pipes[pipe];
	struct imgu_media_pipe *imgu_pipe = &imgu->imgu_pipe[pipe];
	struct imgu_v4l2_subdev *imgu_sd = &imgu_pipe->imgu_sd;

	dev_dbg(dev, "set fmt node [%u][%u](try = %u)", pipe, node, try);

	for (i = 0; i < IMGU_NODE_NUM; i++)
		dev_dbg(dev, "IMGU pipe %u node %u enabled = %u",
			pipe, i, imgu_pipe->nodes[i].enabled);

	if (imgu_pipe->nodes[IMGU_NODE_VF].enabled)
		css_pipe->vf_output_en = true;

	if (atomic_read(&imgu_sd->running_mode) == IPU3_RUNNING_MODE_VIDEO)
		css_pipe->pipe_id = IPU3_CSS_PIPE_ID_VIDEO;
	else
		css_pipe->pipe_id = IPU3_CSS_PIPE_ID_CAPTURE;

	dev_dbg(dev, "IPU3 pipe %u pipe_id = %u", pipe, css_pipe->pipe_id);

	css_q = imgu_node_to_queue(node);
	for (i = 0; i < IPU3_CSS_QUEUES; i++) {
		unsigned int inode = imgu_map_node(imgu, i);

		/* Skip the meta node */
		if (inode == IMGU_NODE_STAT_3A || inode == IMGU_NODE_PARAMS)
			continue;

		/* CSS expects some format on OUT queue */
		if (i != IPU3_CSS_QUEUE_OUT &&
		    !imgu_pipe->nodes[inode].enabled && !try) {
			fmts[i] = NULL;
			continue;
		}

		if (i == css_q) {
			fmts[i] = &f->fmt.pix_mp;
			continue;
		}

		if (try) {
			fmts[i] = kmemdup(&imgu_pipe->nodes[inode].vdev_fmt.fmt.pix_mp,
					  sizeof(struct v4l2_pix_format_mplane),
					  GFP_KERNEL);
			if (!fmts[i]) {
				ret = -ENOMEM;
				goto out;
			}
		} else {
			fmts[i] = &imgu_pipe->nodes[inode].vdev_fmt.fmt.pix_mp;
		}
	}

	if (!try) {
		/* Effective and BDS resolutions are set via set_selection */
		rects[IPU3_CSS_RECT_EFFECTIVE] = &imgu_sd->rect.eff;
		rects[IPU3_CSS_RECT_BDS] = &imgu_sd->rect.bds;
		rects[IPU3_CSS_RECT_GDC] = &imgu_sd->rect.gdc;

		/* Assume the pad format was set by the subdev set_fmt before */
		pad_fmt = imgu_pipe->nodes[IMGU_NODE_IN].pad_fmt;
		rects[IPU3_CSS_RECT_GDC]->width = pad_fmt.width;
		rects[IPU3_CSS_RECT_GDC]->height = pad_fmt.height;
	}

	if (!fmts[css_q]) {
		ret = -EINVAL;
		goto out;
	}

	if (try)
		ret = imgu_css_fmt_try(&imgu->css, fmts, rects, pipe);
	else
		ret = imgu_css_fmt_set(&imgu->css, fmts, rects, pipe);

	/* ret is the binary number in the firmware blob */
	if (ret < 0)
		goto out;

	/*
	 * imgu doesn't set the node to the value given by user
	 * before we return success from this function, so set it here.
	 */
	if (!try)
		imgu_pipe->nodes[node].vdev_fmt.fmt.pix_mp = f->fmt.pix_mp;

out:
	if (try) {
		for (i = 0; i < IPU3_CSS_QUEUES; i++)
			if (i != css_q)
				kfree(fmts[i]);
	}

	return ret;
}

static int imgu_try_fmt(struct file *file, void *fh, struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pixm = &f->fmt.pix_mp;
	const struct imgu_fmt *fmt;

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		fmt = find_format(f, VID_CAPTURE);
	else if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		fmt = find_format(f, VID_OUTPUT);
	else
		return -EINVAL;

	pixm->pixelformat = fmt->fourcc;

	return 0;
}

static int imgu_vidioc_try_fmt(struct file *file, void *fh,
			       struct v4l2_format *f)
{
	struct imgu_device *imgu = video_drvdata(file);
	struct device *dev = &imgu->pci_dev->dev;
	struct imgu_video_device *node = file_to_intel_imgu_node(file);
	struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
	int r;

	dev_dbg(dev, "%s [%ux%u] for node %u\n", __func__,
		pix_mp->width, pix_mp->height, node->id);

	r = imgu_try_fmt(file, fh, f);
	if (r)
		return r;

	return imgu_fmt(imgu, node->pipe, node->id, f, true);
}

static int imgu_vidioc_s_fmt(struct file *file, void *fh, struct v4l2_format *f)
{
	struct imgu_device *imgu = video_drvdata(file);
	struct device *dev = &imgu->pci_dev->dev;
	struct imgu_video_device *node = file_to_intel_imgu_node(file);
	struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
	int r;

	dev_dbg(dev, "%s [%ux%u] for node %u\n", __func__,
		pix_mp->width, pix_mp->height, node->id);

	r = imgu_try_fmt(file, fh, f);
	if (r)
		return r;

	return imgu_fmt(imgu, node->pipe, node->id, f, false);
}

struct imgu_meta_fmt {
	__u32 fourcc;
	char *name;
};

/* From drivers/media/v4l2-core/v4l2-ioctl.c */
static const struct imgu_meta_fmt meta_fmts[] = {
	{ V4L2_META_FMT_IPU3_PARAMS, "IPU3 processing parameters" },
	{ V4L2_META_FMT_IPU3_STAT_3A, "IPU3 3A statistics" },
};

static int imgu_meta_enum_format(struct file *file, void *fh,
				 struct v4l2_fmtdesc *fmt)
{
	struct imgu_video_device *node = file_to_intel_imgu_node(file);
	unsigned int i = fmt->type == V4L2_BUF_TYPE_META_OUTPUT ? 0 : 1;

	/* Each node is dedicated to only one meta format */
	if (fmt->index > 0 || fmt->type != node->vbq.type)
		return -EINVAL;

	if (fmt->mbus_code != 0 && fmt->mbus_code != MEDIA_BUS_FMT_FIXED)
		return -EINVAL;

	strscpy(fmt->description, meta_fmts[i].name, sizeof(fmt->description));
	fmt->pixelformat = meta_fmts[i].fourcc;

	return 0;
}

static int imgu_vidioc_g_meta_fmt(struct file *file, void *fh,
				  struct v4l2_format *f)
{
	struct imgu_video_device *node = file_to_intel_imgu_node(file);

	if (f->type != node->vbq.type)
		return -EINVAL;

	f->fmt = node->vdev_fmt.fmt;

	return 0;
}

/******************** function pointers ********************/

static const struct v4l2_subdev_internal_ops imgu_subdev_internal_ops = {
	.open = imgu_subdev_open,
};

static const struct v4l2_subdev_core_ops imgu_subdev_core_ops = {
	.subscribe_event = v4l2_ctrl_subdev_subscribe_event,
	.unsubscribe_event = v4l2_event_subdev_unsubscribe,
};

static const struct v4l2_subdev_video_ops imgu_subdev_video_ops = {
	.s_stream = imgu_subdev_s_stream,
};

static const struct v4l2_subdev_pad_ops imgu_subdev_pad_ops = {
	.link_validate = v4l2_subdev_link_validate_default,
	.get_fmt = imgu_subdev_get_fmt,
	.set_fmt = imgu_subdev_set_fmt,
	.get_selection = imgu_subdev_get_selection,
	.set_selection = imgu_subdev_set_selection,
};

static const struct v4l2_subdev_ops imgu_subdev_ops = {
	.core = &imgu_subdev_core_ops,
	.video = &imgu_subdev_video_ops,
	.pad = &imgu_subdev_pad_ops,
};

static const struct media_entity_operations imgu_media_ops = {
	.link_setup = imgu_link_setup,
	.link_validate = v4l2_subdev_link_validate,
};

/****************** vb2_ops of the Q ********************/

static const struct vb2_ops imgu_vb2_ops = {
	.buf_init = imgu_vb2_buf_init,
	.buf_cleanup = imgu_vb2_buf_cleanup,
	.buf_queue = imgu_vb2_buf_queue,
	.queue_setup = imgu_vb2_queue_setup,
	.start_streaming = imgu_vb2_start_streaming,
	.stop_streaming = imgu_vb2_stop_streaming,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
};

/****************** v4l2_file_operations *****************/

static const struct v4l2_file_operations imgu_v4l2_fops = {
	.unlocked_ioctl = video_ioctl2,
	.open = v4l2_fh_open,
	.release = vb2_fop_release,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
};

/******************** v4l2_ioctl_ops ********************/

static const struct v4l2_ioctl_ops imgu_v4l2_ioctl_ops = {
	.vidioc_querycap = imgu_vidioc_querycap,

	.vidioc_enum_fmt_vid_cap = vidioc_enum_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap_mplane = imgu_vidioc_g_fmt,
	.vidioc_s_fmt_vid_cap_mplane = imgu_vidioc_s_fmt,
	.vidioc_try_fmt_vid_cap_mplane = imgu_vidioc_try_fmt,

	.vidioc_enum_fmt_vid_out = vidioc_enum_fmt_vid_out,
	.vidioc_g_fmt_vid_out_mplane = imgu_vidioc_g_fmt,
	.vidioc_s_fmt_vid_out_mplane = imgu_vidioc_s_fmt,
	.vidioc_try_fmt_vid_out_mplane = imgu_vidioc_try_fmt,

	/* buffer queue management */
	.vidioc_reqbufs = vb2_ioctl_reqbufs,
	.vidioc_create_bufs = vb2_ioctl_create_bufs,
	.vidioc_prepare_buf = vb2_ioctl_prepare_buf,
	.vidioc_querybuf = vb2_ioctl_querybuf,
	.vidioc_qbuf = vb2_ioctl_qbuf,
	.vidioc_dqbuf = vb2_ioctl_dqbuf,
	.vidioc_streamon = vb2_ioctl_streamon,
	.vidioc_streamoff = vb2_ioctl_streamoff,
	.vidioc_expbuf = vb2_ioctl_expbuf,
};

static const struct v4l2_ioctl_ops imgu_v4l2_meta_ioctl_ops = {
	.vidioc_querycap = imgu_vidioc_querycap,

	/* meta capture */
	.vidioc_enum_fmt_meta_cap = imgu_meta_enum_format,
	.vidioc_g_fmt_meta_cap = imgu_vidioc_g_meta_fmt,
	.vidioc_s_fmt_meta_cap = imgu_vidioc_g_meta_fmt,
	.vidioc_try_fmt_meta_cap = imgu_vidioc_g_meta_fmt,

	/* meta output */
	.vidioc_enum_fmt_meta_out = imgu_meta_enum_format,
	.vidioc_g_fmt_meta_out = imgu_vidioc_g_meta_fmt,
	.vidioc_s_fmt_meta_out = imgu_vidioc_g_meta_fmt,
	.vidioc_try_fmt_meta_out = imgu_vidioc_g_meta_fmt,

	.vidioc_reqbufs = vb2_ioctl_reqbufs,
	.vidioc_create_bufs = vb2_ioctl_create_bufs,
	.vidioc_prepare_buf = vb2_ioctl_prepare_buf,
	.vidioc_querybuf = vb2_ioctl_querybuf,
	.vidioc_qbuf = vb2_ioctl_qbuf,
	.vidioc_dqbuf = vb2_ioctl_dqbuf,
	.vidioc_streamon = vb2_ioctl_streamon,
	.vidioc_streamoff = vb2_ioctl_streamoff,
	.vidioc_expbuf = vb2_ioctl_expbuf,
};

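/*
 * The only custom control, V4L2_CID_INTEL_IPU3_MODE, switches the pipe
 * between video and still running modes.
 */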
static int imgu_sd_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct imgu_v4l2_subdev *imgu_sd =
		container_of(ctrl->handler, struct imgu_v4l2_subdev, ctrl_handler);
	struct imgu_device *imgu = v4l2_get_subdevdata(&imgu_sd->subdev);
	struct device *dev = &imgu->pci_dev->dev;

	dev_dbg(dev, "set val %d to ctrl 0x%8x for subdev %u",
		ctrl->val, ctrl->id, imgu_sd->pipe);

	switch (ctrl->id) {
	case V4L2_CID_INTEL_IPU3_MODE:
		atomic_set(&imgu_sd->running_mode, ctrl->val);
		return 0;
	default:
		return -EINVAL;
	}
}

static const struct v4l2_ctrl_ops imgu_subdev_ctrl_ops = {
	.s_ctrl = imgu_sd_s_ctrl,
};

static const char * const imgu_ctrl_mode_strings[] = {
	"Video mode",
	"Still mode",
};

static const struct v4l2_ctrl_config imgu_subdev_ctrl_mode = {
	.ops = &imgu_subdev_ctrl_ops,
	.id = V4L2_CID_INTEL_IPU3_MODE,
	.name = "IPU3 Pipe Mode",
	.type = V4L2_CTRL_TYPE_MENU,
	.max = ARRAY_SIZE(imgu_ctrl_mode_strings) - 1,
	.def = IPU3_RUNNING_MODE_VIDEO,
	.qmenu = imgu_ctrl_mode_strings,
};

/******************** Framework registration ********************/

/* helper function to config node's video properties */
static void imgu_node_to_v4l2(u32 node, struct video_device *vdev,
			      struct v4l2_format *f)
{
	u32 cap;

	/* Should not happen */
	WARN_ON(node >= IMGU_NODE_NUM);

	switch (node) {
	case IMGU_NODE_IN:
		cap = V4L2_CAP_VIDEO_OUTPUT_MPLANE;
		f->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
		vdev->ioctl_ops = &imgu_v4l2_ioctl_ops;
		break;
	case IMGU_NODE_PARAMS:
		cap = V4L2_CAP_META_OUTPUT;
		f->type = V4L2_BUF_TYPE_META_OUTPUT;
		f->fmt.meta.dataformat = V4L2_META_FMT_IPU3_PARAMS;
		vdev->ioctl_ops = &imgu_v4l2_meta_ioctl_ops;
		imgu_css_meta_fmt_set(&f->fmt.meta);
		break;
	case IMGU_NODE_STAT_3A:
		cap = V4L2_CAP_META_CAPTURE;
		f->type = V4L2_BUF_TYPE_META_CAPTURE;
		f->fmt.meta.dataformat = V4L2_META_FMT_IPU3_STAT_3A;
		vdev->ioctl_ops = &imgu_v4l2_meta_ioctl_ops;
		imgu_css_meta_fmt_set(&f->fmt.meta);
		break;
	default:
		cap = V4L2_CAP_VIDEO_CAPTURE_MPLANE;
		f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		vdev->ioctl_ops = &imgu_v4l2_ioctl_ops;
	}

	vdev->device_caps = V4L2_CAP_STREAMING | V4L2_CAP_IO_MC | cap;
}

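/*
 * Register one IMGU subdev: initialize its media pads, create the
 * "IPU3 Pipe Mode" control and register the subdev with the V4L2 device.
 */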
static int imgu_v4l2_subdev_register(struct imgu_device *imgu,
				     struct imgu_v4l2_subdev *imgu_sd,
				     unsigned int pipe)
{
	int i, r;
	struct v4l2_ctrl_handler *hdl = &imgu_sd->ctrl_handler;
	struct imgu_media_pipe *imgu_pipe = &imgu->imgu_pipe[pipe];

	/* Initialize subdev media entity */
	r = media_entity_pads_init(&imgu_sd->subdev.entity, IMGU_NODE_NUM,
				   imgu_sd->subdev_pads);
	if (r) {
		dev_err(&imgu->pci_dev->dev,
			"failed initialize subdev media entity (%d)\n", r);
		return r;
	}
	imgu_sd->subdev.entity.ops = &imgu_media_ops;
	for (i = 0; i < IMGU_NODE_NUM; i++) {
		imgu_sd->subdev_pads[i].flags = imgu_pipe->nodes[i].output ?
			MEDIA_PAD_FL_SINK : MEDIA_PAD_FL_SOURCE;
	}

	/* Initialize subdev */
	v4l2_subdev_init(&imgu_sd->subdev, &imgu_subdev_ops);
	imgu_sd->subdev.entity.function = MEDIA_ENT_F_PROC_VIDEO_STATISTICS;
	imgu_sd->subdev.internal_ops = &imgu_subdev_internal_ops;
	imgu_sd->subdev.flags = V4L2_SUBDEV_FL_HAS_DEVNODE |
				V4L2_SUBDEV_FL_HAS_EVENTS;
	snprintf(imgu_sd->subdev.name, sizeof(imgu_sd->subdev.name),
		 "%s %u", IMGU_NAME, pipe);
	v4l2_set_subdevdata(&imgu_sd->subdev, imgu);
	atomic_set(&imgu_sd->running_mode, IPU3_RUNNING_MODE_VIDEO);
	v4l2_ctrl_handler_init(hdl, 1);
	imgu_sd->subdev.ctrl_handler = hdl;
	imgu_sd->ctrl = v4l2_ctrl_new_custom(hdl, &imgu_subdev_ctrl_mode, NULL);
	if (hdl->error) {
		r = hdl->error;
		dev_err(&imgu->pci_dev->dev,
			"failed to create subdev v4l2 ctrl with err %d", r);
		goto fail_subdev;
	}
	r = v4l2_device_register_subdev(&imgu->v4l2_dev, &imgu_sd->subdev);
	if (r) {
		dev_err(&imgu->pci_dev->dev,
			"failed initialize subdev (%d)\n", r);
		goto fail_subdev;
	}

	imgu_sd->pipe = pipe;
	return 0;

fail_subdev:
	v4l2_ctrl_handler_free(imgu_sd->subdev.ctrl_handler);
	media_entity_cleanup(&imgu_sd->subdev.entity);

	return r;
}

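/*
 * Set up one video node of a pipe: default bus and pixel formats, the vb2
 * queue, the video device, and the media link between the node and the
 * corresponding subdev pad.
 */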
static int imgu_v4l2_node_setup(struct imgu_device *imgu, unsigned int pipe,
				int node_num)
{
	int r;
	u32 flags;
	struct v4l2_mbus_framefmt def_bus_fmt = { 0 };
	struct v4l2_pix_format_mplane def_pix_fmt = { 0 };
	struct device *dev = &imgu->pci_dev->dev;
	struct imgu_media_pipe *imgu_pipe = &imgu->imgu_pipe[pipe];
	struct v4l2_subdev *sd = &imgu_pipe->imgu_sd.subdev;
	struct imgu_video_device *node = &imgu_pipe->nodes[node_num];
	struct video_device *vdev = &node->vdev;
	struct vb2_queue *vbq = &node->vbq;

	/* Initialize formats to default values */
	def_bus_fmt.width = 1920;
	def_bus_fmt.height = 1080;
	def_bus_fmt.code = MEDIA_BUS_FMT_FIXED;
	def_bus_fmt.field = V4L2_FIELD_NONE;
	def_bus_fmt.colorspace = V4L2_COLORSPACE_RAW;
	def_bus_fmt.ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
	def_bus_fmt.quantization = V4L2_QUANTIZATION_DEFAULT;
	def_bus_fmt.xfer_func = V4L2_XFER_FUNC_DEFAULT;

	def_pix_fmt.width = def_bus_fmt.width;
	def_pix_fmt.height = def_bus_fmt.height;
	def_pix_fmt.field = def_bus_fmt.field;
	def_pix_fmt.num_planes = 1;
	def_pix_fmt.plane_fmt[0].bytesperline =
		imgu_bytesperline(def_pix_fmt.width,
				  IMGU_ABI_FRAME_FORMAT_RAW_PACKED);
	def_pix_fmt.plane_fmt[0].sizeimage =
		def_pix_fmt.height * def_pix_fmt.plane_fmt[0].bytesperline;
	def_pix_fmt.flags = 0;
	def_pix_fmt.colorspace = def_bus_fmt.colorspace;
	def_pix_fmt.ycbcr_enc = def_bus_fmt.ycbcr_enc;
	def_pix_fmt.quantization = def_bus_fmt.quantization;
	def_pix_fmt.xfer_func = def_bus_fmt.xfer_func;

	/* Initialize miscellaneous variables */
	mutex_init(&node->lock);
	INIT_LIST_HEAD(&node->buffers);

	/* Initialize formats to default values */
	node->pad_fmt = def_bus_fmt;
	node->id = node_num;
	node->pipe = pipe;
	imgu_node_to_v4l2(node_num, vdev, &node->vdev_fmt);
	if (node->vdev_fmt.type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ||
	    node->vdev_fmt.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		def_pix_fmt.pixelformat = node->output ?
			V4L2_PIX_FMT_IPU3_SGRBG10 :
			V4L2_PIX_FMT_NV12;
		node->vdev_fmt.fmt.pix_mp = def_pix_fmt;
	}

	/* Initialize media entities */
	r = media_entity_pads_init(&vdev->entity, 1, &node->vdev_pad);
	if (r) {
		dev_err(dev, "failed initialize media entity (%d)\n", r);
		mutex_destroy(&node->lock);
		return r;
	}
	node->vdev_pad.flags = node->output ?
		MEDIA_PAD_FL_SOURCE : MEDIA_PAD_FL_SINK;
	vdev->entity.ops = NULL;

	/* Initialize vbq */
	vbq->type = node->vdev_fmt.type;
	vbq->io_modes = VB2_USERPTR | VB2_MMAP | VB2_DMABUF;
	vbq->ops = &imgu_vb2_ops;
	vbq->mem_ops = &vb2_dma_sg_memops;
	if (imgu->buf_struct_size <= 0)
		imgu->buf_struct_size = sizeof(struct imgu_vb2_buffer);
	vbq->buf_struct_size = imgu->buf_struct_size;
	vbq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	/* can streamon w/o buffers */
	vbq->min_buffers_needed = 0;
	vbq->drv_priv = imgu;
	vbq->lock = &node->lock;
	r = vb2_queue_init(vbq);
	if (r) {
		dev_err(dev, "failed to initialize video queue (%d)", r);
		media_entity_cleanup(&vdev->entity);
		return r;
	}

	/* Initialize vdev */
	snprintf(vdev->name, sizeof(vdev->name), "%s %u %s",
		 IMGU_NAME, pipe, node->name);
	vdev->release = video_device_release_empty;
	vdev->fops = &imgu_v4l2_fops;
	vdev->lock = &node->lock;
	vdev->v4l2_dev = &imgu->v4l2_dev;
	vdev->queue = &node->vbq;
	vdev->vfl_dir = node->output ? VFL_DIR_TX : VFL_DIR_RX;
	video_set_drvdata(vdev, imgu);
	r = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
	if (r) {
		dev_err(dev, "failed to register video device (%d)", r);
		media_entity_cleanup(&vdev->entity);
		return r;
	}

	/* Create link between video node and the subdev pad */
	flags = 0;
	if (node->enabled)
		flags |= MEDIA_LNK_FL_ENABLED;
	if (node->output) {
		r = media_create_pad_link(&vdev->entity, 0, &sd->entity,
					  node_num, flags);
	} else {
		if (node->id == IMGU_NODE_OUT) {
			flags |= MEDIA_LNK_FL_ENABLED | MEDIA_LNK_FL_IMMUTABLE;
			node->enabled = true;
		}

		r = media_create_pad_link(&sd->entity, node_num, &vdev->entity,
					  0, flags);
	}
	if (r) {
		dev_err(dev, "failed to create pad link (%d)", r);
		video_unregister_device(vdev);
		return r;
	}

	return 0;
}

static void imgu_v4l2_nodes_cleanup_pipe(struct imgu_device *imgu,
					 unsigned int pipe, int node)
{
	int i;
	struct imgu_media_pipe *imgu_pipe = &imgu->imgu_pipe[pipe];

	for (i = 0; i < node; i++) {
		video_unregister_device(&imgu_pipe->nodes[i].vdev);
		media_entity_cleanup(&imgu_pipe->nodes[i].vdev.entity);
		mutex_destroy(&imgu_pipe->nodes[i].lock);
	}
}

static int imgu_v4l2_nodes_setup_pipe(struct imgu_device *imgu, int pipe)
{
	int i;

	for (i = 0; i < IMGU_NODE_NUM; i++) {
		int r = imgu_v4l2_node_setup(imgu, pipe, i);

		if (r) {
			imgu_v4l2_nodes_cleanup_pipe(imgu, pipe, i);
			return r;
		}
	}
	return 0;
}

static void imgu_v4l2_subdev_cleanup(struct imgu_device *imgu, unsigned int i)
{
	struct imgu_media_pipe *imgu_pipe = &imgu->imgu_pipe[i];

	v4l2_device_unregister_subdev(&imgu_pipe->imgu_sd.subdev);
	v4l2_ctrl_handler_free(imgu_pipe->imgu_sd.subdev.ctrl_handler);
	media_entity_cleanup(&imgu_pipe->imgu_sd.subdev.entity);
}

static void imgu_v4l2_cleanup_pipes(struct imgu_device *imgu, unsigned int pipe)
{
	int i;

	for (i = 0; i < pipe; i++) {
		imgu_v4l2_nodes_cleanup_pipe(imgu, i, IMGU_NODE_NUM);
		imgu_v4l2_subdev_cleanup(imgu, i);
	}
}

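/*
 * Register the subdev and all video nodes for every pipe; on failure, undo
 * whatever was already registered.
 */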
static int imgu_v4l2_register_pipes(struct imgu_device *imgu)
{
	struct imgu_media_pipe *imgu_pipe;
	int i, r;

	for (i = 0; i < IMGU_MAX_PIPE_NUM; i++) {
		imgu_pipe = &imgu->imgu_pipe[i];
		r = imgu_v4l2_subdev_register(imgu, &imgu_pipe->imgu_sd, i);
		if (r) {
			dev_err(&imgu->pci_dev->dev,
				"failed to register subdev%u ret (%d)\n", i, r);
			goto pipes_cleanup;
		}
		r = imgu_v4l2_nodes_setup_pipe(imgu, i);
		if (r) {
			imgu_v4l2_subdev_cleanup(imgu, i);
			goto pipes_cleanup;
		}
	}

	return 0;

pipes_cleanup:
	imgu_v4l2_cleanup_pipes(imgu, i);
	return r;
}

int imgu_v4l2_register(struct imgu_device *imgu)
{
	int r;

	/* Initialize miscellaneous variables */
	imgu->streaming = false;

	/* Set up media device */
	media_device_pci_init(&imgu->media_dev, imgu->pci_dev, IMGU_NAME);

	/* Set up v4l2 device */
	imgu->v4l2_dev.mdev = &imgu->media_dev;
	imgu->v4l2_dev.ctrl_handler = NULL;
	r = v4l2_device_register(&imgu->pci_dev->dev, &imgu->v4l2_dev);
	if (r) {
		dev_err(&imgu->pci_dev->dev,
			"failed to register V4L2 device (%d)\n", r);
		goto fail_v4l2_dev;
	}

	r = imgu_v4l2_register_pipes(imgu);
	if (r) {
		dev_err(&imgu->pci_dev->dev,
			"failed to register pipes (%d)\n", r);
		goto fail_v4l2_pipes;
	}

	r = v4l2_device_register_subdev_nodes(&imgu->v4l2_dev);
	if (r) {
		dev_err(&imgu->pci_dev->dev,
			"failed to register subdevs (%d)\n", r);
		goto fail_subdevs;
	}

	r = media_device_register(&imgu->media_dev);
	if (r) {
		dev_err(&imgu->pci_dev->dev,
			"failed to register media device (%d)\n", r);
		goto fail_subdevs;
	}

	return 0;

fail_subdevs:
	imgu_v4l2_cleanup_pipes(imgu, IMGU_MAX_PIPE_NUM);
fail_v4l2_pipes:
	v4l2_device_unregister(&imgu->v4l2_dev);
fail_v4l2_dev:
	media_device_cleanup(&imgu->media_dev);

	return r;
}

int imgu_v4l2_unregister(struct imgu_device *imgu)
{
	media_device_unregister(&imgu->media_dev);
	imgu_v4l2_cleanup_pipes(imgu, IMGU_MAX_PIPE_NUM);
	v4l2_device_unregister(&imgu->v4l2_dev);
	media_device_cleanup(&imgu->media_dev);

	return 0;
}

void imgu_v4l2_buffer_done(struct vb2_buffer *vb,
			   enum vb2_buffer_state state)
{
	struct imgu_vb2_buffer *b =
		container_of(vb, struct imgu_vb2_buffer, vbb.vb2_buf);

	list_del(&b->list);
	vb2_buffer_done(&b->vbb.vb2_buf, state);
}