1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * The Marvell camera core.  This device appears in a number of settings,
4  * so it needs platform-specific support outside of the core.
5  *
6  * Copyright 2011 Jonathan Corbet corbet@lwn.net
7  * Copyright 2018 Lubomir Rintel <lkundrak@v3.sk>
8  */
9 #include <linux/kernel.h>
10 #include <linux/module.h>
11 #include <linux/fs.h>
12 #include <linux/mm.h>
13 #include <linux/i2c.h>
14 #include <linux/interrupt.h>
15 #include <linux/spinlock.h>
16 #include <linux/slab.h>
17 #include <linux/device.h>
18 #include <linux/wait.h>
19 #include <linux/list.h>
20 #include <linux/dma-mapping.h>
21 #include <linux/delay.h>
22 #include <linux/vmalloc.h>
23 #include <linux/io.h>
24 #include <linux/clk.h>
25 #include <linux/clk-provider.h>
26 #include <linux/videodev2.h>
27 #include <media/v4l2-device.h>
28 #include <media/v4l2-ioctl.h>
29 #include <media/v4l2-ctrls.h>
30 #include <media/v4l2-event.h>
31 #include <media/videobuf2-vmalloc.h>
32 #include <media/videobuf2-dma-contig.h>
33 #include <media/videobuf2-dma-sg.h>
34 
35 #include "mcam-core.h"
36 
37 #ifdef MCAM_MODE_VMALLOC
38 /*
39  * Internal DMA buffer management.  Since the controller cannot do S/G I/O,
40  * we must have physically contiguous buffers to bring frames into.
41  * These parameters control how many buffers we use, whether we
42  * allocate them at load time (better chance of success, but nails down
43  * memory) or when somebody tries to use the camera (riskier), and,
44  * for load-time allocation, how big they should be.
45  *
46  * The controller can cycle through three buffers.  We could use
47  * more by flipping pointers around, but it probably makes little
48  * sense.
49  */
50 
51 static bool alloc_bufs_at_read;
52 module_param(alloc_bufs_at_read, bool, 0444);
53 MODULE_PARM_DESC(alloc_bufs_at_read,
54 		"Non-zero value causes DMA buffers to be allocated when the video capture device is read, rather than at module load time.  This saves memory, but decreases the chances of successfully getting those buffers.  This parameter is only used in the vmalloc buffer mode");
55 
56 static int n_dma_bufs = 3;
57 module_param(n_dma_bufs, uint, 0644);
58 MODULE_PARM_DESC(n_dma_bufs,
59 		"The number of DMA buffers to allocate.  Can be either two (saves memory, makes timing tighter) or three.");
60 
61 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;  /* Worst case */
62 module_param(dma_buf_size, uint, 0444);
63 MODULE_PARM_DESC(dma_buf_size,
64 		"The size of the allocated DMA buffers.  If actual operating parameters require larger buffers, an attempt to reallocate will be made.");
65 #else /* MCAM_MODE_VMALLOC */
66 static const bool alloc_bufs_at_read;
67 static const int n_dma_bufs = 3;  /* Used by S/G_PARM */
68 #endif /* MCAM_MODE_VMALLOC */
69 
70 static bool flip;
71 module_param(flip, bool, 0444);
72 MODULE_PARM_DESC(flip,
73 		"If set, the sensor will be instructed to flip the image vertically.");
74 
75 static int buffer_mode = -1;
76 module_param(buffer_mode, int, 0444);
77 MODULE_PARM_DESC(buffer_mode,
78 		"Set the buffer mode to be used; default is to go with what the platform driver asks for.  Set to 0 for vmalloc, 1 for DMA contiguous.");
79 
80 /*
81  * Status flags.  Always manipulated with bit operations.
82  */
83 #define CF_BUF0_VALID	 0	/* Buffers valid - first three */
84 #define CF_BUF1_VALID	 1
85 #define CF_BUF2_VALID	 2
86 #define CF_DMA_ACTIVE	 3	/* A frame is incoming */
87 #define CF_CONFIG_NEEDED 4	/* Must configure hardware */
88 #define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
89 #define CF_SG_RESTART	 6	/* SG restart needed */
90 #define CF_FRAME_SOF0	 7	/* Frame 0 started */
91 #define CF_FRAME_SOF1	 8
92 #define CF_FRAME_SOF2	 9
93 
94 #define sensor_call(cam, o, f, args...) \
95 	v4l2_subdev_call(cam->sensor, o, f, ##args)
96 
97 #define notifier_to_mcam(notifier) \
98 	container_of(notifier, struct mcam_camera, notifier)
99 
100 static struct mcam_format_struct {
101 	__u32 pixelformat;
102 	int bpp;   /* Bytes per pixel */
103 	bool planar;
104 	u32 mbus_code;
105 } mcam_formats[] = {
106 	{
107 		.pixelformat	= V4L2_PIX_FMT_YUYV,
108 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
109 		.bpp		= 2,
110 		.planar		= false,
111 	},
112 	{
113 		.pixelformat	= V4L2_PIX_FMT_YVYU,
114 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
115 		.bpp		= 2,
116 		.planar		= false,
117 	},
118 	{
119 		.pixelformat	= V4L2_PIX_FMT_YUV420,
120 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
121 		.bpp		= 1,
122 		.planar		= true,
123 	},
124 	{
125 		.pixelformat	= V4L2_PIX_FMT_YVU420,
126 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
127 		.bpp		= 1,
128 		.planar		= true,
129 	},
130 	{
131 		.pixelformat	= V4L2_PIX_FMT_XRGB444,
132 		.mbus_code	= MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
133 		.bpp		= 2,
134 		.planar		= false,
135 	},
136 	{
137 		.pixelformat	= V4L2_PIX_FMT_RGB565,
138 		.mbus_code	= MEDIA_BUS_FMT_RGB565_2X8_LE,
139 		.bpp		= 2,
140 		.planar		= false,
141 	},
142 	{
143 		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
144 		.mbus_code	= MEDIA_BUS_FMT_SBGGR8_1X8,
145 		.bpp		= 1,
146 		.planar		= false,
147 	},
148 };
149 #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
150 
151 static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
152 {
153 	unsigned i;
154 
155 	for (i = 0; i < N_MCAM_FMTS; i++)
156 		if (mcam_formats[i].pixelformat == pixelformat)
157 			return mcam_formats + i;
158 	/* Not found? Then return the first format. */
159 	return mcam_formats;
160 }
161 
162 /*
163  * The default format we use until somebody says otherwise.
164  */
165 static const struct v4l2_pix_format mcam_def_pix_format = {
166 	.width		= VGA_WIDTH,
167 	.height		= VGA_HEIGHT,
168 	.pixelformat	= V4L2_PIX_FMT_YUYV,
169 	.field		= V4L2_FIELD_NONE,
170 	.bytesperline	= VGA_WIDTH*2,
171 	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
172 	.colorspace	= V4L2_COLORSPACE_SRGB,
173 };
174 
175 static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
176 
177 
178 /*
179  * The two-word DMA descriptor format used by the Armada 610 and the like.  There
180  * is a three-word format as well (set C1_DESC_3WORD) where the third
181  * word is a pointer to the next descriptor, but we don't use it.  Two-word
182  * descriptors have to be contiguous in memory.
183  */
184 struct mcam_dma_desc {
185 	u32 dma_addr;
186 	u32 segment_len;
187 };
188 
189 /*
190  * Our buffer type for working with videobuf2.  Note that the vb2
191  * developers have decreed that struct vb2_v4l2_buffer must be at the
192  * beginning of this structure.
193  */
194 struct mcam_vb_buffer {
195 	struct vb2_v4l2_buffer vb_buf;
196 	struct list_head queue;
197 	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
198 	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
199 };
200 
201 static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_v4l2_buffer *vb)
202 {
203 	return container_of(vb, struct mcam_vb_buffer, vb_buf);
204 }
205 
206 /*
207  * Hand a completed buffer back to user space.
208  */
209 static void mcam_buffer_done(struct mcam_camera *cam, int frame,
210 		struct vb2_v4l2_buffer *vbuf)
211 {
212 	vbuf->vb2_buf.planes[0].bytesused = cam->pix_format.sizeimage;
213 	vbuf->sequence = cam->buf_seq[frame];
214 	vbuf->field = V4L2_FIELD_NONE;
215 	vbuf->vb2_buf.timestamp = ktime_get_ns();
216 	vb2_set_plane_payload(&vbuf->vb2_buf, 0, cam->pix_format.sizeimage);
217 	vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE);
218 }
219 
220 
221 
222 /*
223  * Debugging and related.
224  */
225 #define cam_err(cam, fmt, arg...) \
226 	dev_err((cam)->dev, fmt, ##arg);
227 #define cam_warn(cam, fmt, arg...) \
228 	dev_warn((cam)->dev, fmt, ##arg);
229 #define cam_dbg(cam, fmt, arg...) \
230 	dev_dbg((cam)->dev, fmt, ##arg);
231 
232 
233 /*
234  * Flag manipulation helpers
235  */
236 static void mcam_reset_buffers(struct mcam_camera *cam)
237 {
238 	int i;
239 
240 	cam->next_buf = -1;
241 	for (i = 0; i < cam->nbufs; i++) {
242 		clear_bit(i, &cam->flags);
243 		clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
244 	}
245 }
246 
247 static inline int mcam_needs_config(struct mcam_camera *cam)
248 {
249 	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
250 }
251 
252 static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
253 {
254 	if (needed)
255 		set_bit(CF_CONFIG_NEEDED, &cam->flags);
256 	else
257 		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
258 }
259 
260 /* ------------------------------------------------------------------- */
261 /*
262  * Make the controller start grabbing images.  Everything must
263  * be set up before doing this.
264  */
265 static void mcam_ctlr_start(struct mcam_camera *cam)
266 {
267 	/* set_bit performs a read, so no other barrier should be
268 	   needed here */
269 	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
270 }
271 
272 static void mcam_ctlr_stop(struct mcam_camera *cam)
273 {
274 	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
275 }
276 
277 static void mcam_enable_mipi(struct mcam_camera *mcam)
278 {
279 	/* Using MIPI mode: program the DPHY and enable MIPI */
280 	if (mcam->calc_dphy)
281 		mcam->calc_dphy(mcam);
282 	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
283 			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
284 	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
285 	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
286 	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);
287 
288 	if (!mcam->mipi_enabled) {
289 		if (mcam->lane > 4 || mcam->lane <= 0) {
290 			cam_warn(mcam, "lane number error\n");
291 			mcam->lane = 1;	/* set the default value */
292 		}
293 		/*
294 		 * 0x41 activates 1 lane
295 		 * 0x43 activates 2 lanes
296 		 * 0x45 activates 3 lanes (never happens)
297 		 * 0x47 activates 4 lanes
298 		 */
299 		mcam_reg_write(mcam, REG_CSI2_CTRL0,
300 			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
301 		mcam->mipi_enabled = true;
302 	}
303 }
304 
305 static void mcam_disable_mipi(struct mcam_camera *mcam)
306 {
307 	/* Using parallel mode or disabling MIPI */
308 	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
309 	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
310 	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
311 	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
312 	mcam->mipi_enabled = false;
313 }
314 
315 static bool mcam_fmt_is_planar(__u32 pfmt)
316 {
317 	struct mcam_format_struct *f;
318 
319 	f = mcam_find_format(pfmt);
320 	return f->planar;
321 }
322 
323 static void mcam_write_yuv_bases(struct mcam_camera *cam,
324 				 unsigned frame, dma_addr_t base)
325 {
326 	struct v4l2_pix_format *fmt = &cam->pix_format;
327 	u32 pixel_count = fmt->width * fmt->height;
328 	dma_addr_t y, u = 0, v = 0;
329 
330 	y = base;
331 
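	/* For planar 4:2:0, the U and V planes follow Y; each is one quarter of the pixel count */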
332 	switch (fmt->pixelformat) {
333 	case V4L2_PIX_FMT_YUV420:
334 		u = y + pixel_count;
335 		v = u + pixel_count / 4;
336 		break;
337 	case V4L2_PIX_FMT_YVU420:
338 		v = y + pixel_count;
339 		u = v + pixel_count / 4;
340 		break;
341 	default:
342 		break;
343 	}
344 
345 	mcam_reg_write(cam, REG_Y0BAR + frame * 4, y);
346 	if (mcam_fmt_is_planar(fmt->pixelformat)) {
347 		mcam_reg_write(cam, REG_U0BAR + frame * 4, u);
348 		mcam_reg_write(cam, REG_V0BAR + frame * 4, v);
349 	}
350 }
351 
352 /* ------------------------------------------------------------------- */
353 
354 #ifdef MCAM_MODE_VMALLOC
355 /*
356  * Code specific to the vmalloc buffer mode.
357  */
358 
359 /*
360  * Allocate in-kernel DMA buffers for vmalloc mode.
361  */
362 static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
363 {
364 	int i;
365 
366 	mcam_set_config_needed(cam, 1);
367 	if (loadtime)
368 		cam->dma_buf_size = dma_buf_size;
369 	else
370 		cam->dma_buf_size = cam->pix_format.sizeimage;
371 	if (n_dma_bufs > 3)
372 		n_dma_bufs = 3;
373 
374 	cam->nbufs = 0;
375 	for (i = 0; i < n_dma_bufs; i++) {
376 		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
377 				cam->dma_buf_size, cam->dma_handles + i,
378 				GFP_KERNEL);
379 		if (cam->dma_bufs[i] == NULL) {
380 			cam_warn(cam, "Failed to allocate DMA buffer\n");
381 			break;
382 		}
383 		(cam->nbufs)++;
384 	}
385 
386 	switch (cam->nbufs) {
387 	case 1:
388 		dma_free_coherent(cam->dev, cam->dma_buf_size,
389 				cam->dma_bufs[0], cam->dma_handles[0]);
390 		cam->nbufs = 0;
391 		/* fall-through */
392 	case 0:
393 		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
394 		return -ENOMEM;
395 
396 	case 2:
397 		if (n_dma_bufs > 2)
398 			cam_warn(cam, "Will limp along with only 2 buffers\n");
399 		break;
400 	}
401 	return 0;
402 }
403 
404 static void mcam_free_dma_bufs(struct mcam_camera *cam)
405 {
406 	int i;
407 
408 	for (i = 0; i < cam->nbufs; i++) {
409 		dma_free_coherent(cam->dev, cam->dma_buf_size,
410 				cam->dma_bufs[i], cam->dma_handles[i]);
411 		cam->dma_bufs[i] = NULL;
412 	}
413 	cam->nbufs = 0;
414 }
415 
416 
417 /*
418  * Set up DMA buffers when operating in vmalloc mode
419  */
420 static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
421 {
422 	/*
423 	 * Store the first two YUV buffers. Then either
424 	 * set the third if it exists, or tell the controller
425 	 * to just use two.
426 	 */
427 	mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
428 	mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
429 	if (cam->nbufs > 2) {
430 		mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
431 		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
432 	} else
433 		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
434 	if (cam->chip_id == MCAM_CAFE)
435 		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
436 }
437 
438 /*
439  * Copy data out to user space in the vmalloc case
440  */
441 static void mcam_frame_tasklet(unsigned long data)
442 {
443 	struct mcam_camera *cam = (struct mcam_camera *) data;
444 	int i;
445 	unsigned long flags;
446 	struct mcam_vb_buffer *buf;
447 
448 	spin_lock_irqsave(&cam->dev_lock, flags);
449 	for (i = 0; i < cam->nbufs; i++) {
450 		int bufno = cam->next_buf;
451 
452 		if (cam->state != S_STREAMING || bufno < 0)
453 			break;  /* I/O got stopped */
454 		if (++(cam->next_buf) >= cam->nbufs)
455 			cam->next_buf = 0;
456 		if (!test_bit(bufno, &cam->flags))
457 			continue;
458 		if (list_empty(&cam->buffers)) {
459 			cam->frame_state.singles++;
460 			break;  /* Leave it valid, hope for better later */
461 		}
462 		cam->frame_state.delivered++;
463 		clear_bit(bufno, &cam->flags);
464 		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
465 				queue);
466 		list_del_init(&buf->queue);
467 		/*
468 		 * Drop the lock during the big copy.  This *should* be safe...
469 		 */
470 		spin_unlock_irqrestore(&cam->dev_lock, flags);
471 		memcpy(vb2_plane_vaddr(&buf->vb_buf.vb2_buf, 0),
472 				cam->dma_bufs[bufno],
473 				cam->pix_format.sizeimage);
474 		mcam_buffer_done(cam, bufno, &buf->vb_buf);
475 		spin_lock_irqsave(&cam->dev_lock, flags);
476 	}
477 	spin_unlock_irqrestore(&cam->dev_lock, flags);
478 }
479 
480 
481 /*
482  * Make sure our allocated buffers are up to the task.
483  */
484 static int mcam_check_dma_buffers(struct mcam_camera *cam)
485 {
486 	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
487 			mcam_free_dma_bufs(cam);
488 	if (cam->nbufs == 0)
489 		return mcam_alloc_dma_bufs(cam, 0);
490 	return 0;
491 }
492 
493 static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
494 {
495 	tasklet_schedule(&cam->s_tasklet);
496 }
497 
498 #else /* MCAM_MODE_VMALLOC */
499 
500 static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
501 {
502 	return 0;
503 }
504 
505 static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
506 {
507 	return;
508 }
509 
510 static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
511 {
512 	return 0;
513 }
514 
515 
516 
517 #endif /* MCAM_MODE_VMALLOC */
518 
519 
520 #ifdef MCAM_MODE_DMA_CONTIG
521 /* ---------------------------------------------------------------------- */
522 /*
523  * DMA-contiguous code.
524  */
525 
526 /*
527  * Set up a contiguous buffer for the given frame.  Here also is where
528  * the underrun strategy is set: if there is no buffer available, reuse
529  * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
530  * keep the interrupt handler from giving that buffer back to user
531  * space.  In this way, we always have a buffer to DMA to and don't
532  * have to try to play games stopping and restarting the controller.
533  */
534 static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
535 {
536 	struct mcam_vb_buffer *buf;
537 	dma_addr_t dma_handle;
538 	struct vb2_v4l2_buffer *vb;
539 
540 	/*
541 	 * If there are no available buffers, go into single mode
542 	 */
543 	if (list_empty(&cam->buffers)) {
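		/* No queued buffer: reuse the one bound to the other frame slot (frame ^ 1) */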
544 		buf = cam->vb_bufs[frame ^ 0x1];
545 		set_bit(CF_SINGLE_BUFFER, &cam->flags);
546 		cam->frame_state.singles++;
547 	} else {
548 		/*
549 		 * OK, we have a buffer we can use.
550 		 */
551 		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
552 					queue);
553 		list_del_init(&buf->queue);
554 		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
555 	}
556 
557 	cam->vb_bufs[frame] = buf;
558 	vb = &buf->vb_buf;
559 
560 	dma_handle = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);
561 	mcam_write_yuv_bases(cam, frame, dma_handle);
562 }
563 
564 /*
565  * Initial B_DMA_contig setup.
566  */
567 static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
568 {
569 	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
570 	cam->nbufs = 2;
571 	mcam_set_contig_buffer(cam, 0);
572 	mcam_set_contig_buffer(cam, 1);
573 }
574 
575 /*
576  * Frame completion handling.
577  */
578 static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
579 {
580 	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
581 
582 	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
583 		cam->frame_state.delivered++;
584 		cam->vb_bufs[frame] = NULL;
585 		mcam_buffer_done(cam, frame, &buf->vb_buf);
586 	}
587 	mcam_set_contig_buffer(cam, frame);
588 }
589 
590 #endif /* MCAM_MODE_DMA_CONTIG */
591 
592 #ifdef MCAM_MODE_DMA_SG
593 /* ---------------------------------------------------------------------- */
594 /*
595  * Scatter/gather-specific code.
596  */
597 
598 /*
599  * Set up the next buffer for S/G I/O; caller should be sure that
600  * the controller is stopped and a buffer is available.
601  */
602 static void mcam_sg_next_buffer(struct mcam_camera *cam)
603 {
604 	struct mcam_vb_buffer *buf;
605 	struct sg_table *sg_table;
606 
607 	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
608 	list_del_init(&buf->queue);
609 	sg_table = vb2_dma_sg_plane_desc(&buf->vb_buf.vb2_buf, 0);
610 	/*
611 	 * Very Bad Not Good Things happen if you don't clear
612 	 * C1_DESC_ENA before making any descriptor changes.
613 	 */
614 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
615 	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
616 	mcam_reg_write(cam, REG_DESC_LEN_Y,
617 			sg_table->nents * sizeof(struct mcam_dma_desc));
618 	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
619 	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
620 	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
621 	cam->vb_bufs[0] = buf;
622 }
623 
624 /*
625  * Initial B_DMA_sg setup
626  */
627 static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
628 {
629 	/*
630 	 * The list-empty condition can hit us at resume time
631 	 * if the buffer list was empty when the system was suspended.
632 	 */
633 	if (list_empty(&cam->buffers)) {
634 		set_bit(CF_SG_RESTART, &cam->flags);
635 		return;
636 	}
637 
638 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
639 	mcam_sg_next_buffer(cam);
640 	cam->nbufs = 3;
641 }
642 
643 
644 /*
645  * Frame completion with S/G is trickier.  We can't muck with
646  * a descriptor chain on the fly, since the controller buffers it
647  * internally.  So we have to actually stop and restart; Marvell
648  * says this is the way to do it.
649  *
650  * Of course, stopping is easier said than done; experience shows
651  * that the controller can start a frame *after* C0_ENABLE has been
652  * cleared.  So when running in S/G mode, the controller is "stopped"
653  * on receipt of the start-of-frame interrupt.  That means we can
654  * safely change the DMA descriptor array here and restart things
655  * (assuming there's another buffer waiting to go).
656  */
657 static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
658 {
659 	struct mcam_vb_buffer *buf = cam->vb_bufs[0];
660 
661 	/*
662 	 * If we're no longer supposed to be streaming, don't do anything.
663 	 */
664 	if (cam->state != S_STREAMING)
665 		return;
666 	/*
667 	 * If we have another buffer available, put it in and
668 	 * restart the engine.
669 	 */
670 	if (!list_empty(&cam->buffers)) {
671 		mcam_sg_next_buffer(cam);
672 		mcam_ctlr_start(cam);
673 	/*
674 	 * Otherwise set CF_SG_RESTART and the controller will
675 	 * be restarted once another buffer shows up.
676 	 */
677 	} else {
678 		set_bit(CF_SG_RESTART, &cam->flags);
679 		cam->frame_state.singles++;
680 		cam->vb_bufs[0] = NULL;
681 	}
682 	/*
683 	 * Now we can give the completed frame back to user space.
684 	 */
685 	cam->frame_state.delivered++;
686 	mcam_buffer_done(cam, frame, &buf->vb_buf);
687 }
688 
689 
690 /*
691  * Scatter/gather mode requires stopping the controller between
692  * frames so we can put in a new DMA descriptor array.  If no new
693  * buffer exists at frame completion, the controller is left stopped;
694  * this function is charged with getting things going again.
695  */
696 static void mcam_sg_restart(struct mcam_camera *cam)
697 {
698 	mcam_ctlr_dma_sg(cam);
699 	mcam_ctlr_start(cam);
700 	clear_bit(CF_SG_RESTART, &cam->flags);
701 }
702 
703 #else /* MCAM_MODE_DMA_SG */
704 
705 static inline void mcam_sg_restart(struct mcam_camera *cam)
706 {
707 	return;
708 }
709 
710 #endif /* MCAM_MODE_DMA_SG */
711 
712 /* ---------------------------------------------------------------------- */
713 /*
714  * Buffer-mode-independent controller code.
715  */
716 
717 /*
718  * Image format setup
719  */
720 static void mcam_ctlr_image(struct mcam_camera *cam)
721 {
722 	struct v4l2_pix_format *fmt = &cam->pix_format;
723 	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;
724 
725 	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
726 		fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
727 	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
728 	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;
729 
730 	switch (fmt->pixelformat) {
731 	case V4L2_PIX_FMT_YUYV:
732 	case V4L2_PIX_FMT_YVYU:
733 		widthy = fmt->width * 2;
734 		widthuv = 0;
735 		break;
736 	case V4L2_PIX_FMT_YUV420:
737 	case V4L2_PIX_FMT_YVU420:
738 		widthy = fmt->width;
739 		widthuv = fmt->width / 2;
740 		break;
741 	default:
742 		widthy = fmt->bytesperline;
743 		widthuv = 0;
744 		break;
745 	}
746 
747 	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
748 			IMGP_YP_MASK | IMGP_UVP_MASK);
749 	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
750 	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);
751 
752 	/*
753 	 * Tell the controller about the image format we are using.
754 	 */
755 	switch (fmt->pixelformat) {
756 	case V4L2_PIX_FMT_YUV420:
757 	case V4L2_PIX_FMT_YVU420:
758 		mcam_reg_write_mask(cam, REG_CTRL0,
759 			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
760 		break;
761 	case V4L2_PIX_FMT_YUYV:
762 		mcam_reg_write_mask(cam, REG_CTRL0,
763 			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
764 		break;
765 	case V4L2_PIX_FMT_YVYU:
766 		mcam_reg_write_mask(cam, REG_CTRL0,
767 			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
768 		break;
769 	case V4L2_PIX_FMT_XRGB444:
770 		mcam_reg_write_mask(cam, REG_CTRL0,
771 			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XBGR, C0_DF_MASK);
772 		break;
773 	case V4L2_PIX_FMT_RGB565:
774 		mcam_reg_write_mask(cam, REG_CTRL0,
775 			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
776 		break;
777 	case V4L2_PIX_FMT_SBGGR8:
778 		mcam_reg_write_mask(cam, REG_CTRL0,
779 			C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
780 		break;
781 	default:
782 		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
783 		break;
784 	}
785 
786 	/*
787 	 * Make sure it knows we want to use hsync/vsync.
788 	 */
789 	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
790 }
791 
792 
793 /*
794  * Configure the controller for operation; caller holds the
795  * device mutex.
796  */
797 static int mcam_ctlr_configure(struct mcam_camera *cam)
798 {
799 	unsigned long flags;
800 
801 	spin_lock_irqsave(&cam->dev_lock, flags);
802 	clear_bit(CF_SG_RESTART, &cam->flags);
803 	cam->dma_setup(cam);
804 	mcam_ctlr_image(cam);
805 	mcam_set_config_needed(cam, 0);
806 	spin_unlock_irqrestore(&cam->dev_lock, flags);
807 	return 0;
808 }
809 
810 static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
811 {
812 	/*
813 	 * Clear any pending interrupts, since we do not
814 	 * expect to have I/O active prior to enabling.
815 	 */
816 	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
817 	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
818 }
819 
820 static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
821 {
822 	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
823 }
824 
825 /*
826  * Stop the controller, and don't return until we're really sure that no
827  * further DMA is going on.
828  */
829 static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
830 {
831 	unsigned long flags;
832 
833 	/*
834 	 * Theory: stop the camera controller (whether it is operating
835 	 * or not).  Delay briefly just in case we race with the SOF
836 	 * interrupt, then wait until no DMA is active.
837 	 */
838 	spin_lock_irqsave(&cam->dev_lock, flags);
839 	clear_bit(CF_SG_RESTART, &cam->flags);
840 	mcam_ctlr_stop(cam);
841 	cam->state = S_IDLE;
842 	spin_unlock_irqrestore(&cam->dev_lock, flags);
843 	/*
844 	 * This is a brutally long sleep, but experience shows that
845 	 * it can take the controller a while to get the message that
846 	 * it needs to stop grabbing frames.  In particular, we can
847 	 * sometimes (on mmp) get a frame at the end WITHOUT the
848 	 * start-of-frame indication.
849 	 */
850 	msleep(150);
851 	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
852 		cam_err(cam, "Timeout waiting for DMA to end\n");
853 		/* This would be bad news - what now? */
854 	spin_lock_irqsave(&cam->dev_lock, flags);
855 	mcam_ctlr_irq_disable(cam);
856 	spin_unlock_irqrestore(&cam->dev_lock, flags);
857 }
858 
859 /*
860  * Power up and down.
861  */
862 static int mcam_ctlr_power_up(struct mcam_camera *cam)
863 {
864 	unsigned long flags;
865 	int ret;
866 
867 	spin_lock_irqsave(&cam->dev_lock, flags);
868 	if (cam->plat_power_up) {
869 		ret = cam->plat_power_up(cam);
870 		if (ret) {
871 			spin_unlock_irqrestore(&cam->dev_lock, flags);
872 			return ret;
873 		}
874 	}
875 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
876 	spin_unlock_irqrestore(&cam->dev_lock, flags);
877 	return 0;
878 }
879 
880 static void mcam_ctlr_power_down(struct mcam_camera *cam)
881 {
882 	unsigned long flags;
883 
884 	spin_lock_irqsave(&cam->dev_lock, flags);
885 	/*
886 	 * School of hard knocks department: be sure we do any register
887 	 * twiddling on the controller *before* calling the platform
888 	 * power down routine.
889 	 */
890 	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
891 	if (cam->plat_power_down)
892 		cam->plat_power_down(cam);
893 	spin_unlock_irqrestore(&cam->dev_lock, flags);
894 }
895 
896 /* ---------------------------------------------------------------------- */
897 /*
898  * Controller clocks.
899  */
900 static void mcam_clk_enable(struct mcam_camera *mcam)
901 {
902 	unsigned int i;
903 
904 	for (i = 0; i < NR_MCAM_CLK; i++) {
905 		if (!IS_ERR(mcam->clk[i]))
906 			clk_prepare_enable(mcam->clk[i]);
907 	}
908 }
909 
910 static void mcam_clk_disable(struct mcam_camera *mcam)
911 {
912 	int i;
913 
914 	for (i = NR_MCAM_CLK - 1; i >= 0; i--) {
915 		if (!IS_ERR(mcam->clk[i]))
916 			clk_disable_unprepare(mcam->clk[i]);
917 	}
918 }
919 
920 /* ---------------------------------------------------------------------- */
921 /*
922  * Master sensor clock.
923  */
924 static int mclk_prepare(struct clk_hw *hw)
925 {
926 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
927 
928 	clk_prepare(cam->clk[0]);
929 	return 0;
930 }
931 
932 static void mclk_unprepare(struct clk_hw *hw)
933 {
934 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
935 
936 	clk_unprepare(cam->clk[0]);
937 }
938 
939 static int mclk_enable(struct clk_hw *hw)
940 {
941 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
942 	int mclk_src;
943 	int mclk_div;
944 
945 	/*
946 	 * Clock the sensor appropriately.  Controller clock should
947 	 * be 48MHz, sensor "typical" value is half that.
948 	 */
949 	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY) {
950 		mclk_src = cam->mclk_src;
951 		mclk_div = cam->mclk_div;
952 	} else {
953 		mclk_src = 3;
954 		mclk_div = 2;
955 	}
956 
957 	clk_enable(cam->clk[0]);
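	/* Clock-source select lives in bits 31:29 of CLKCTRL; the divider occupies the low bits */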
958 	mcam_reg_write(cam, REG_CLKCTRL, (mclk_src << 29) | mclk_div);
959 	mcam_ctlr_power_up(cam);
960 
961 	return 0;
962 }
963 
964 static void mclk_disable(struct clk_hw *hw)
965 {
966 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
967 
968 	mcam_ctlr_power_down(cam);
969 	clk_disable(cam->clk[0]);
970 }
971 
972 static unsigned long mclk_recalc_rate(struct clk_hw *hw,
973 				unsigned long parent_rate)
974 {
975 	return 48000000;
976 }
977 
978 static const struct clk_ops mclk_ops = {
979 	.prepare = mclk_prepare,
980 	.unprepare = mclk_unprepare,
981 	.enable = mclk_enable,
982 	.disable = mclk_disable,
983 	.recalc_rate = mclk_recalc_rate,
984 };
985 
986 /* -------------------------------------------------------------------- */
987 /*
988  * Communications with the sensor.
989  */
990 
991 static int __mcam_cam_reset(struct mcam_camera *cam)
992 {
993 	return sensor_call(cam, core, reset, 0);
994 }
995 
996 /*
997  * We have found the sensor on the i2c.  Let's try to have a
998  * conversation.
999  */
1000 static int mcam_cam_init(struct mcam_camera *cam)
1001 {
1002 	int ret;
1003 
1004 	if (cam->state != S_NOTREADY)
1005 		cam_warn(cam, "Cam init with device in funky state %d",
1006 				cam->state);
1007 	ret = __mcam_cam_reset(cam);
1008 	/* Get/set parameters? */
1009 	cam->state = S_IDLE;
1010 	return ret;
1011 }
1012 
1013 /*
1014  * Configure the sensor to match the parameters we have.  Caller should
1015  * hold s_mutex
1016  */
1017 static int mcam_cam_set_flip(struct mcam_camera *cam)
1018 {
1019 	struct v4l2_control ctrl;
1020 
1021 	memset(&ctrl, 0, sizeof(ctrl));
1022 	ctrl.id = V4L2_CID_VFLIP;
1023 	ctrl.value = flip;
1024 	return v4l2_s_ctrl(NULL, cam->sensor->ctrl_handler, &ctrl);
1025 }
1026 
1027 
1028 static int mcam_cam_configure(struct mcam_camera *cam)
1029 {
1030 	struct v4l2_subdev_format format = {
1031 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1032 	};
1033 	int ret;
1034 
1035 	v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
1036 	ret = sensor_call(cam, core, init, 0);
1037 	if (ret == 0)
1038 		ret = sensor_call(cam, pad, set_fmt, NULL, &format);
1039 	/*
1040 	 * OV7670 does weird things if flip is set *before* format...
1041 	 */
1042 	ret += mcam_cam_set_flip(cam);
1043 	return ret;
1044 }
1045 
1046 /*
1047  * Get everything ready, and start grabbing frames.
1048  */
1049 static int mcam_read_setup(struct mcam_camera *cam)
1050 {
1051 	int ret;
1052 	unsigned long flags;
1053 
1054 	/*
1055 	 * Configuration.  If we still don't have DMA buffers,
1056 	 * make one last, desperate attempt.
1057 	 */
1058 	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
1059 			mcam_alloc_dma_bufs(cam, 0))
1060 		return -ENOMEM;
1061 
1062 	if (mcam_needs_config(cam)) {
1063 		mcam_cam_configure(cam);
1064 		ret = mcam_ctlr_configure(cam);
1065 		if (ret)
1066 			return ret;
1067 	}
1068 
1069 	/*
1070 	 * Turn it loose.
1071 	 */
1072 	spin_lock_irqsave(&cam->dev_lock, flags);
1073 	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1074 	mcam_reset_buffers(cam);
1075 	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY)
1076 		mcam_enable_mipi(cam);
1077 	else
1078 		mcam_disable_mipi(cam);
1079 	mcam_ctlr_irq_enable(cam);
1080 	cam->state = S_STREAMING;
1081 	if (!test_bit(CF_SG_RESTART, &cam->flags))
1082 		mcam_ctlr_start(cam);
1083 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1084 	return 0;
1085 }
1086 
1087 /* ----------------------------------------------------------------------- */
1088 /*
1089  * Videobuf2 interface code.
1090  */
1091 
1092 static int mcam_vb_queue_setup(struct vb2_queue *vq,
1093 		unsigned int *nbufs,
1094 		unsigned int *num_planes, unsigned int sizes[],
1095 		struct device *alloc_devs[])
1096 {
1097 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
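	/* DMA-contig keeps two buffers programmed into the controller at all times, hence the higher minimum */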
1098 	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1099 	unsigned size = cam->pix_format.sizeimage;
1100 
1101 	if (*nbufs < minbufs)
1102 		*nbufs = minbufs;
1103 
1104 	if (*num_planes)
1105 		return sizes[0] < size ? -EINVAL : 0;
1106 	sizes[0] = size;
1107 	*num_planes = 1; /* Someday we will have to support planar formats... */
1108 	return 0;
1109 }
1110 
1111 
1112 static void mcam_vb_buf_queue(struct vb2_buffer *vb)
1113 {
1114 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1115 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1116 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1117 	unsigned long flags;
1118 	int start;
1119 
1120 	spin_lock_irqsave(&cam->dev_lock, flags);
1121 	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
1122 	list_add(&mvb->queue, &cam->buffers);
1123 	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
1124 		mcam_sg_restart(cam);
1125 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1126 	if (start)
1127 		mcam_read_setup(cam);
1128 }
1129 
1130 static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
1131 				 enum vb2_buffer_state state)
1132 {
1133 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1134 	struct mcam_vb_buffer *buf, *node;
1135 	unsigned long flags;
1136 	unsigned i;
1137 
1138 	spin_lock_irqsave(&cam->dev_lock, flags);
1139 	list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
1140 		vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1141 		list_del(&buf->queue);
1142 	}
1143 	for (i = 0; i < MAX_DMA_BUFS; i++) {
1144 		buf = cam->vb_bufs[i];
1145 
1146 		if (buf) {
1147 			vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1148 			cam->vb_bufs[i] = NULL;
1149 		}
1150 	}
1151 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1152 }
1153 
1154 /*
1155  * These need to be called with the mutex held from vb2
1156  */
1157 static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
1158 {
1159 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1160 	unsigned int frame;
1161 	int ret;
1162 
1163 	if (cam->state != S_IDLE) {
1164 		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
1165 		return -EINVAL;
1166 	}
1167 	cam->frame_state.frames = 0;
1168 	cam->frame_state.singles = 0;
1169 	cam->frame_state.delivered = 0;
1170 	cam->sequence = 0;
1171 	/*
1172 	 * Videobuf2 sneakily hoards all the buffers and won't
1173 	 * give them to us until *after* streaming starts.  But
1174 	 * we can't actually start streaming until we have a
1175 	 * destination.  So go into a wait state and hope they
1176 	 * give us buffers soon.
1177 	 */
1178 	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
1179 		cam->state = S_BUFWAIT;
1180 		return 0;
1181 	}
1182 
1183 	/*
1184 	 * Make sure that any leftover frame flags are cleared
1185 	 * before we really start streaming.
1186 	 */
1187 	for (frame = 0; frame < cam->nbufs; frame++)
1188 		clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1189 
1190 	ret = mcam_read_setup(cam);
1191 	if (ret)
1192 		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
1193 	return ret;
1194 }
1195 
1196 static void mcam_vb_stop_streaming(struct vb2_queue *vq)
1197 {
1198 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1199 
1200 	cam_dbg(cam, "stop_streaming: %d frames, %d singles, %d delivered\n",
1201 			cam->frame_state.frames, cam->frame_state.singles,
1202 			cam->frame_state.delivered);
1203 	if (cam->state == S_BUFWAIT) {
1204 		/* They never gave us buffers */
1205 		cam->state = S_IDLE;
1206 		return;
1207 	}
1208 	if (cam->state != S_STREAMING)
1209 		return;
1210 	mcam_ctlr_stop_dma(cam);
1211 	/*
1212 	 * VB2 reclaims the buffers, so we need to forget
1213 	 * about them.
1214 	 */
1215 	mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_ERROR);
1216 }
1217 
1218 
1219 static const struct vb2_ops mcam_vb2_ops = {
1220 	.queue_setup		= mcam_vb_queue_setup,
1221 	.buf_queue		= mcam_vb_buf_queue,
1222 	.start_streaming	= mcam_vb_start_streaming,
1223 	.stop_streaming		= mcam_vb_stop_streaming,
1224 	.wait_prepare		= vb2_ops_wait_prepare,
1225 	.wait_finish		= vb2_ops_wait_finish,
1226 };
1227 
1228 
1229 #ifdef MCAM_MODE_DMA_SG
1230 /*
1231  * Scatter/gather mode uses all of the above functions plus a
1232  * few extras to deal with DMA mapping.
1233  */
1234 static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
1235 {
1236 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1237 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1238 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
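	/* Worst case: one DMA descriptor per page of image data, plus one for any remainder */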
1239 	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1240 
1241 	mvb->dma_desc = dma_alloc_coherent(cam->dev,
1242 			ndesc * sizeof(struct mcam_dma_desc),
1243 			&mvb->dma_desc_pa, GFP_KERNEL);
1244 	if (mvb->dma_desc == NULL) {
1245 		cam_err(cam, "Unable to get DMA descriptor array\n");
1246 		return -ENOMEM;
1247 	}
1248 	return 0;
1249 }
1250 
1251 static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1252 {
1253 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1254 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1255 	struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1256 	struct mcam_dma_desc *desc = mvb->dma_desc;
1257 	struct scatterlist *sg;
1258 	int i;
1259 
1260 	for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
1261 		desc->dma_addr = sg_dma_address(sg);
1262 		desc->segment_len = sg_dma_len(sg);
1263 		desc++;
1264 	}
1265 	return 0;
1266 }
1267 
1268 static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
1269 {
1270 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1271 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1272 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1273 	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1274 
1275 	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
1276 			mvb->dma_desc, mvb->dma_desc_pa);
1277 }
1278 
1279 
1280 static const struct vb2_ops mcam_vb2_sg_ops = {
1281 	.queue_setup		= mcam_vb_queue_setup,
1282 	.buf_init		= mcam_vb_sg_buf_init,
1283 	.buf_prepare		= mcam_vb_sg_buf_prepare,
1284 	.buf_queue		= mcam_vb_buf_queue,
1285 	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
1286 	.start_streaming	= mcam_vb_start_streaming,
1287 	.stop_streaming		= mcam_vb_stop_streaming,
1288 	.wait_prepare		= vb2_ops_wait_prepare,
1289 	.wait_finish		= vb2_ops_wait_finish,
1290 };
1291 
1292 #endif /* MCAM_MODE_DMA_SG */
1293 
1294 static int mcam_setup_vb2(struct mcam_camera *cam)
1295 {
1296 	struct vb2_queue *vq = &cam->vb_queue;
1297 
1298 	memset(vq, 0, sizeof(*vq));
1299 	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1300 	vq->drv_priv = cam;
1301 	vq->lock = &cam->s_mutex;
1302 	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
1303 	vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
1304 	vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1305 	vq->dev = cam->dev;
1306 	INIT_LIST_HEAD(&cam->buffers);
1307 	switch (cam->buffer_mode) {
1308 	case B_DMA_contig:
1309 #ifdef MCAM_MODE_DMA_CONTIG
1310 		vq->ops = &mcam_vb2_ops;
1311 		vq->mem_ops = &vb2_dma_contig_memops;
1312 		cam->dma_setup = mcam_ctlr_dma_contig;
1313 		cam->frame_complete = mcam_dma_contig_done;
1314 #endif
1315 		break;
1316 	case B_DMA_sg:
1317 #ifdef MCAM_MODE_DMA_SG
1318 		vq->ops = &mcam_vb2_sg_ops;
1319 		vq->mem_ops = &vb2_dma_sg_memops;
1320 		cam->dma_setup = mcam_ctlr_dma_sg;
1321 		cam->frame_complete = mcam_dma_sg_done;
1322 #endif
1323 		break;
1324 	case B_vmalloc:
1325 #ifdef MCAM_MODE_VMALLOC
1326 		tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
1327 				(unsigned long) cam);
1328 		vq->ops = &mcam_vb2_ops;
1329 		vq->mem_ops = &vb2_vmalloc_memops;
1330 		cam->dma_setup = mcam_ctlr_dma_vmalloc;
1331 		cam->frame_complete = mcam_vmalloc_done;
1332 #endif
1333 		break;
1334 	}
1335 	return vb2_queue_init(vq);
1336 }
1337 
1338 
1339 /* ---------------------------------------------------------------------- */
1340 /*
1341  * The long list of V4L2 ioctl() operations.
1342  */
1343 
1344 static int mcam_vidioc_querycap(struct file *file, void *priv,
1345 		struct v4l2_capability *cap)
1346 {
1347 	struct mcam_camera *cam = video_drvdata(file);
1348 
1349 	strscpy(cap->driver, "marvell_ccic", sizeof(cap->driver));
1350 	strscpy(cap->card, "marvell_ccic", sizeof(cap->card));
1351 	strscpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
1352 	return 0;
1353 }
1354 
1355 
1356 static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1357 		void *priv, struct v4l2_fmtdesc *fmt)
1358 {
1359 	if (fmt->index >= N_MCAM_FMTS)
1360 		return -EINVAL;
1361 	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1362 	return 0;
1363 }
1364 
1365 static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
1366 		struct v4l2_format *fmt)
1367 {
1368 	struct mcam_camera *cam = video_drvdata(filp);
1369 	struct mcam_format_struct *f;
1370 	struct v4l2_pix_format *pix = &fmt->fmt.pix;
1371 	struct v4l2_subdev_pad_config pad_cfg;
1372 	struct v4l2_subdev_format format = {
1373 		.which = V4L2_SUBDEV_FORMAT_TRY,
1374 	};
1375 	int ret;
1376 
1377 	f = mcam_find_format(pix->pixelformat);
1378 	pix->pixelformat = f->pixelformat;
1379 	v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
1380 	ret = sensor_call(cam, pad, set_fmt, &pad_cfg, &format);
1381 	v4l2_fill_pix_format(pix, &format.format);
1382 	pix->bytesperline = pix->width * f->bpp;
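	/* For planar 4:2:0 the two quarter-size chroma planes add 50% on top of the Y plane */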
1383 	switch (f->pixelformat) {
1384 	case V4L2_PIX_FMT_YUV420:
1385 	case V4L2_PIX_FMT_YVU420:
1386 		pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
1387 		break;
1388 	default:
1389 		pix->sizeimage = pix->height * pix->bytesperline;
1390 		break;
1391 	}
1392 	pix->colorspace = V4L2_COLORSPACE_SRGB;
1393 	return ret;
1394 }
1395 
1396 static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
1397 		struct v4l2_format *fmt)
1398 {
1399 	struct mcam_camera *cam = video_drvdata(filp);
1400 	struct mcam_format_struct *f;
1401 	int ret;
1402 
1403 	/*
1404 	 * Can't do anything if the device is not idle
1405 	 * Also can't if there are streaming buffers in place.
1406 	 */
1407 	if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
1408 		return -EBUSY;
1409 
1410 	f = mcam_find_format(fmt->fmt.pix.pixelformat);
1411 
1412 	/*
1413 	 * See if the formatting works in principle.
1414 	 */
1415 	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
1416 	if (ret)
1417 		return ret;
1418 	/*
1419 	 * Now we start to change things for real, so let's do it
1420 	 * under lock.
1421 	 */
1422 	cam->pix_format = fmt->fmt.pix;
1423 	cam->mbus_code = f->mbus_code;
1424 
1425 	/*
1426 	 * Make sure we have appropriate DMA buffers.
1427 	 */
1428 	if (cam->buffer_mode == B_vmalloc) {
1429 		ret = mcam_check_dma_buffers(cam);
1430 		if (ret)
1431 			goto out;
1432 	}
1433 	mcam_set_config_needed(cam, 1);
1434 out:
1435 	return ret;
1436 }
1437 
1438 /*
1439  * Return our stored notion of how the camera is/should be configured.
1440  * The V4L2 spec wants us to be smarter, and actually get this from
1441  * the camera (and not mess with it at open time).  Someday.
1442  */
1443 static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
1444 		struct v4l2_format *f)
1445 {
1446 	struct mcam_camera *cam = video_drvdata(filp);
1447 
1448 	f->fmt.pix = cam->pix_format;
1449 	return 0;
1450 }
1451 
1452 /*
1453  * We only have one input - the sensor - so minimize the nonsense here.
1454  */
1455 static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1456 		struct v4l2_input *input)
1457 {
1458 	if (input->index != 0)
1459 		return -EINVAL;
1460 
1461 	input->type = V4L2_INPUT_TYPE_CAMERA;
1462 	strscpy(input->name, "Camera", sizeof(input->name));
1463 	return 0;
1464 }
1465 
1466 static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
1467 {
1468 	*i = 0;
1469 	return 0;
1470 }
1471 
1472 static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1473 {
1474 	if (i != 0)
1475 		return -EINVAL;
1476 	return 0;
1477 }
1478 
1479 /*
1480  * G/S_PARM.  Most of this is done by the sensor, but we are
1481  * the level which controls the number of read buffers.
1482  */
1483 static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1484 		struct v4l2_streamparm *a)
1485 {
1486 	struct mcam_camera *cam = video_drvdata(filp);
1487 	int ret;
1488 
1489 	ret = v4l2_g_parm_cap(video_devdata(filp), cam->sensor, a);
1490 	a->parm.capture.readbuffers = n_dma_bufs;
1491 	return ret;
1492 }
1493 
1494 static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1495 		struct v4l2_streamparm *a)
1496 {
1497 	struct mcam_camera *cam = video_drvdata(filp);
1498 	int ret;
1499 
1500 	ret = v4l2_s_parm_cap(video_devdata(filp), cam->sensor, a);
1501 	a->parm.capture.readbuffers = n_dma_bufs;
1502 	return ret;
1503 }
1504 
1505 static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
1506 		struct v4l2_frmsizeenum *sizes)
1507 {
1508 	struct mcam_camera *cam = video_drvdata(filp);
1509 	struct mcam_format_struct *f;
1510 	struct v4l2_subdev_frame_size_enum fse = {
1511 		.index = sizes->index,
1512 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1513 	};
1514 	int ret;
1515 
1516 	f = mcam_find_format(sizes->pixel_format);
1517 	if (f->pixelformat != sizes->pixel_format)
1518 		return -EINVAL;
1519 	fse.code = f->mbus_code;
1520 	ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
1521 	if (ret)
1522 		return ret;
1523 	if (fse.min_width == fse.max_width &&
1524 	    fse.min_height == fse.max_height) {
1525 		sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1526 		sizes->discrete.width = fse.min_width;
1527 		sizes->discrete.height = fse.min_height;
1528 		return 0;
1529 	}
1530 	sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
1531 	sizes->stepwise.min_width = fse.min_width;
1532 	sizes->stepwise.max_width = fse.max_width;
1533 	sizes->stepwise.min_height = fse.min_height;
1534 	sizes->stepwise.max_height = fse.max_height;
1535 	sizes->stepwise.step_width = 1;
1536 	sizes->stepwise.step_height = 1;
1537 	return 0;
1538 }
1539 
1540 static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
1541 		struct v4l2_frmivalenum *interval)
1542 {
1543 	struct mcam_camera *cam = video_drvdata(filp);
1544 	struct mcam_format_struct *f;
1545 	struct v4l2_subdev_frame_interval_enum fie = {
1546 		.index = interval->index,
1547 		.width = interval->width,
1548 		.height = interval->height,
1549 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1550 	};
1551 	int ret;
1552 
1553 	f = mcam_find_format(interval->pixel_format);
1554 	if (f->pixelformat != interval->pixel_format)
1555 		return -EINVAL;
1556 	fie.code = f->mbus_code;
1557 	ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
1558 	if (ret)
1559 		return ret;
1560 	interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1561 	interval->discrete = fie.interval;
1562 	return 0;
1563 }
1564 
1565 #ifdef CONFIG_VIDEO_ADV_DEBUG
1566 static int mcam_vidioc_g_register(struct file *file, void *priv,
1567 		struct v4l2_dbg_register *reg)
1568 {
1569 	struct mcam_camera *cam = video_drvdata(file);
1570 
1571 	if (reg->reg > cam->regs_size - 4)
1572 		return -EINVAL;
1573 	reg->val = mcam_reg_read(cam, reg->reg);
1574 	reg->size = 4;
1575 	return 0;
1576 }
1577 
1578 static int mcam_vidioc_s_register(struct file *file, void *priv,
1579 		const struct v4l2_dbg_register *reg)
1580 {
1581 	struct mcam_camera *cam = video_drvdata(file);
1582 
1583 	if (reg->reg > cam->regs_size - 4)
1584 		return -EINVAL;
1585 	mcam_reg_write(cam, reg->reg, reg->val);
1586 	return 0;
1587 }
1588 #endif
1589 
1590 static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
1591 	.vidioc_querycap	= mcam_vidioc_querycap,
1592 	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
1593 	.vidioc_try_fmt_vid_cap	= mcam_vidioc_try_fmt_vid_cap,
1594 	.vidioc_s_fmt_vid_cap	= mcam_vidioc_s_fmt_vid_cap,
1595 	.vidioc_g_fmt_vid_cap	= mcam_vidioc_g_fmt_vid_cap,
1596 	.vidioc_enum_input	= mcam_vidioc_enum_input,
1597 	.vidioc_g_input		= mcam_vidioc_g_input,
1598 	.vidioc_s_input		= mcam_vidioc_s_input,
1599 	.vidioc_reqbufs		= vb2_ioctl_reqbufs,
1600 	.vidioc_create_bufs	= vb2_ioctl_create_bufs,
1601 	.vidioc_querybuf	= vb2_ioctl_querybuf,
1602 	.vidioc_qbuf		= vb2_ioctl_qbuf,
1603 	.vidioc_dqbuf		= vb2_ioctl_dqbuf,
1604 	.vidioc_expbuf		= vb2_ioctl_expbuf,
1605 	.vidioc_streamon	= vb2_ioctl_streamon,
1606 	.vidioc_streamoff	= vb2_ioctl_streamoff,
1607 	.vidioc_g_parm		= mcam_vidioc_g_parm,
1608 	.vidioc_s_parm		= mcam_vidioc_s_parm,
1609 	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
1610 	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
1611 	.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
1612 	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1613 #ifdef CONFIG_VIDEO_ADV_DEBUG
1614 	.vidioc_g_register	= mcam_vidioc_g_register,
1615 	.vidioc_s_register	= mcam_vidioc_s_register,
1616 #endif
1617 };
1618 
1619 /* ---------------------------------------------------------------------- */
1620 /*
1621  * Our various file operations.
1622  */
1623 static int mcam_v4l_open(struct file *filp)
1624 {
1625 	struct mcam_camera *cam = video_drvdata(filp);
1626 	int ret;
1627 
1628 	mutex_lock(&cam->s_mutex);
1629 	ret = v4l2_fh_open(filp);
1630 	if (ret)
1631 		goto out;
1632 	if (v4l2_fh_is_singular_file(filp)) {
1633 		ret = sensor_call(cam, core, s_power, 1);
1634 		if (ret)
1635 			goto out;
1636 		mcam_clk_enable(cam);
1637 		__mcam_cam_reset(cam);
1638 		mcam_set_config_needed(cam, 1);
1639 	}
1640 out:
1641 	mutex_unlock(&cam->s_mutex);
1642 	if (ret)
1643 		v4l2_fh_release(filp);
1644 	return ret;
1645 }
1646 
1647 
1648 static int mcam_v4l_release(struct file *filp)
1649 {
1650 	struct mcam_camera *cam = video_drvdata(filp);
1651 	bool last_open;
1652 
1653 	mutex_lock(&cam->s_mutex);
1654 	last_open = v4l2_fh_is_singular_file(filp);
1655 	_vb2_fop_release(filp, NULL);
1656 	if (last_open) {
1657 		mcam_disable_mipi(cam);
1658 		sensor_call(cam, core, s_power, 0);
1659 		mcam_clk_disable(cam);
1660 		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
1661 			mcam_free_dma_bufs(cam);
1662 	}
1663 
1664 	mutex_unlock(&cam->s_mutex);
1665 	return 0;
1666 }
1667 
1668 static const struct v4l2_file_operations mcam_v4l_fops = {
1669 	.owner = THIS_MODULE,
1670 	.open = mcam_v4l_open,
1671 	.release = mcam_v4l_release,
1672 	.read = vb2_fop_read,
1673 	.poll = vb2_fop_poll,
1674 	.mmap = vb2_fop_mmap,
1675 	.unlocked_ioctl = video_ioctl2,
1676 };
1677 
1678 
1679 /*
1680  * This template device holds all of those v4l2 methods; we
1681  * clone it for specific real devices.
1682  */
1683 static const struct video_device mcam_v4l_template = {
1684 	.name = "mcam",
1685 	.fops = &mcam_v4l_fops,
1686 	.ioctl_ops = &mcam_v4l_ioctl_ops,
1687 	.release = video_device_release_empty,
1688 	.device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_READWRITE |
1689 		       V4L2_CAP_STREAMING,
1690 };
1691 
1692 /* ---------------------------------------------------------------------- */
1693 /*
1694  * Interrupt handler stuff
1695  */
1696 static void mcam_frame_complete(struct mcam_camera *cam, int frame)
1697 {
1698 	/*
1699 	 * Basic frame housekeeping.
1700 	 */
1701 	set_bit(frame, &cam->flags);
1702 	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1703 	cam->next_buf = frame;
1704 	cam->buf_seq[frame] = cam->sequence++;
1705 	cam->frame_state.frames++;
1706 	/*
1707 	 * "This should never happen"
1708 	 */
1709 	if (cam->state != S_STREAMING)
1710 		return;
1711 	/*
1712 	 * Process the frame and set up the next one.
1713 	 */
1714 	cam->frame_complete(cam, frame);
1715 }
1716 
1717 
1718 /*
1719  * The interrupt handler; this needs to be called from the
1720  * platform irq handler with the lock held.
1721  */
1722 int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1723 {
1724 	unsigned int frame, handled = 0;
1725 
1726 	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1727 	/*
1728 	 * Handle any frame completions.  There really should
1729 	 * not be more than one of these, or we have fallen
1730 	 * far behind.
1731 	 *
1732 	 * When running in S/G mode, the frame number lacks any
1733 	 * real meaning - there's only one descriptor array - but
1734 	 * the controller still picks a different one to signal
1735 	 * each time.
1736 	 */
1737 	for (frame = 0; frame < cam->nbufs; frame++)
1738 		if (irqs & (IRQ_EOF0 << frame) &&
1739 			test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1740 			mcam_frame_complete(cam, frame);
1741 			handled = 1;
1742 			clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1743 			if (cam->buffer_mode == B_DMA_sg)
1744 				break;
1745 		}
1746 	/*
1747 	 * If a frame starts, note that we have DMA active.  This
1748 	 * code assumes that we won't get multiple frame interrupts
1749 	 * at once; may want to rethink that.
1750 	 */
1751 	for (frame = 0; frame < cam->nbufs; frame++) {
1752 		if (irqs & (IRQ_SOF0 << frame)) {
1753 			set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1754 			handled = IRQ_HANDLED;
1755 		}
1756 	}
1757 
1758 	if (handled == IRQ_HANDLED) {
1759 		set_bit(CF_DMA_ACTIVE, &cam->flags);
1760 		if (cam->buffer_mode == B_DMA_sg)
1761 			mcam_ctlr_stop(cam);
1762 	}
1763 	return handled;
1764 }
1765 EXPORT_SYMBOL_GPL(mccic_irq);
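
/*
 * For reference, a platform glue driver is expected to hook its IRQ up
 * roughly as sketched below.  This is illustrative only - the handler
 * name is made up, and it assumes the cam->dev_lock spinlock and the
 * mcam_reg_read()/REG_IRQSTAT helpers declared in mcam-core.h:
 *
 *	static irqreturn_t mcam_plat_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *cam = data;
 *		unsigned int irqs, handled;
 *
 *		spin_lock(&cam->dev_lock);
 *		irqs = mcam_reg_read(cam, REG_IRQSTAT);
 *		handled = mccic_irq(cam, irqs);
 *		spin_unlock(&cam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */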

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */

static int mccic_notify_bound(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->sensor) {
		cam_err(cam, "sensor already bound\n");
		ret = -EBUSY;
		goto out;
	}

	v4l2_set_subdev_hostdata(subdev, cam);
	cam->sensor = subdev;

	ret = mcam_cam_init(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	ret = mcam_setup_vb2(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam->vdev = mcam_v4l_template;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	cam->vdev.lock = &cam->s_mutex;
	cam->vdev.queue = &cam->vb_queue;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam_dbg(cam, "sensor %s bound\n", subdev->name);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static void mccic_notify_unbind(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);

	mutex_lock(&cam->s_mutex);
	if (cam->sensor != subdev) {
		cam_err(cam, "sensor %s not bound\n", subdev->name);
		goto out;
	}

	video_unregister_device(&cam->vdev);
	cam->sensor = NULL;
	cam_dbg(cam, "sensor %s unbound\n", subdev->name);

out:
	mutex_unlock(&cam->s_mutex);
}

static int mccic_notify_complete(struct v4l2_async_notifier *notifier)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	/*
	 * Get the v4l2 setup done.
	 */
	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
	if (!ret)
		cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;

	return ret;
}

static const struct v4l2_async_notifier_operations mccic_notify_ops = {
	.bound = mccic_notify_bound,
	.unbind = mccic_notify_unbind,
	.complete = mccic_notify_complete,
};

int mccic_register(struct mcam_camera *cam)
{
	struct clk_init_data mclk_init = { };
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}

	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		ret = -EINVAL;
		goto out;
	}

	/*
	 * Register with V4L
	 */
	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
	if (ret)
		goto out;

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;

	/*
	 * Register sensor notifier.
	 */
	v4l2_async_notifier_init(&cam->notifier);
	ret = v4l2_async_notifier_add_subdev(&cam->notifier, &cam->asd);
	if (ret) {
		cam_warn(cam, "failed to add subdev to a notifier");
		goto out;
	}

	cam->notifier.ops = &mccic_notify_ops;
	ret = v4l2_async_notifier_register(&cam->v4l2_dev, &cam->notifier);
	if (ret < 0) {
		cam_warn(cam, "failed to register a sensor notifier");
		goto out;
	}

	/*
	 * Register sensor master clock.
	 */
	mclk_init.parent_names = NULL;
	mclk_init.num_parents = 0;
	mclk_init.ops = &mclk_ops;
	mclk_init.name = "mclk";

	of_property_read_string(cam->dev->of_node, "clock-output-names",
							&mclk_init.name);

	cam->mclk_hw.init = &mclk_init;

	cam->mclk = devm_clk_register(cam->dev, &cam->mclk_hw);
	if (IS_ERR(cam->mclk)) {
		ret = PTR_ERR(cam->mclk);
		dev_err(cam->dev, "can't register clock\n");
		goto out;
	}

	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load; will try again later.");
	}

	return 0;

out:
	v4l2_async_notifier_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_register);
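
/*
 * mccic_register() is meant to be called from the platform driver's
 * probe routine, after that driver has filled in the mcam_camera
 * fields it owns.  A rough, illustrative sketch only - the async
 * subdev match setup varies per platform and the values shown are
 * placeholders, not requirements of this core:
 *
 *	cam->dev = dev;				(the underlying struct device)
 *	cam->chip_id = MCAM_CAFE;
 *	cam->buffer_mode = B_vmalloc;
 *	cam->asd.match_type = V4L2_ASYNC_MATCH_I2C;
 *	cam->asd.match.i2c.adapter_id = adapter_id;
 *	cam->asd.match.i2c.address = sensor_addr;
 *	ret = mccic_register(cam);
 */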

void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down.  Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (!list_empty(&cam->vdev.fh_list)) {
		cam_warn(cam, "Removing a device with users!\n");
		sensor_call(cam, core, s_power, 0);
	}
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_async_notifier_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
}
EXPORT_SYMBOL_GPL(mccic_shutdown);
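
/*
 * The platform driver's teardown path is expected to mirror
 * registration.  A minimal sketch, assuming the mcam_camera was
 * stashed with platform_set_drvdata() (the function name is invented):
 *
 *	static int mcam_plat_remove(struct platform_device *pdev)
 *	{
 *		struct mcam_camera *cam = platform_get_drvdata(pdev);
 *
 *		mccic_shutdown(cam);
 *		return 0;
 *	}
 */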

/*
 * Power management
 */
#ifdef CONFIG_PM

void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		sensor_call(cam, core, s_power, 0);
		mcam_clk_disable(cam);
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}
EXPORT_SYMBOL_GPL(mccic_suspend);

int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		mcam_clk_enable(cam);
		ret = sensor_call(cam, core, s_power, 1);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		sensor_call(cam, core, s_power, 0);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_resume);
#endif /* CONFIG_PM */
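
/*
 * A platform driver would typically wire these helpers into its
 * dev_pm_ops along these lines.  Sketch only - it assumes the
 * mcam_camera is what was stored as driver data, and the wrapper
 * names are invented:
 *
 *	static int __maybe_unused mcam_plat_suspend(struct device *dev)
 *	{
 *		mccic_suspend(dev_get_drvdata(dev));
 *		return 0;
 *	}
 *
 *	static int __maybe_unused mcam_plat_resume(struct device *dev)
 *	{
 *		return mccic_resume(dev_get_drvdata(dev));
 *	}
 *
 *	static SIMPLE_DEV_PM_OPS(mcam_plat_pm_ops,
 *				 mcam_plat_suspend, mcam_plat_resume);
 */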

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Jonathan Corbet <corbet@lwn.net>");