1 /*
2  * The Marvell camera core.  This device appears in a number of settings,
3  * so it needs platform-specific support outside of the core.
4  *
5  * Copyright 2011 Jonathan Corbet corbet@lwn.net
6  */
7 #include <linux/kernel.h>
8 #include <linux/module.h>
9 #include <linux/fs.h>
10 #include <linux/mm.h>
11 #include <linux/i2c.h>
12 #include <linux/interrupt.h>
13 #include <linux/spinlock.h>
14 #include <linux/slab.h>
15 #include <linux/device.h>
16 #include <linux/wait.h>
17 #include <linux/list.h>
18 #include <linux/dma-mapping.h>
19 #include <linux/delay.h>
20 #include <linux/vmalloc.h>
21 #include <linux/io.h>
22 #include <linux/clk.h>
23 #include <linux/videodev2.h>
24 #include <media/v4l2-device.h>
25 #include <media/v4l2-ioctl.h>
26 #include <media/v4l2-ctrls.h>
27 #include <media/ov7670.h>
28 #include <media/videobuf2-vmalloc.h>
29 #include <media/videobuf2-dma-contig.h>
30 #include <media/videobuf2-dma-sg.h>
31 
32 #include "mcam-core.h"
33 
34 #ifdef MCAM_MODE_VMALLOC
35 /*
36  * Internal DMA buffer management.  Since the controller cannot do S/G I/O,
37  * we must have physically contiguous buffers to bring frames into.
38  * These parameters control how many buffers we use, whether we
39  * allocate them at load time (better chance of success, but nails down
40  * memory) or when somebody tries to use the camera (riskier), and,
41  * for load-time allocation, how big they should be.
42  *
43  * The controller can cycle through three buffers.  We could use
44  * more by flipping pointers around, but it probably makes little
45  * sense.
46  */
47 
48 static bool alloc_bufs_at_read;
49 module_param(alloc_bufs_at_read, bool, 0444);
50 MODULE_PARM_DESC(alloc_bufs_at_read,
51 		"Non-zero value causes DMA buffers to be allocated when the "
52 		"video capture device is read, rather than at module load "
53 		"time.  This saves memory, but decreases the chances of "
54 		"successfully getting those buffers.  This parameter is "
55 		"only used in the vmalloc buffer mode");
56 
57 static int n_dma_bufs = 3;
58 module_param(n_dma_bufs, uint, 0644);
59 MODULE_PARM_DESC(n_dma_bufs,
60 		"The number of DMA buffers to allocate.  Can be either two "
61 		"(saves memory, makes timing tighter) or three.");
62 
63 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;  /* Worst case */
64 module_param(dma_buf_size, uint, 0444);
65 MODULE_PARM_DESC(dma_buf_size,
66 		"The size of the allocated DMA buffers.  If actual operating "
67 		"parameters require larger buffers, an attempt to reallocate "
68 		"will be made.");
69 #else /* MCAM_MODE_VMALLOC */
70 static const bool alloc_bufs_at_read;
71 static const int n_dma_bufs = 3;  /* Used by S/G_PARM */
72 #endif /* MCAM_MODE_VMALLOC */
73 
74 static bool flip;
75 module_param(flip, bool, 0444);
76 MODULE_PARM_DESC(flip,
77 		"If set, the sensor will be instructed to flip the image "
78 		"vertically.");
79 
80 static int buffer_mode = -1;
81 module_param(buffer_mode, int, 0444);
82 MODULE_PARM_DESC(buffer_mode,
83 		"Set the buffer mode to be used; default is to go with what "
84 		"the platform driver asks for.  Set to 0 for vmalloc, 1 for "
85 		"DMA contiguous.");
86 
87 /*
88  * Status flags.  Always manipulated with bit operations.
89  */
90 #define CF_BUF0_VALID	 0	/* Buffers valid - first three */
91 #define CF_BUF1_VALID	 1
92 #define CF_BUF2_VALID	 2
93 #define CF_DMA_ACTIVE	 3	/* A frame is incoming */
94 #define CF_CONFIG_NEEDED 4	/* Must configure hardware */
95 #define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
96 #define CF_SG_RESTART	 6	/* SG restart needed */
97 #define CF_FRAME_SOF0	 7	/* Frame 0 started */
98 #define CF_FRAME_SOF1	 8
99 #define CF_FRAME_SOF2	 9
100 
101 #define sensor_call(cam, o, f, args...) \
102 	v4l2_subdev_call(cam->sensor, o, f, ##args)
103 
104 static struct mcam_format_struct {
105 	__u8 *desc;
106 	__u32 pixelformat;
107 	int bpp;   /* Bytes per pixel */
108 	bool planar;
109 	enum v4l2_mbus_pixelcode mbus_code;
110 } mcam_formats[] = {
111 	{
112 		.desc		= "YUYV 4:2:2",
113 		.pixelformat	= V4L2_PIX_FMT_YUYV,
114 		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
115 		.bpp		= 2,
116 		.planar		= false,
117 	},
118 	{
119 		.desc		= "UYVY 4:2:2",
120 		.pixelformat	= V4L2_PIX_FMT_UYVY,
121 		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
122 		.bpp		= 2,
123 		.planar		= false,
124 	},
125 	{
126 		.desc		= "YUV 4:2:2 PLANAR",
127 		.pixelformat	= V4L2_PIX_FMT_YUV422P,
128 		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
129 		.bpp		= 2,
130 		.planar		= true,
131 	},
132 	{
133 		.desc		= "YUV 4:2:0 PLANAR",
134 		.pixelformat	= V4L2_PIX_FMT_YUV420,
135 		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
136 		.bpp		= 2,
137 		.planar		= true,
138 	},
139 	{
140 		.desc		= "YVU 4:2:0 PLANAR",
141 		.pixelformat	= V4L2_PIX_FMT_YVU420,
142 		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
143 		.bpp		= 2,
144 		.planar		= true,
145 	},
146 	{
147 		.desc		= "RGB 444",
148 		.pixelformat	= V4L2_PIX_FMT_RGB444,
149 		.mbus_code	= V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
150 		.bpp		= 2,
151 		.planar		= false,
152 	},
153 	{
154 		.desc		= "RGB 565",
155 		.pixelformat	= V4L2_PIX_FMT_RGB565,
156 		.mbus_code	= V4L2_MBUS_FMT_RGB565_2X8_LE,
157 		.bpp		= 2,
158 		.planar		= false,
159 	},
160 	{
161 		.desc		= "Raw RGB Bayer",
162 		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
163 		.mbus_code	= V4L2_MBUS_FMT_SBGGR8_1X8,
164 		.bpp		= 1,
165 		.planar		= false,
166 	},
167 };
168 #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
169 
170 static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
171 {
172 	unsigned i;
173 
174 	for (i = 0; i < N_MCAM_FMTS; i++)
175 		if (mcam_formats[i].pixelformat == pixelformat)
176 			return mcam_formats + i;
177 	/* Not found? Then return the first format. */
178 	return mcam_formats;
179 }
180 
181 /*
182  * The default format we use until somebody says otherwise.
183  */
184 static const struct v4l2_pix_format mcam_def_pix_format = {
185 	.width		= VGA_WIDTH,
186 	.height		= VGA_HEIGHT,
187 	.pixelformat	= V4L2_PIX_FMT_YUYV,
188 	.field		= V4L2_FIELD_NONE,
189 	.bytesperline	= VGA_WIDTH*2,
190 	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
191 };
192 
193 static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
194 					V4L2_MBUS_FMT_YUYV8_2X8;
195 
196 
197 /*
198  * The two-word DMA descriptor format used by the Armada 610 and the like.  There
199  * is a three-word format as well (set C1_DESC_3WORD) where the third
200  * word is a pointer to the next descriptor, but we don't use it.  Two-word
201  * descriptors have to be contiguous in memory.
202  */
203 struct mcam_dma_desc {
204 	u32 dma_addr;
205 	u32 segment_len;
206 };
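/*
 * A minimal sketch (not driver code) of how an array of these descriptors is
 * filled from a DMA-mapped scatterlist; mcam_vb_sg_buf_prepare() below does
 * the real version of this:
 *
 *	struct mcam_dma_desc *desc = mvb->dma_desc;
 *	struct scatterlist *sg;
 *	int i;
 *
 *	for_each_sg(sg_table->sgl, sg, nent, i) {
 *		desc->dma_addr = sg_dma_address(sg);
 *		desc->segment_len = sg_dma_len(sg);
 *		desc++;
 *	}
 *
 * The controller then walks the physically contiguous array rather than
 * following next-descriptor pointers, which is why the two-word form is used.
 */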
207 
208 struct yuv_pointer_t {
209 	dma_addr_t y;
210 	dma_addr_t u;
211 	dma_addr_t v;
212 };
213 
214 /*
215  * Our buffer type for working with videobuf2.  Note that the vb2
216  * developers have decreed that struct vb2_buffer must be at the
217  * beginning of this structure.
218  */
219 struct mcam_vb_buffer {
220 	struct vb2_buffer vb_buf;
221 	struct list_head queue;
222 	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
223 	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
224 	int dma_desc_nent;		/* Number of mapped descriptors */
225 	struct yuv_pointer_t yuv_p;
226 };
227 
228 static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
229 {
230 	return container_of(vb, struct mcam_vb_buffer, vb_buf);
231 }
232 
233 /*
234  * Hand a completed buffer back to user space.
235  */
236 static void mcam_buffer_done(struct mcam_camera *cam, int frame,
237 		struct vb2_buffer *vbuf)
238 {
239 	vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
240 	vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
241 	vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
242 	vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
243 }
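/*
 * Illustration only: the values filled in here are what user space sees in
 * the struct v4l2_buffer returned by VIDIOC_DQBUF -- bytesused reports a full
 * sizeimage-sized payload, and sequence carries the per-frame counter kept in
 * buf_seq[] by the interrupt handler.
 */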
244 
245 
246 
247 /*
248  * Debugging and related.
249  */
250 #define cam_err(cam, fmt, arg...) \
251 	dev_err((cam)->dev, fmt, ##arg);
252 #define cam_warn(cam, fmt, arg...) \
253 	dev_warn((cam)->dev, fmt, ##arg);
254 #define cam_dbg(cam, fmt, arg...) \
255 	dev_dbg((cam)->dev, fmt, ##arg);
256 
257 
258 /*
259  * Flag manipulation helpers
260  */
261 static void mcam_reset_buffers(struct mcam_camera *cam)
262 {
263 	int i;
264 
265 	cam->next_buf = -1;
266 	for (i = 0; i < cam->nbufs; i++) {
267 		clear_bit(i, &cam->flags);
268 		clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
269 	}
270 }
271 
272 static inline int mcam_needs_config(struct mcam_camera *cam)
273 {
274 	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
275 }
276 
277 static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
278 {
279 	if (needed)
280 		set_bit(CF_CONFIG_NEEDED, &cam->flags);
281 	else
282 		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
283 }
284 
285 /* ------------------------------------------------------------------- */
286 /*
287  * Make the controller start grabbing images.  Everything must
288  * be set up before doing this.
289  */
290 static void mcam_ctlr_start(struct mcam_camera *cam)
291 {
292 	/* set_bit performs a read, so no other barrier should be
293 	   needed here */
294 	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
295 }
296 
297 static void mcam_ctlr_stop(struct mcam_camera *cam)
298 {
299 	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
300 }
301 
302 static void mcam_enable_mipi(struct mcam_camera *mcam)
303 {
304 	/* Called when using MIPI mode; program the DPHY and enable MIPI */
305 	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
306 			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
307 	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
308 	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
309 	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);
310 
311 	if (!mcam->mipi_enabled) {
312 		if (mcam->lane > 4 || mcam->lane <= 0) {
313 			cam_warn(mcam, "lane number error\n");
314 			mcam->lane = 1;	/* set the default value */
315 		}
316 		/*
317 		 * 0x41 activates 1 lane
318 		 * 0x43 activates 2 lanes
319 		 * 0x45 activates 3 lanes (never happens)
320 		 * 0x47 activates 4 lanes
321 		 */
322 		mcam_reg_write(mcam, REG_CSI2_CTRL0,
323 			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
324 		mcam_reg_write(mcam, REG_CLKCTRL,
325 			(mcam->mclk_src << 29) | mcam->mclk_div);
326 
327 		mcam->mipi_enabled = true;
328 	}
329 }
330 
331 static void mcam_disable_mipi(struct mcam_camera *mcam)
332 {
333 	/* Called when using parallel mode, or when MIPI must be disabled */
334 	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
335 	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
336 	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
337 	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
338 	mcam->mipi_enabled = false;
339 }
340 
341 /* ------------------------------------------------------------------- */
342 
343 #ifdef MCAM_MODE_VMALLOC
344 /*
345  * Code specific to the vmalloc buffer mode.
346  */
347 
348 /*
349  * Allocate in-kernel DMA buffers for vmalloc mode.
350  */
351 static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
352 {
353 	int i;
354 
355 	mcam_set_config_needed(cam, 1);
356 	if (loadtime)
357 		cam->dma_buf_size = dma_buf_size;
358 	else
359 		cam->dma_buf_size = cam->pix_format.sizeimage;
360 	if (n_dma_bufs > 3)
361 		n_dma_bufs = 3;
362 
363 	cam->nbufs = 0;
364 	for (i = 0; i < n_dma_bufs; i++) {
365 		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
366 				cam->dma_buf_size, cam->dma_handles + i,
367 				GFP_KERNEL);
368 		if (cam->dma_bufs[i] == NULL) {
369 			cam_warn(cam, "Failed to allocate DMA buffer\n");
370 			break;
371 		}
372 		(cam->nbufs)++;
373 	}
374 
375 	switch (cam->nbufs) {
376 	case 1:
377 		dma_free_coherent(cam->dev, cam->dma_buf_size,
378 				cam->dma_bufs[0], cam->dma_handles[0]);
379 		cam->nbufs = 0;
380 	case 0:
381 		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
382 		return -ENOMEM;
383 
384 	case 2:
385 		if (n_dma_bufs > 2)
386 			cam_warn(cam, "Will limp along with only 2 buffers\n");
387 		break;
388 	}
389 	return 0;
390 }
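/*
 * A sense of scale (illustration only, assuming the usual 640x480 VGA
 * geometry from mcam-core.h): the default load-time allocation is three
 * coherent buffers of 640 * 480 * 2 = 614400 bytes each, roughly 1.8MB of
 * DMA-able memory nailed down for as long as the module is loaded -- which
 * is exactly the trade-off the alloc_bufs_at_read parameter is about.
 */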
391 
392 static void mcam_free_dma_bufs(struct mcam_camera *cam)
393 {
394 	int i;
395 
396 	for (i = 0; i < cam->nbufs; i++) {
397 		dma_free_coherent(cam->dev, cam->dma_buf_size,
398 				cam->dma_bufs[i], cam->dma_handles[i]);
399 		cam->dma_bufs[i] = NULL;
400 	}
401 	cam->nbufs = 0;
402 }
403 
404 
405 /*
406  * Set up DMA buffers when operating in vmalloc mode
407  */
408 static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
409 {
410 	/*
411 	 * Store the first two Y buffers (we aren't supporting
412 	 * planar formats for now, so no UV bufs).  Then either
413 	 * set the third if it exists, or tell the controller
414 	 * to just use two.
415 	 */
416 	mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
417 	mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
418 	if (cam->nbufs > 2) {
419 		mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
420 		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
421 	} else
422 		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
423 	if (cam->chip_id == MCAM_CAFE)
424 		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
425 }
426 
427 /*
428  * Copy data out to user space in the vmalloc case
429  */
430 static void mcam_frame_tasklet(unsigned long data)
431 {
432 	struct mcam_camera *cam = (struct mcam_camera *) data;
433 	int i;
434 	unsigned long flags;
435 	struct mcam_vb_buffer *buf;
436 
437 	spin_lock_irqsave(&cam->dev_lock, flags);
438 	for (i = 0; i < cam->nbufs; i++) {
439 		int bufno = cam->next_buf;
440 
441 		if (cam->state != S_STREAMING || bufno < 0)
442 			break;  /* I/O got stopped */
443 		if (++(cam->next_buf) >= cam->nbufs)
444 			cam->next_buf = 0;
445 		if (!test_bit(bufno, &cam->flags))
446 			continue;
447 		if (list_empty(&cam->buffers)) {
448 			cam->frame_state.singles++;
449 			break;  /* Leave it valid, hope for better later */
450 		}
451 		cam->frame_state.delivered++;
452 		clear_bit(bufno, &cam->flags);
453 		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
454 				queue);
455 		list_del_init(&buf->queue);
456 		/*
457 		 * Drop the lock during the big copy.  This *should* be safe...
458 		 */
459 		spin_unlock_irqrestore(&cam->dev_lock, flags);
460 		memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
461 				cam->pix_format.sizeimage);
462 		mcam_buffer_done(cam, bufno, &buf->vb_buf);
463 		spin_lock_irqsave(&cam->dev_lock, flags);
464 	}
465 	spin_unlock_irqrestore(&cam->dev_lock, flags);
466 }
467 
468 
469 /*
470  * Make sure our allocated buffers are up to the task.
471  */
472 static int mcam_check_dma_buffers(struct mcam_camera *cam)
473 {
474 	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
475 			mcam_free_dma_bufs(cam);
476 	if (cam->nbufs == 0)
477 		return mcam_alloc_dma_bufs(cam, 0);
478 	return 0;
479 }
480 
481 static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
482 {
483 	tasklet_schedule(&cam->s_tasklet);
484 }
485 
486 #else /* MCAM_MODE_VMALLOC */
487 
488 static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
489 {
490 	return 0;
491 }
492 
493 static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
494 {
495 	return;
496 }
497 
498 static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
499 {
500 	return 0;
501 }
502 
503 
504 
505 #endif /* MCAM_MODE_VMALLOC */
506 
507 
508 #ifdef MCAM_MODE_DMA_CONTIG
509 /* ---------------------------------------------------------------------- */
510 /*
511  * DMA-contiguous code.
512  */
513 
514 static bool mcam_fmt_is_planar(__u32 pfmt)
515 {
516 	struct mcam_format_struct *f;
517 
518 	f = mcam_find_format(pfmt);
519 	return f->planar;
520 }
521 
522 /*
523  * Set up a contiguous buffer for the given frame.  Here also is where
524  * the underrun strategy is set: if there is no buffer available, reuse
525  * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
526  * keep the interrupt handler from giving that buffer back to user
527  * space.  In this way, we always have a buffer to DMA to and don't
528  * have to try to play games stopping and restarting the controller.
529  */
530 static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
531 {
532 	struct mcam_vb_buffer *buf;
533 	struct v4l2_pix_format *fmt = &cam->pix_format;
534 	dma_addr_t dma_handle;
535 	u32 pixel_count = fmt->width * fmt->height;
536 	struct vb2_buffer *vb;
537 
538 	/*
539 	 * If there are no available buffers, go into single mode
540 	 */
541 	if (list_empty(&cam->buffers)) {
542 		buf = cam->vb_bufs[frame ^ 0x1];
543 		set_bit(CF_SINGLE_BUFFER, &cam->flags);
544 		cam->frame_state.singles++;
545 	} else {
546 		/*
547 		 * OK, we have a buffer we can use.
548 		 */
549 		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
550 					queue);
551 		list_del_init(&buf->queue);
552 		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
553 	}
554 
555 	cam->vb_bufs[frame] = buf;
556 	vb = &buf->vb_buf;
557 
558 	dma_handle = vb2_dma_contig_plane_dma_addr(vb, 0);
559 	buf->yuv_p.y = dma_handle;
560 
561 	switch (cam->pix_format.pixelformat) {
562 	case V4L2_PIX_FMT_YUV422P:
563 		buf->yuv_p.u = buf->yuv_p.y + pixel_count;
564 		buf->yuv_p.v = buf->yuv_p.u + pixel_count / 2;
565 		break;
566 	case V4L2_PIX_FMT_YUV420:
567 		buf->yuv_p.u = buf->yuv_p.y + pixel_count;
568 		buf->yuv_p.v = buf->yuv_p.u + pixel_count / 4;
569 		break;
570 	case V4L2_PIX_FMT_YVU420:
571 		buf->yuv_p.v = buf->yuv_p.y + pixel_count;
572 		buf->yuv_p.u = buf->yuv_p.v + pixel_count / 4;
573 		break;
574 	default:
575 		break;
576 	}
577 
578 	mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR, buf->yuv_p.y);
579 	if (mcam_fmt_is_planar(fmt->pixelformat)) {
580 		mcam_reg_write(cam, frame == 0 ?
581 					REG_U0BAR : REG_U1BAR, buf->yuv_p.u);
582 		mcam_reg_write(cam, frame == 0 ?
583 					REG_V0BAR : REG_V1BAR, buf->yuv_p.v);
584 	}
585 }
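/*
 * Worked example (illustration only): for a 640x480 V4L2_PIX_FMT_YUV420
 * frame, pixel_count is 307200, so the planes land at
 *
 *	y = dma_handle
 *	u = y + 307200
 *	v = u + 307200 / 4 = y + 384000
 *
 * for a total of 640 * 480 * 3 / 2 = 460800 bytes, matching the sizeimage
 * computed in mcam_vidioc_try_fmt_vid_cap() below.
 */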
586 
587 /*
588  * Initial B_DMA_contig setup.
589  */
590 static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
591 {
592 	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
593 	cam->nbufs = 2;
594 	mcam_set_contig_buffer(cam, 0);
595 	mcam_set_contig_buffer(cam, 1);
596 }
597 
598 /*
599  * Frame completion handling.
600  */
601 static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
602 {
603 	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
604 
605 	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
606 		cam->frame_state.delivered++;
607 		mcam_buffer_done(cam, frame, &buf->vb_buf);
608 	}
609 	mcam_set_contig_buffer(cam, frame);
610 }
611 
612 #endif /* MCAM_MODE_DMA_CONTIG */
613 
614 #ifdef MCAM_MODE_DMA_SG
615 /* ---------------------------------------------------------------------- */
616 /*
617  * Scatter/gather-specific code.
618  */
619 
620 /*
621  * Set up the next buffer for S/G I/O; caller should be sure that
622  * the controller is stopped and a buffer is available.
623  */
624 static void mcam_sg_next_buffer(struct mcam_camera *cam)
625 {
626 	struct mcam_vb_buffer *buf;
627 
628 	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
629 	list_del_init(&buf->queue);
630 	/*
631 	 * Very Bad Not Good Things happen if you don't clear
632 	 * C1_DESC_ENA before making any descriptor changes.
633 	 */
634 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
635 	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
636 	mcam_reg_write(cam, REG_DESC_LEN_Y,
637 			buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
638 	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
639 	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
640 	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
641 	cam->vb_bufs[0] = buf;
642 }
643 
644 /*
645  * Initial B_DMA_sg setup
646  */
647 static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
648 {
649 	/*
650 	 * The list-empty condition can hit us at resume time
651 	 * if the buffer list was empty when the system was suspended.
652 	 */
653 	if (list_empty(&cam->buffers)) {
654 		set_bit(CF_SG_RESTART, &cam->flags);
655 		return;
656 	}
657 
658 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
659 	mcam_sg_next_buffer(cam);
660 	cam->nbufs = 3;
661 }
662 
663 
664 /*
665  * Frame completion with S/G is trickier.  We can't muck with
666  * a descriptor chain on the fly, since the controller buffers it
667  * internally.  So we have to actually stop and restart; Marvell
668  * says this is the way to do it.
669  *
670  * Of course, stopping is easier said than done; experience shows
671  * that the controller can start a frame *after* C0_ENABLE has been
672  * cleared.  So when running in S/G mode, the controller is "stopped"
673  * on receipt of the start-of-frame interrupt.  That means we can
674  * safely change the DMA descriptor array here and restart things
675  * (assuming there's another buffer waiting to go).
676  */
677 static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
678 {
679 	struct mcam_vb_buffer *buf = cam->vb_bufs[0];
680 
681 	/*
682 	 * If we're no longer supposed to be streaming, don't do anything.
683 	 */
684 	if (cam->state != S_STREAMING)
685 		return;
686 	/*
687 	 * If we have another buffer available, put it in and
688 	 * restart the engine.
689 	 */
690 	if (!list_empty(&cam->buffers)) {
691 		mcam_sg_next_buffer(cam);
692 		mcam_ctlr_start(cam);
693 	/*
694 	 * Otherwise set CF_SG_RESTART and the controller will
695 	 * be restarted once another buffer shows up.
696 	 */
697 	} else {
698 		set_bit(CF_SG_RESTART, &cam->flags);
699 		cam->frame_state.singles++;
700 		cam->vb_bufs[0] = NULL;
701 	}
702 	/*
703 	 * Now we can give the completed frame back to user space.
704 	 */
705 	cam->frame_state.delivered++;
706 	mcam_buffer_done(cam, frame, &buf->vb_buf);
707 }
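/*
 * The restart handshake, in outline (illustration only): when the queue runs
 * dry, mcam_dma_sg_done() leaves the controller stopped and sets
 * CF_SG_RESTART.  When user space queues another buffer,
 * mcam_vb_buf_queue() sees that flag while streaming and calls
 * mcam_sg_restart(), which loads the new descriptor array via
 * mcam_ctlr_dma_sg() and re-enables the controller with mcam_ctlr_start().
 */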
708 
709 
710 /*
711  * Scatter/gather mode requires stopping the controller between
712  * frames so we can put in a new DMA descriptor array.  If no new
713  * buffer exists at frame completion, the controller is left stopped;
714 	 * this function is charged with getting things going again.
715  */
716 static void mcam_sg_restart(struct mcam_camera *cam)
717 {
718 	mcam_ctlr_dma_sg(cam);
719 	mcam_ctlr_start(cam);
720 	clear_bit(CF_SG_RESTART, &cam->flags);
721 }
722 
723 #else /* MCAM_MODE_DMA_SG */
724 
725 static inline void mcam_sg_restart(struct mcam_camera *cam)
726 {
727 	return;
728 }
729 
730 #endif /* MCAM_MODE_DMA_SG */
731 
732 /* ---------------------------------------------------------------------- */
733 /*
734  * Buffer-mode-independent controller code.
735  */
736 
737 /*
738  * Image format setup
739  */
740 static void mcam_ctlr_image(struct mcam_camera *cam)
741 {
742 	struct v4l2_pix_format *fmt = &cam->pix_format;
743 	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;
744 
745 	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
746 		fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
747 	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
748 	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;
749 
750 	switch (fmt->pixelformat) {
751 	case V4L2_PIX_FMT_YUYV:
752 	case V4L2_PIX_FMT_UYVY:
753 		widthy = fmt->width * 2;
754 		widthuv = 0;
755 		break;
756 	case V4L2_PIX_FMT_JPEG:
757 		imgsz_h = (fmt->sizeimage / fmt->bytesperline) << IMGSZ_V_SHIFT;
758 		widthy = fmt->bytesperline;
759 		widthuv = 0;
760 		break;
761 	case V4L2_PIX_FMT_YUV422P:
762 	case V4L2_PIX_FMT_YUV420:
763 	case V4L2_PIX_FMT_YVU420:
764 		widthy = fmt->width;
765 		widthuv = fmt->width / 2;
766 		break;
767 	default:
768 		widthy = fmt->bytesperline;
769 		widthuv = 0;
770 	}
771 
772 	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
773 			IMGP_YP_MASK | IMGP_UVP_MASK);
774 	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
775 	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);
776 
777 	/*
778 	 * Tell the controller about the image format we are using.
779 	 */
780 	switch (fmt->pixelformat) {
781 	case V4L2_PIX_FMT_YUV422P:
782 		mcam_reg_write_mask(cam, REG_CTRL0,
783 			C0_DF_YUV | C0_YUV_PLANAR | C0_YUVE_YVYU, C0_DF_MASK);
784 		break;
785 	case V4L2_PIX_FMT_YUV420:
786 	case V4L2_PIX_FMT_YVU420:
787 		mcam_reg_write_mask(cam, REG_CTRL0,
788 			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_YVYU, C0_DF_MASK);
789 		break;
790 	case V4L2_PIX_FMT_YUYV:
791 		mcam_reg_write_mask(cam, REG_CTRL0,
792 			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_UYVY, C0_DF_MASK);
793 		break;
794 	case V4L2_PIX_FMT_UYVY:
795 		mcam_reg_write_mask(cam, REG_CTRL0,
796 			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_YUYV, C0_DF_MASK);
797 		break;
798 	case V4L2_PIX_FMT_JPEG:
799 		mcam_reg_write_mask(cam, REG_CTRL0,
800 			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_YUYV, C0_DF_MASK);
801 		break;
802 	case V4L2_PIX_FMT_RGB444:
803 		mcam_reg_write_mask(cam, REG_CTRL0,
804 			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XRGB, C0_DF_MASK);
805 		/* Alpha value? */
806 		break;
807 	case V4L2_PIX_FMT_RGB565:
808 		mcam_reg_write_mask(cam, REG_CTRL0,
809 			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
810 		break;
811 	default:
812 		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
813 		break;
814 	}
815 
816 	/*
817 	 * Make sure it knows we want to use hsync/vsync.
818 	 */
819 	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
820 	/*
821 	 * This field controls the generation of EOF (DVP only).
822 	 */
823 	if (cam->bus_type != V4L2_MBUS_CSI2)
824 		mcam_reg_set_bit(cam, REG_CTRL0,
825 				C0_EOF_VSYNC | C0_VEDGE_CTRL);
826 }
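/*
 * Worked example (illustration only): for the default 640x480 YUYV format,
 * imgsz_w = 640 * 2 = 1280 bytes per line, imgsz_h is 480 lines shifted into
 * the vertical field, widthy = 1280 and widthuv = 0, so the masked pitch
 * fields in REG_IMGPITCH receive (widthuv << 16 | widthy) = 0x00000500.
 */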
827 
828 
829 /*
830  * Configure the controller for operation; caller holds the
831  * device mutex.
832  */
833 static int mcam_ctlr_configure(struct mcam_camera *cam)
834 {
835 	unsigned long flags;
836 
837 	spin_lock_irqsave(&cam->dev_lock, flags);
838 	clear_bit(CF_SG_RESTART, &cam->flags);
839 	cam->dma_setup(cam);
840 	mcam_ctlr_image(cam);
841 	mcam_set_config_needed(cam, 0);
842 	spin_unlock_irqrestore(&cam->dev_lock, flags);
843 	return 0;
844 }
845 
846 static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
847 {
848 	/*
849 	 * Clear any pending interrupts, since we do not
850 	 * expect to have I/O active prior to enabling.
851 	 */
852 	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
853 	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
854 }
855 
856 static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
857 {
858 	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
859 }
860 
861 
862 
863 static void mcam_ctlr_init(struct mcam_camera *cam)
864 {
865 	unsigned long flags;
866 
867 	spin_lock_irqsave(&cam->dev_lock, flags);
868 	/*
869 	 * Make sure it's not powered down.
870 	 */
871 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
872 	/*
873 	 * Turn off the enable bit.  It sure should be off anyway,
874 	 * but it's good to be sure.
875 	 */
876 	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
877 	/*
878 	 * Clock the sensor appropriately.  Controller clock should
879 	 * be 48MHz, sensor "typical" value is half that.
880 	 */
881 	mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
882 	spin_unlock_irqrestore(&cam->dev_lock, flags);
883 }
884 
885 
886 /*
887  * Stop the controller, and don't return until we're really sure that no
888  * further DMA is going on.
889  */
890 static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
891 {
892 	unsigned long flags;
893 
894 	/*
895 	 * Theory: stop the camera controller (whether it is operating
896 	 * or not).  Delay briefly just in case we race with the SOF
897 	 * interrupt, then wait until no DMA is active.
898 	 */
899 	spin_lock_irqsave(&cam->dev_lock, flags);
900 	clear_bit(CF_SG_RESTART, &cam->flags);
901 	mcam_ctlr_stop(cam);
902 	cam->state = S_IDLE;
903 	spin_unlock_irqrestore(&cam->dev_lock, flags);
904 	/*
905 	 * This is a brutally long sleep, but experience shows that
906 	 * it can take the controller a while to get the message that
907 	 * it needs to stop grabbing frames.  In particular, we can
908 	 * sometimes (on mmp) get a frame at the end WITHOUT the
909 	 * start-of-frame indication.
910 	 */
911 	msleep(150);
912 	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
913 		cam_err(cam, "Timeout waiting for DMA to end\n");
914 		/* This would be bad news - what now? */
915 	spin_lock_irqsave(&cam->dev_lock, flags);
916 	mcam_ctlr_irq_disable(cam);
917 	spin_unlock_irqrestore(&cam->dev_lock, flags);
918 }
919 
920 /*
921  * Power up and down.
922  */
923 static int mcam_ctlr_power_up(struct mcam_camera *cam)
924 {
925 	unsigned long flags;
926 	int ret;
927 
928 	spin_lock_irqsave(&cam->dev_lock, flags);
929 	ret = cam->plat_power_up(cam);
930 	if (ret) {
931 		spin_unlock_irqrestore(&cam->dev_lock, flags);
932 		return ret;
933 	}
934 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
935 	spin_unlock_irqrestore(&cam->dev_lock, flags);
936 	msleep(5); /* Just to be sure */
937 	return 0;
938 }
939 
940 static void mcam_ctlr_power_down(struct mcam_camera *cam)
941 {
942 	unsigned long flags;
943 
944 	spin_lock_irqsave(&cam->dev_lock, flags);
945 	/*
946 	 * School of hard knocks department: be sure we do any register
947 	 * twiddling on the controller *before* calling the platform
948 	 * power down routine.
949 	 */
950 	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
951 	cam->plat_power_down(cam);
952 	spin_unlock_irqrestore(&cam->dev_lock, flags);
953 }
954 
955 /* -------------------------------------------------------------------- */
956 /*
957  * Communications with the sensor.
958  */
959 
960 static int __mcam_cam_reset(struct mcam_camera *cam)
961 {
962 	return sensor_call(cam, core, reset, 0);
963 }
964 
965 /*
966  * We have found the sensor on the i2c.  Let's try to have a
967  * conversation.
968  */
969 static int mcam_cam_init(struct mcam_camera *cam)
970 {
971 	int ret;
972 
973 	mutex_lock(&cam->s_mutex);
974 	if (cam->state != S_NOTREADY)
975 		cam_warn(cam, "Cam init with device in funky state %d\n",
976 				cam->state);
977 	ret = __mcam_cam_reset(cam);
978 	/* Get/set parameters? */
979 	cam->state = S_IDLE;
980 	mcam_ctlr_power_down(cam);
981 	mutex_unlock(&cam->s_mutex);
982 	return ret;
983 }
984 
985 /*
986  * Configure the sensor to match the parameters we have.  Caller should
987  * hold s_mutex
988  */
989 static int mcam_cam_set_flip(struct mcam_camera *cam)
990 {
991 	struct v4l2_control ctrl;
992 
993 	memset(&ctrl, 0, sizeof(ctrl));
994 	ctrl.id = V4L2_CID_VFLIP;
995 	ctrl.value = flip;
996 	return sensor_call(cam, core, s_ctrl, &ctrl);
997 }
998 
999 
1000 static int mcam_cam_configure(struct mcam_camera *cam)
1001 {
1002 	struct v4l2_mbus_framefmt mbus_fmt;
1003 	int ret;
1004 
1005 	v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
1006 	ret = sensor_call(cam, core, init, 0);
1007 	if (ret == 0)
1008 		ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
1009 	/*
1010 	 * OV7670 does weird things if flip is set *before* format...
1011 	 */
1012 	ret += mcam_cam_set_flip(cam);
1013 	return ret;
1014 }
1015 
1016 /*
1017  * Get everything ready, and start grabbing frames.
1018  */
1019 static int mcam_read_setup(struct mcam_camera *cam)
1020 {
1021 	int ret;
1022 	unsigned long flags;
1023 
1024 	/*
1025 	 * Configuration.  If we still don't have DMA buffers,
1026 	 * make one last, desperate attempt.
1027 	 */
1028 	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
1029 			mcam_alloc_dma_bufs(cam, 0))
1030 		return -ENOMEM;
1031 
1032 	if (mcam_needs_config(cam)) {
1033 		mcam_cam_configure(cam);
1034 		ret = mcam_ctlr_configure(cam);
1035 		if (ret)
1036 			return ret;
1037 	}
1038 
1039 	/*
1040 	 * Turn it loose.
1041 	 */
1042 	spin_lock_irqsave(&cam->dev_lock, flags);
1043 	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1044 	mcam_reset_buffers(cam);
1045 	/*
1046 	 * Update CSI2_DPHY value
1047 	 */
1048 	if (cam->calc_dphy)
1049 		cam->calc_dphy(cam);
1050 	cam_dbg(cam, "camera: DPHY sets: dphy3=0x%x, dphy5=0x%x, dphy6=0x%x\n",
1051 			cam->dphy[0], cam->dphy[1], cam->dphy[2]);
1052 	if (cam->bus_type == V4L2_MBUS_CSI2)
1053 		mcam_enable_mipi(cam);
1054 	else
1055 		mcam_disable_mipi(cam);
1056 	mcam_ctlr_irq_enable(cam);
1057 	cam->state = S_STREAMING;
1058 	if (!test_bit(CF_SG_RESTART, &cam->flags))
1059 		mcam_ctlr_start(cam);
1060 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1061 	return 0;
1062 }
1063 
1064 /* ----------------------------------------------------------------------- */
1065 /*
1066  * Videobuf2 interface code.
1067  */
1068 
1069 static int mcam_vb_queue_setup(struct vb2_queue *vq,
1070 		const struct v4l2_format *fmt, unsigned int *nbufs,
1071 		unsigned int *num_planes, unsigned int sizes[],
1072 		void *alloc_ctxs[])
1073 {
1074 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1075 	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1076 
1077 	sizes[0] = cam->pix_format.sizeimage;
1078 	*num_planes = 1; /* Someday we have to support planar formats... */
1079 	if (*nbufs < minbufs)
1080 		*nbufs = minbufs;
1081 	if (cam->buffer_mode == B_DMA_contig)
1082 		alloc_ctxs[0] = cam->vb_alloc_ctx;
1083 	return 0;
1084 }
1085 
1086 
1087 static void mcam_vb_buf_queue(struct vb2_buffer *vb)
1088 {
1089 	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1090 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1091 	unsigned long flags;
1092 	int start;
1093 
1094 	spin_lock_irqsave(&cam->dev_lock, flags);
1095 	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
1096 	list_add(&mvb->queue, &cam->buffers);
1097 	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
1098 		mcam_sg_restart(cam);
1099 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1100 	if (start)
1101 		mcam_read_setup(cam);
1102 }
1103 
1104 
1105 /*
1106  * vb2 uses these to release the mutex when waiting in dqbuf.  I'm
1107  * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
1108  * to be called with the mutex held), but better safe than sorry.
1109  */
1110 static void mcam_vb_wait_prepare(struct vb2_queue *vq)
1111 {
1112 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1113 
1114 	mutex_unlock(&cam->s_mutex);
1115 }
1116 
1117 static void mcam_vb_wait_finish(struct vb2_queue *vq)
1118 {
1119 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1120 
1121 	mutex_lock(&cam->s_mutex);
1122 }
1123 
1124 /*
1125  * These need to be called with the mutex held from vb2
1126  */
1127 static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
1128 {
1129 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1130 	unsigned int frame;
1131 
1132 	if (cam->state != S_IDLE) {
1133 		INIT_LIST_HEAD(&cam->buffers);
1134 		return -EINVAL;
1135 	}
1136 	cam->sequence = 0;
1137 	/*
1138 	 * Videobuf2 sneakily hoards all the buffers and won't
1139 	 * give them to us until *after* streaming starts.  But
1140 	 * we can't actually start streaming until we have a
1141 	 * destination.  So go into a wait state and hope they
1142 	 * give us buffers soon.
1143 	 */
1144 	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
1145 		cam->state = S_BUFWAIT;
1146 		return 0;
1147 	}
1148 
1149 	/*
1150 	 * Make sure any leftover frame flags are cleared
1151 	 * before we really start streaming.
1152 	 */
1153 	for (frame = 0; frame < cam->nbufs; frame++)
1154 		clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1155 
1156 	return mcam_read_setup(cam);
1157 }
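/*
 * The S_BUFWAIT handshake, in outline (illustration only): if a DMA buffer
 * mode starts streaming before any buffers have been queued, we park in
 * S_BUFWAIT above and return success.  The first mcam_vb_buf_queue() call
 * then finds state == S_BUFWAIT with a non-empty buffer list and calls
 * mcam_read_setup() to actually configure and start the controller.
 */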
1158 
1159 static void mcam_vb_stop_streaming(struct vb2_queue *vq)
1160 {
1161 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1162 	unsigned long flags;
1163 
1164 	if (cam->state == S_BUFWAIT) {
1165 		/* They never gave us buffers */
1166 		cam->state = S_IDLE;
1167 		return;
1168 	}
1169 	if (cam->state != S_STREAMING)
1170 		return;
1171 	mcam_ctlr_stop_dma(cam);
1172 	/*
1173 	 * Reset the CCIC PHY after stopping streaming;
1174 	 * otherwise the CCIC may be unstable.
1175 	 */
1176 	if (cam->ctlr_reset)
1177 		cam->ctlr_reset(cam);
1178 	/*
1179 	 * VB2 reclaims the buffers, so we need to forget
1180 	 * about them.
1181 	 */
1182 	spin_lock_irqsave(&cam->dev_lock, flags);
1183 	INIT_LIST_HEAD(&cam->buffers);
1184 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1185 }
1186 
1187 
1188 static const struct vb2_ops mcam_vb2_ops = {
1189 	.queue_setup		= mcam_vb_queue_setup,
1190 	.buf_queue		= mcam_vb_buf_queue,
1191 	.start_streaming	= mcam_vb_start_streaming,
1192 	.stop_streaming		= mcam_vb_stop_streaming,
1193 	.wait_prepare		= mcam_vb_wait_prepare,
1194 	.wait_finish		= mcam_vb_wait_finish,
1195 };
1196 
1197 
1198 #ifdef MCAM_MODE_DMA_SG
1199 /*
1200  * Scatter/gather mode uses all of the above functions plus a
1201  * few extras to deal with DMA mapping.
1202  */
1203 static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
1204 {
1205 	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1206 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1207 	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1208 
1209 	mvb->dma_desc = dma_alloc_coherent(cam->dev,
1210 			ndesc * sizeof(struct mcam_dma_desc),
1211 			&mvb->dma_desc_pa, GFP_KERNEL);
1212 	if (mvb->dma_desc == NULL) {
1213 		cam_err(cam, "Unable to get DMA descriptor array\n");
1214 		return -ENOMEM;
1215 	}
1216 	return 0;
1217 }
1218 
1219 static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1220 {
1221 	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1222 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1223 	struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1224 	struct mcam_dma_desc *desc = mvb->dma_desc;
1225 	struct scatterlist *sg;
1226 	int i;
1227 
1228 	mvb->dma_desc_nent = dma_map_sg(cam->dev, sg_table->sgl,
1229 			sg_table->nents, DMA_FROM_DEVICE);
1230 	if (mvb->dma_desc_nent <= 0)
1231 		return -EIO;  /* Not sure what's right here */
1232 	for_each_sg(sg_table->sgl, sg, mvb->dma_desc_nent, i) {
1233 		desc->dma_addr = sg_dma_address(sg);
1234 		desc->segment_len = sg_dma_len(sg);
1235 		desc++;
1236 	}
1237 	return 0;
1238 }
1239 
1240 static void mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
1241 {
1242 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1243 	struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1244 
1245 	if (sg_table)
1246 		dma_unmap_sg(cam->dev, sg_table->sgl,
1247 				sg_table->nents, DMA_FROM_DEVICE);
1248 }
1249 
1250 static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
1251 {
1252 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1253 	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1254 	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1255 
1256 	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
1257 			mvb->dma_desc, mvb->dma_desc_pa);
1258 }
1259 
1260 
1261 static const struct vb2_ops mcam_vb2_sg_ops = {
1262 	.queue_setup		= mcam_vb_queue_setup,
1263 	.buf_init		= mcam_vb_sg_buf_init,
1264 	.buf_prepare		= mcam_vb_sg_buf_prepare,
1265 	.buf_queue		= mcam_vb_buf_queue,
1266 	.buf_finish		= mcam_vb_sg_buf_finish,
1267 	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
1268 	.start_streaming	= mcam_vb_start_streaming,
1269 	.stop_streaming		= mcam_vb_stop_streaming,
1270 	.wait_prepare		= mcam_vb_wait_prepare,
1271 	.wait_finish		= mcam_vb_wait_finish,
1272 };
1273 
1274 #endif /* MCAM_MODE_DMA_SG */
1275 
1276 static int mcam_setup_vb2(struct mcam_camera *cam)
1277 {
1278 	struct vb2_queue *vq = &cam->vb_queue;
1279 
1280 	memset(vq, 0, sizeof(*vq));
1281 	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1282 	vq->drv_priv = cam;
1283 	INIT_LIST_HEAD(&cam->buffers);
1284 	switch (cam->buffer_mode) {
1285 	case B_DMA_contig:
1286 #ifdef MCAM_MODE_DMA_CONTIG
1287 		vq->ops = &mcam_vb2_ops;
1288 		vq->mem_ops = &vb2_dma_contig_memops;
1289 		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1290 		cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
1291 		vq->io_modes = VB2_MMAP | VB2_USERPTR;
1292 		cam->dma_setup = mcam_ctlr_dma_contig;
1293 		cam->frame_complete = mcam_dma_contig_done;
1294 #endif
1295 		break;
1296 	case B_DMA_sg:
1297 #ifdef MCAM_MODE_DMA_SG
1298 		vq->ops = &mcam_vb2_sg_ops;
1299 		vq->mem_ops = &vb2_dma_sg_memops;
1300 		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1301 		vq->io_modes = VB2_MMAP | VB2_USERPTR;
1302 		cam->dma_setup = mcam_ctlr_dma_sg;
1303 		cam->frame_complete = mcam_dma_sg_done;
1304 #endif
1305 		break;
1306 	case B_vmalloc:
1307 #ifdef MCAM_MODE_VMALLOC
1308 		tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
1309 				(unsigned long) cam);
1310 		vq->ops = &mcam_vb2_ops;
1311 		vq->mem_ops = &vb2_vmalloc_memops;
1312 		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1313 		vq->io_modes = VB2_MMAP;
1314 		cam->dma_setup = mcam_ctlr_dma_vmalloc;
1315 		cam->frame_complete = mcam_vmalloc_done;
1316 #endif
1317 		break;
1318 	}
1319 	return vb2_queue_init(vq);
1320 }
1321 
1322 static void mcam_cleanup_vb2(struct mcam_camera *cam)
1323 {
1324 	vb2_queue_release(&cam->vb_queue);
1325 #ifdef MCAM_MODE_DMA_CONTIG
1326 	if (cam->buffer_mode == B_DMA_contig)
1327 		vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
1328 #endif
1329 }
1330 
1331 
1332 /* ---------------------------------------------------------------------- */
1333 /*
1334  * The long list of V4L2 ioctl() operations.
1335  */
1336 
1337 static int mcam_vidioc_streamon(struct file *filp, void *priv,
1338 		enum v4l2_buf_type type)
1339 {
1340 	struct mcam_camera *cam = filp->private_data;
1341 	int ret;
1342 
1343 	mutex_lock(&cam->s_mutex);
1344 	ret = vb2_streamon(&cam->vb_queue, type);
1345 	mutex_unlock(&cam->s_mutex);
1346 	return ret;
1347 }
1348 
1349 
1350 static int mcam_vidioc_streamoff(struct file *filp, void *priv,
1351 		enum v4l2_buf_type type)
1352 {
1353 	struct mcam_camera *cam = filp->private_data;
1354 	int ret;
1355 
1356 	mutex_lock(&cam->s_mutex);
1357 	ret = vb2_streamoff(&cam->vb_queue, type);
1358 	mutex_unlock(&cam->s_mutex);
1359 	return ret;
1360 }
1361 
1362 
1363 static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
1364 		struct v4l2_requestbuffers *req)
1365 {
1366 	struct mcam_camera *cam = filp->private_data;
1367 	int ret;
1368 
1369 	mutex_lock(&cam->s_mutex);
1370 	ret = vb2_reqbufs(&cam->vb_queue, req);
1371 	mutex_unlock(&cam->s_mutex);
1372 	return ret;
1373 }
1374 
1375 
1376 static int mcam_vidioc_querybuf(struct file *filp, void *priv,
1377 		struct v4l2_buffer *buf)
1378 {
1379 	struct mcam_camera *cam = filp->private_data;
1380 	int ret;
1381 
1382 	mutex_lock(&cam->s_mutex);
1383 	ret = vb2_querybuf(&cam->vb_queue, buf);
1384 	mutex_unlock(&cam->s_mutex);
1385 	return ret;
1386 }
1387 
1388 static int mcam_vidioc_qbuf(struct file *filp, void *priv,
1389 		struct v4l2_buffer *buf)
1390 {
1391 	struct mcam_camera *cam = filp->private_data;
1392 	int ret;
1393 
1394 	mutex_lock(&cam->s_mutex);
1395 	ret = vb2_qbuf(&cam->vb_queue, buf);
1396 	mutex_unlock(&cam->s_mutex);
1397 	return ret;
1398 }
1399 
1400 static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
1401 		struct v4l2_buffer *buf)
1402 {
1403 	struct mcam_camera *cam = filp->private_data;
1404 	int ret;
1405 
1406 	mutex_lock(&cam->s_mutex);
1407 	ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
1408 	mutex_unlock(&cam->s_mutex);
1409 	return ret;
1410 }
1411 
1412 static int mcam_vidioc_querycap(struct file *file, void *priv,
1413 		struct v4l2_capability *cap)
1414 {
1415 	strcpy(cap->driver, "marvell_ccic");
1416 	strcpy(cap->card, "marvell_ccic");
1417 	cap->version = 1;
1418 	cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
1419 		V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
1420 	return 0;
1421 }
1422 
1423 
1424 static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1425 		void *priv, struct v4l2_fmtdesc *fmt)
1426 {
1427 	if (fmt->index >= N_MCAM_FMTS)
1428 		return -EINVAL;
1429 	strlcpy(fmt->description, mcam_formats[fmt->index].desc,
1430 			sizeof(fmt->description));
1431 	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1432 	return 0;
1433 }
1434 
1435 static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
1436 		struct v4l2_format *fmt)
1437 {
1438 	struct mcam_camera *cam = priv;
1439 	struct mcam_format_struct *f;
1440 	struct v4l2_pix_format *pix = &fmt->fmt.pix;
1441 	struct v4l2_mbus_framefmt mbus_fmt;
1442 	int ret;
1443 
1444 	f = mcam_find_format(pix->pixelformat);
1445 	pix->pixelformat = f->pixelformat;
1446 	v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
1447 	mutex_lock(&cam->s_mutex);
1448 	ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
1449 	mutex_unlock(&cam->s_mutex);
1450 	v4l2_fill_pix_format(pix, &mbus_fmt);
1451 	switch (f->pixelformat) {
1452 	case V4L2_PIX_FMT_YUV420:
1453 	case V4L2_PIX_FMT_YVU420:
1454 		pix->bytesperline = pix->width * 3 / 2;
1455 		break;
1456 	default:
1457 		pix->bytesperline = pix->width * f->bpp;
1458 		break;
1459 	}
1460 	pix->sizeimage = pix->height * pix->bytesperline;
1461 	return ret;
1462 }
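/*
 * Worked example (illustration only): a 640x480 request comes back with
 * bytesperline = 640 * 2 = 1280 and sizeimage = 480 * 1280 = 614400 for the
 * packed two-byte-per-pixel formats (YUYV, UYVY, RGB), but
 * bytesperline = 640 * 3 / 2 = 960 and sizeimage = 480 * 960 = 460800 for
 * the planar 4:2:0 formats.
 */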
1463 
1464 static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
1465 		struct v4l2_format *fmt)
1466 {
1467 	struct mcam_camera *cam = priv;
1468 	struct mcam_format_struct *f;
1469 	int ret;
1470 
1471 	/*
1472 	 * Can't do anything if the device is not idle
1473 	 * Also can't if there are streaming buffers in place.
1474 	 */
1475 	if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
1476 		return -EBUSY;
1477 
1478 	f = mcam_find_format(fmt->fmt.pix.pixelformat);
1479 
1480 	/*
1481 	 * See if the formatting works in principle.
1482 	 */
1483 	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
1484 	if (ret)
1485 		return ret;
1486 	/*
1487 	 * Now we start to change things for real, so let's do it
1488 	 * under lock.
1489 	 */
1490 	mutex_lock(&cam->s_mutex);
1491 	cam->pix_format = fmt->fmt.pix;
1492 	cam->mbus_code = f->mbus_code;
1493 
1494 	/*
1495 	 * Make sure we have appropriate DMA buffers.
1496 	 */
1497 	if (cam->buffer_mode == B_vmalloc) {
1498 		ret = mcam_check_dma_buffers(cam);
1499 		if (ret)
1500 			goto out;
1501 	}
1502 	mcam_set_config_needed(cam, 1);
1503 out:
1504 	mutex_unlock(&cam->s_mutex);
1505 	return ret;
1506 }
1507 
1508 /*
1509  * Return our stored notion of how the camera is/should be configured.
1510  * The V4L2 spec wants us to be smarter, and actually get this from
1511  * the camera (and not mess with it at open time).  Someday.
1512  */
1513 static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
1514 		struct v4l2_format *f)
1515 {
1516 	struct mcam_camera *cam = priv;
1517 
1518 	f->fmt.pix = cam->pix_format;
1519 	return 0;
1520 }
1521 
1522 /*
1523  * We only have one input - the sensor - so minimize the nonsense here.
1524  */
1525 static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1526 		struct v4l2_input *input)
1527 {
1528 	if (input->index != 0)
1529 		return -EINVAL;
1530 
1531 	input->type = V4L2_INPUT_TYPE_CAMERA;
1532 	input->std = V4L2_STD_ALL; /* Not sure what should go here */
1533 	strcpy(input->name, "Camera");
1534 	return 0;
1535 }
1536 
1537 static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
1538 {
1539 	*i = 0;
1540 	return 0;
1541 }
1542 
1543 static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1544 {
1545 	if (i != 0)
1546 		return -EINVAL;
1547 	return 0;
1548 }
1549 
1550 /* from vivi.c */
1551 static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id a)
1552 {
1553 	return 0;
1554 }
1555 
1556 static int mcam_vidioc_g_std(struct file *filp, void *priv, v4l2_std_id *a)
1557 {
1558 	*a = V4L2_STD_NTSC_M;
1559 	return 0;
1560 }
1561 
1562 /*
1563  * G/S_PARM.  Most of this is done by the sensor, but we are
1564  * the level which controls the number of read buffers.
1565  */
1566 static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1567 		struct v4l2_streamparm *parms)
1568 {
1569 	struct mcam_camera *cam = priv;
1570 	int ret;
1571 
1572 	mutex_lock(&cam->s_mutex);
1573 	ret = sensor_call(cam, video, g_parm, parms);
1574 	mutex_unlock(&cam->s_mutex);
1575 	parms->parm.capture.readbuffers = n_dma_bufs;
1576 	return ret;
1577 }
1578 
1579 static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1580 		struct v4l2_streamparm *parms)
1581 {
1582 	struct mcam_camera *cam = priv;
1583 	int ret;
1584 
1585 	mutex_lock(&cam->s_mutex);
1586 	ret = sensor_call(cam, video, s_parm, parms);
1587 	mutex_unlock(&cam->s_mutex);
1588 	parms->parm.capture.readbuffers = n_dma_bufs;
1589 	return ret;
1590 }
1591 
1592 static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
1593 		struct v4l2_frmsizeenum *sizes)
1594 {
1595 	struct mcam_camera *cam = priv;
1596 	int ret;
1597 
1598 	mutex_lock(&cam->s_mutex);
1599 	ret = sensor_call(cam, video, enum_framesizes, sizes);
1600 	mutex_unlock(&cam->s_mutex);
1601 	return ret;
1602 }
1603 
1604 static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
1605 		struct v4l2_frmivalenum *interval)
1606 {
1607 	struct mcam_camera *cam = priv;
1608 	int ret;
1609 
1610 	mutex_lock(&cam->s_mutex);
1611 	ret = sensor_call(cam, video, enum_frameintervals, interval);
1612 	mutex_unlock(&cam->s_mutex);
1613 	return ret;
1614 }
1615 
1616 #ifdef CONFIG_VIDEO_ADV_DEBUG
1617 static int mcam_vidioc_g_register(struct file *file, void *priv,
1618 		struct v4l2_dbg_register *reg)
1619 {
1620 	struct mcam_camera *cam = priv;
1621 
1622 	if (reg->reg > cam->regs_size - 4)
1623 		return -EINVAL;
1624 	reg->val = mcam_reg_read(cam, reg->reg);
1625 	reg->size = 4;
1626 	return 0;
1627 }
1628 
1629 static int mcam_vidioc_s_register(struct file *file, void *priv,
1630 		const struct v4l2_dbg_register *reg)
1631 {
1632 	struct mcam_camera *cam = priv;
1633 
1634 	if (reg->reg > cam->regs_size - 4)
1635 		return -EINVAL;
1636 	mcam_reg_write(cam, reg->reg, reg->val);
1637 	return 0;
1638 }
1639 #endif
1640 
1641 static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
1642 	.vidioc_querycap	= mcam_vidioc_querycap,
1643 	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
1644 	.vidioc_try_fmt_vid_cap	= mcam_vidioc_try_fmt_vid_cap,
1645 	.vidioc_s_fmt_vid_cap	= mcam_vidioc_s_fmt_vid_cap,
1646 	.vidioc_g_fmt_vid_cap	= mcam_vidioc_g_fmt_vid_cap,
1647 	.vidioc_enum_input	= mcam_vidioc_enum_input,
1648 	.vidioc_g_input		= mcam_vidioc_g_input,
1649 	.vidioc_s_input		= mcam_vidioc_s_input,
1650 	.vidioc_s_std		= mcam_vidioc_s_std,
1651 	.vidioc_g_std		= mcam_vidioc_g_std,
1652 	.vidioc_reqbufs		= mcam_vidioc_reqbufs,
1653 	.vidioc_querybuf	= mcam_vidioc_querybuf,
1654 	.vidioc_qbuf		= mcam_vidioc_qbuf,
1655 	.vidioc_dqbuf		= mcam_vidioc_dqbuf,
1656 	.vidioc_streamon	= mcam_vidioc_streamon,
1657 	.vidioc_streamoff	= mcam_vidioc_streamoff,
1658 	.vidioc_g_parm		= mcam_vidioc_g_parm,
1659 	.vidioc_s_parm		= mcam_vidioc_s_parm,
1660 	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
1661 	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
1662 #ifdef CONFIG_VIDEO_ADV_DEBUG
1663 	.vidioc_g_register	= mcam_vidioc_g_register,
1664 	.vidioc_s_register	= mcam_vidioc_s_register,
1665 #endif
1666 };
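/*
 * Illustration only: user space drives the vb2 plumbing above with the
 * standard V4L2 streaming sequence, roughly (error handling and the
 * QUERYBUF/mmap step omitted):
 *
 *	struct v4l2_requestbuffers req = {
 *		.count = 3,
 *		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
 *		.memory = V4L2_MEMORY_MMAP,
 *	};
 *	ioctl(fd, VIDIOC_REQBUFS, &req);
 *	int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 *	ioctl(fd, VIDIOC_STREAMON, &type);
 *	struct v4l2_buffer buf = {
 *		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
 *		.memory = V4L2_MEMORY_MMAP,
 *	};
 *	ioctl(fd, VIDIOC_DQBUF, &buf);	-- blocks until a frame is delivered
 *	ioctl(fd, VIDIOC_QBUF, &buf);	-- hand the buffer back for reuse
 */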
1667 
1668 /* ---------------------------------------------------------------------- */
1669 /*
1670  * Our various file operations.
1671  */
1672 static int mcam_v4l_open(struct file *filp)
1673 {
1674 	struct mcam_camera *cam = video_drvdata(filp);
1675 	int ret = 0;
1676 
1677 	filp->private_data = cam;
1678 
1679 	cam->frame_state.frames = 0;
1680 	cam->frame_state.singles = 0;
1681 	cam->frame_state.delivered = 0;
1682 	mutex_lock(&cam->s_mutex);
1683 	if (cam->users == 0) {
1684 		ret = mcam_setup_vb2(cam);
1685 		if (ret)
1686 			goto out;
1687 		ret = mcam_ctlr_power_up(cam);
1688 		if (ret)
1689 			goto out;
1690 		__mcam_cam_reset(cam);
1691 		mcam_set_config_needed(cam, 1);
1692 	}
1693 	(cam->users)++;
1694 out:
1695 	mutex_unlock(&cam->s_mutex);
1696 	return ret;
1697 }
1698 
1699 
1700 static int mcam_v4l_release(struct file *filp)
1701 {
1702 	struct mcam_camera *cam = filp->private_data;
1703 
1704 	cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
1705 			cam->frame_state.frames, cam->frame_state.singles,
1706 			cam->frame_state.delivered);
1707 	mutex_lock(&cam->s_mutex);
1708 	(cam->users)--;
1709 	if (cam->users == 0) {
1710 		mcam_ctlr_stop_dma(cam);
1711 		mcam_cleanup_vb2(cam);
1712 		mcam_disable_mipi(cam);
1713 		mcam_ctlr_power_down(cam);
1714 		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
1715 			mcam_free_dma_bufs(cam);
1716 	}
1717 
1718 	mutex_unlock(&cam->s_mutex);
1719 	return 0;
1720 }
1721 
1722 static ssize_t mcam_v4l_read(struct file *filp,
1723 		char __user *buffer, size_t len, loff_t *pos)
1724 {
1725 	struct mcam_camera *cam = filp->private_data;
1726 	int ret;
1727 
1728 	mutex_lock(&cam->s_mutex);
1729 	ret = vb2_read(&cam->vb_queue, buffer, len, pos,
1730 			filp->f_flags & O_NONBLOCK);
1731 	mutex_unlock(&cam->s_mutex);
1732 	return ret;
1733 }
1734 
1735 
1736 
1737 static unsigned int mcam_v4l_poll(struct file *filp,
1738 		struct poll_table_struct *pt)
1739 {
1740 	struct mcam_camera *cam = filp->private_data;
1741 	int ret;
1742 
1743 	mutex_lock(&cam->s_mutex);
1744 	ret = vb2_poll(&cam->vb_queue, filp, pt);
1745 	mutex_unlock(&cam->s_mutex);
1746 	return ret;
1747 }
1748 
1749 
1750 static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
1751 {
1752 	struct mcam_camera *cam = filp->private_data;
1753 	int ret;
1754 
1755 	mutex_lock(&cam->s_mutex);
1756 	ret = vb2_mmap(&cam->vb_queue, vma);
1757 	mutex_unlock(&cam->s_mutex);
1758 	return ret;
1759 }
1760 
1761 
1762 
1763 static const struct v4l2_file_operations mcam_v4l_fops = {
1764 	.owner = THIS_MODULE,
1765 	.open = mcam_v4l_open,
1766 	.release = mcam_v4l_release,
1767 	.read = mcam_v4l_read,
1768 	.poll = mcam_v4l_poll,
1769 	.mmap = mcam_v4l_mmap,
1770 	.unlocked_ioctl = video_ioctl2,
1771 };
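
/*
 * Illustrative sketch (not part of this driver): the usual streaming-I/O
 * sequence as seen from user space, continuing the fragment above (same fd;
 * <sys/mman.h> is additionally needed).  REQBUFS/QBUF/STREAMON/DQBUF travel
 * through video_ioctl2() to the vidioc handlers, while the mmap() call lands
 * in mcam_v4l_mmap() -> vb2_mmap().  The buffer count below is an assumption
 * for the example.
 *
 *	struct v4l2_requestbuffers req = {
 *		.count = 3,
 *		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
 *		.memory = V4L2_MEMORY_MMAP,
 *	};
 *	ioctl(fd, VIDIOC_REQBUFS, &req);	// -> mcam_vidioc_reqbufs()
 *
 *	struct v4l2_buffer buf = {
 *		.index = 0,
 *		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
 *		.memory = V4L2_MEMORY_MMAP,
 *	};
 *	ioctl(fd, VIDIOC_QUERYBUF, &buf);	// -> mcam_vidioc_querybuf()
 *	void *frame = mmap(NULL, buf.length, PROT_READ, MAP_SHARED,
 *			   fd, buf.m.offset);	// -> mcam_v4l_mmap()
 *
 *	ioctl(fd, VIDIOC_QBUF, &buf);		// -> mcam_vidioc_qbuf()
 *	int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 *	ioctl(fd, VIDIOC_STREAMON, &type);	// -> mcam_vidioc_streamon()
 *	ioctl(fd, VIDIOC_DQBUF, &buf);		// blocks until a frame arrives
 */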
1772 
1773 
1774 /*
1775  * This template device holds all of those v4l2 methods; we
1776  * clone it for specific real devices.
1777  */
1778 static struct video_device mcam_v4l_template = {
1779 	.name = "mcam",
1780 	.tvnorms = V4L2_STD_NTSC_M,
1781 
1782 	.fops = &mcam_v4l_fops,
1783 	.ioctl_ops = &mcam_v4l_ioctl_ops,
1784 	.release = video_device_release_empty,
1785 };
1786 
1787 /* ---------------------------------------------------------------------- */
1788 /*
1789  * Interrupt handler stuff
1790  */
1791 static void mcam_frame_complete(struct mcam_camera *cam, int frame)
1792 {
1793 	/*
1794 	 * Basic frame housekeeping.
1795 	 */
1796 	set_bit(frame, &cam->flags);
1797 	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1798 	cam->next_buf = frame;
1799 	cam->buf_seq[frame] = ++(cam->sequence);
1800 	cam->frame_state.frames++;
1801 	/*
1802 	 * "This should never happen"
1803 	 */
1804 	if (cam->state != S_STREAMING)
1805 		return;
1806 	/*
1807 	 * Process the frame and set up the next one.
1808 	 */
1809 	cam->frame_complete(cam, frame);
1810 }
1811 
1812 
1813 /*
1814  * The interrupt handler; this needs to be called from the
1815  * platform irq handler with the lock held.
1816  */
1817 int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1818 {
1819 	unsigned int frame, handled = 0;
1820 
1821 	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1822 	/*
1823 	 * Handle any frame completions.  There really should
1824 	 * not be more than one of these, or we have fallen
1825 	 * far behind.
1826 	 *
1827 	 * When running in S/G mode, the frame number lacks any
1828 	 * real meaning - there's only one descriptor array - but
1829 	 * the controller still picks a different one to signal
1830 	 * each time.
1831 	 */
1832 	for (frame = 0; frame < cam->nbufs; frame++)
1833 		if (irqs & (IRQ_EOF0 << frame) &&
1834 			test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1835 			mcam_frame_complete(cam, frame);
1836 			handled = 1;
1837 			clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1838 			if (cam->buffer_mode == B_DMA_sg)
1839 				break;
1840 		}
1841 	/*
1842 	 * If a frame starts, note that we have DMA active.  This
1843 	 * code assumes that we won't get multiple frame interrupts
1844 	 * at once; may want to rethink that.
1845 	 */
1846 	for (frame = 0; frame < cam->nbufs; frame++) {
1847 		if (irqs & (IRQ_SOF0 << frame)) {
1848 			set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1849 			handled = IRQ_HANDLED;
1850 		}
1851 	}
1852 
1853 	if (handled == IRQ_HANDLED) {
1854 		set_bit(CF_DMA_ACTIVE, &cam->flags);
1855 		if (cam->buffer_mode == B_DMA_sg)
1856 			mcam_ctlr_stop(cam);
1857 	}
1858 	return handled;
1859 }
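
/*
 * Illustrative sketch (not part of this file): roughly how a platform glue
 * driver is expected to invoke mccic_irq() -- read the pending interrupt
 * bits and call it with cam->dev_lock held, then translate the return value
 * for the IRQ core.  The handler name is hypothetical.
 *
 *	static irqreturn_t my_platform_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *cam = data;
 *		unsigned int irqs;
 *		int handled;
 *
 *		spin_lock(&cam->dev_lock);
 *		irqs = mcam_reg_read(cam, REG_IRQSTAT);
 *		handled = mccic_irq(cam, irqs);
 *		spin_unlock(&cam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */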
1860 
1861 /* ---------------------------------------------------------------------- */
1862 /*
1863  * Registration and such.
1864  */
1865 static struct ov7670_config sensor_cfg = {
1866 	/*
1867 	 * Exclude QCIF mode, because it only captures a tiny portion
1868 	 * of the sensor FOV
1869 	 */
1870 	.min_width = 320,
1871 	.min_height = 240,
1872 };
1873 
1874 
1875 int mccic_register(struct mcam_camera *cam)
1876 {
1877 	struct i2c_board_info ov7670_info = {
1878 		.type = "ov7670",
1879 		.addr = 0x42 >> 1,
1880 		.platform_data = &sensor_cfg,
1881 	};
1882 	int ret;
1883 
1884 	/*
1885 	 * Validate the requested buffer mode.
1886 	 */
1887 	if (buffer_mode >= 0)
1888 		cam->buffer_mode = buffer_mode;
1889 	if (cam->buffer_mode == B_DMA_sg &&
1890 			cam->chip_id == MCAM_CAFE) {
1891 		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, "
1892 			"attempting vmalloc mode instead\n");
1893 		cam->buffer_mode = B_vmalloc;
1894 	}
1895 	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
1896 		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
1897 				cam->buffer_mode);
1898 		return -EINVAL;
1899 	}
1900 	/*
1901 	 * Register with V4L
1902 	 */
1903 	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
1904 	if (ret)
1905 		return ret;
1906 
1907 	mutex_init(&cam->s_mutex);
1908 	cam->state = S_NOTREADY;
1909 	mcam_set_config_needed(cam, 1);
1910 	cam->pix_format = mcam_def_pix_format;
1911 	cam->mbus_code = mcam_def_mbus_code;
1912 	INIT_LIST_HEAD(&cam->buffers);
1913 	mcam_ctlr_init(cam);
1914 
1915 	/*
1916 	 * Try to find the sensor.
1917 	 */
1918 	sensor_cfg.clock_speed = cam->clock_speed;
1919 	sensor_cfg.use_smbus = cam->use_smbus;
1920 	cam->sensor_addr = ov7670_info.addr;
1921 	cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
1922 			cam->i2c_adapter, &ov7670_info, NULL);
1923 	if (cam->sensor == NULL) {
1924 		ret = -ENODEV;
1925 		goto out_unregister;
1926 	}
1927 
1928 	ret = mcam_cam_init(cam);
1929 	if (ret)
1930 		goto out_unregister;
1931 	/*
1932 	 * Get the v4l2 setup done.
1933 	 */
1934 	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
1935 	if (ret)
1936 		goto out_unregister;
1937 	cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
1938 
1939 	mutex_lock(&cam->s_mutex);
1940 	cam->vdev = mcam_v4l_template;
1941 	cam->vdev.debug = 0;
1942 	cam->vdev.v4l2_dev = &cam->v4l2_dev;
1943 	video_set_drvdata(&cam->vdev, cam);
1944 	ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
1945 	if (ret)
1946 		goto out;
1947 
1948 	/*
1949 	 * If so requested, try to get our DMA buffers now.
1950 	 */
1951 	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
1952 		if (mcam_alloc_dma_bufs(cam, 1))
1953 			cam_warn(cam, "Unable to alloc DMA buffers at load;"
1954 					" will try again later.");
1955 	}
1956 
1957 out:
1958 	v4l2_ctrl_handler_free(&cam->ctrl_handler); /* still empty here, so harmless even on success */
1959 	mutex_unlock(&cam->s_mutex);
1960 	return ret;
1961 out_unregister:
1962 	v4l2_device_unregister(&cam->v4l2_dev);
1963 	return ret;
1964 }
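
/*
 * Illustrative sketch (not part of this file): the sort of setup a platform
 * driver performs before calling mccic_register().  The field values (chip
 * id, buffer mode, clock speed) and the helper names (my_power_up,
 * my_platform_irq, ...) are assumptions for the example; real glue code also
 * deals with clocks, power and the MMIO resource.
 *
 *	cam->dev = &pdev->dev;
 *	cam->regs = base;			// ioremap()ed controller registers
 *	cam->regs_size = resource_size(res);
 *	cam->i2c_adapter = adapter;		// bus that reaches the sensor
 *	cam->chip_id = MCAM_ARMADA610;		// assumed variant
 *	cam->buffer_mode = B_DMA_sg;
 *	cam->clock_speed = 45;			// MHz, assumed
 *	cam->plat_power_up = my_power_up;
 *	cam->plat_power_down = my_power_down;
 *
 *	ret = mccic_register(cam);
 *	if (ret == 0)
 *		ret = request_irq(irq, my_platform_irq, IRQF_SHARED,
 *				  "marvell-cam", cam);
 */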
1965 
1966 
1967 void mccic_shutdown(struct mcam_camera *cam)
1968 {
1969 	/*
1970 	 * If we have no users (and we really, really should have no
1971 	 * users) the device will already be powered down.  Trying to
1972 	 * take it down again will wedge the machine, which is frowned
1973 	 * upon.
1974 	 */
1975 	if (cam->users > 0) {
1976 		cam_warn(cam, "Removing a device with users!\n");
1977 		mcam_ctlr_power_down(cam);
1978 	}
1979 	vb2_queue_release(&cam->vb_queue);
1980 	if (cam->buffer_mode == B_vmalloc)
1981 		mcam_free_dma_bufs(cam);
1982 	video_unregister_device(&cam->vdev);
1983 	v4l2_ctrl_handler_free(&cam->ctrl_handler);
1984 	v4l2_device_unregister(&cam->v4l2_dev);
1985 }
1986 
1987 /*
1988  * Power management
1989  */
1990 #ifdef CONFIG_PM
1991 
1992 void mccic_suspend(struct mcam_camera *cam)
1993 {
1994 	mutex_lock(&cam->s_mutex);
1995 	if (cam->users > 0) {
1996 		enum mcam_state cstate = cam->state;
1997 
1998 		mcam_ctlr_stop_dma(cam);
1999 		mcam_ctlr_power_down(cam);
2000 		cam->state = cstate;
2001 	}
2002 	mutex_unlock(&cam->s_mutex);
2003 }
2004 
2005 int mccic_resume(struct mcam_camera *cam)
2006 {
2007 	int ret = 0;
2008 
2009 	mutex_lock(&cam->s_mutex);
2010 	if (cam->users > 0) {
2011 		ret = mcam_ctlr_power_up(cam);
2012 		if (ret) {
2013 			mutex_unlock(&cam->s_mutex);
2014 			return ret;
2015 		}
2016 		__mcam_cam_reset(cam);
2017 	} else {
2018 		mcam_ctlr_power_down(cam);
2019 	}
2020 	mutex_unlock(&cam->s_mutex);
2021 
2022 	set_bit(CF_CONFIG_NEEDED, &cam->flags);
2023 	if (cam->state == S_STREAMING) {
2024 		/*
2025 		 * If there was a buffer in the DMA engine at suspend
2026 		 * time, put it back on the queue or we'll forget about it.
2027 		 */
2028 		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
2029 			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
2030 		ret = mcam_read_setup(cam);
2031 	}
2032 	return ret;
2033 }
2034 #endif /* CONFIG_PM */
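
/*
 * Illustrative sketch (not part of this file): how a platform driver's
 * suspend/resume callbacks can wrap the helpers above.  Whether these are
 * wired up through dev_pm_ops or the legacy platform bus hooks is up to the
 * glue driver; the drvdata layout here is an assumption.
 *
 *	static int my_platform_suspend(struct device *dev)
 *	{
 *		struct mcam_camera *cam = dev_get_drvdata(dev);
 *
 *		mccic_suspend(cam);
 *		return 0;
 *	}
 *
 *	static int my_platform_resume(struct device *dev)
 *	{
 *		struct mcam_camera *cam = dev_get_drvdata(dev);
 *
 *		return mccic_resume(cam);
 *	}
 */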
2035