1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * The Marvell camera core.  This device appears in a number of settings,
4  * so it needs platform-specific support outside of the core.
5  *
6  * Copyright 2011 Jonathan Corbet corbet@lwn.net
7  * Copyright 2018 Lubomir Rintel <lkundrak@v3.sk>
8  */
9 #include <linux/kernel.h>
10 #include <linux/module.h>
11 #include <linux/fs.h>
12 #include <linux/mm.h>
13 #include <linux/i2c.h>
14 #include <linux/interrupt.h>
15 #include <linux/spinlock.h>
16 #include <linux/slab.h>
17 #include <linux/device.h>
18 #include <linux/wait.h>
19 #include <linux/list.h>
20 #include <linux/dma-mapping.h>
21 #include <linux/delay.h>
22 #include <linux/vmalloc.h>
23 #include <linux/io.h>
24 #include <linux/clk.h>
25 #include <linux/clk-provider.h>
26 #include <linux/videodev2.h>
27 #include <linux/pm_runtime.h>
28 #include <media/v4l2-device.h>
29 #include <media/v4l2-ioctl.h>
30 #include <media/v4l2-ctrls.h>
31 #include <media/v4l2-event.h>
32 #include <media/videobuf2-vmalloc.h>
33 #include <media/videobuf2-dma-contig.h>
34 #include <media/videobuf2-dma-sg.h>
35 
36 #include "mcam-core.h"
37 
38 #ifdef MCAM_MODE_VMALLOC
39 /*
40  * Internal DMA buffer management.  Since the controller cannot do S/G I/O,
41  * we must have physically contiguous buffers to bring frames into.
42  * These parameters control how many buffers we use, whether we
43  * allocate them at load time (better chance of success, but nails down
44  * memory) or when somebody tries to use the camera (riskier), and,
45  * for load-time allocation, how big they should be.
46  *
47  * The controller can cycle through three buffers.  We could use
48  * more by flipping pointers around, but it probably makes little
49  * sense.
50  */
51 
52 static bool alloc_bufs_at_read;
53 module_param(alloc_bufs_at_read, bool, 0444);
54 MODULE_PARM_DESC(alloc_bufs_at_read,
55 		"Non-zero value causes DMA buffers to be allocated when the video capture device is read, rather than at module load time.  This saves memory, but decreases the chances of successfully getting those buffers.  This parameter is only used in the vmalloc buffer mode");
56 
57 static int n_dma_bufs = 3;
58 module_param(n_dma_bufs, uint, 0644);
59 MODULE_PARM_DESC(n_dma_bufs,
60 		"The number of DMA buffers to allocate.  Can be either two (saves memory, makes timing tighter) or three.");
61 
62 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;  /* Worst case */
63 module_param(dma_buf_size, uint, 0444);
64 MODULE_PARM_DESC(dma_buf_size,
65 		"The size of the allocated DMA buffers.  If actual operating parameters require larger buffers, an attempt to reallocate will be made.");
66 #else /* MCAM_MODE_VMALLOC */
67 static const bool alloc_bufs_at_read;
68 static const int n_dma_bufs = 3;  /* Used by S/G_PARM */
69 #endif /* MCAM_MODE_VMALLOC */
70 
71 static bool flip;
72 module_param(flip, bool, 0444);
73 MODULE_PARM_DESC(flip,
74 		"If set, the sensor will be instructed to flip the image vertically.");
75 
76 static int buffer_mode = -1;
77 module_param(buffer_mode, int, 0444);
78 MODULE_PARM_DESC(buffer_mode,
79 		"Set the buffer mode to be used; default is to go with what the platform driver asks for.  Set to 0 for vmalloc, 1 for DMA contiguous.");
80 
81 /*
82  * Status flags.  Always manipulated with bit operations.
83  */
84 #define CF_BUF0_VALID	 0	/* Buffers valid - first three */
85 #define CF_BUF1_VALID	 1
86 #define CF_BUF2_VALID	 2
87 #define CF_DMA_ACTIVE	 3	/* A frame is incoming */
88 #define CF_CONFIG_NEEDED 4	/* Must configure hardware */
89 #define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
90 #define CF_SG_RESTART	 6	/* SG restart needed */
91 #define CF_FRAME_SOF0	 7	/* Frame 0 started */
92 #define CF_FRAME_SOF1	 8
93 #define CF_FRAME_SOF2	 9
94 
95 #define sensor_call(cam, o, f, args...) \
96 	v4l2_subdev_call(cam->sensor, o, f, ##args)
97 
98 #define notifier_to_mcam(notifier) \
99 	container_of(notifier, struct mcam_camera, notifier)
100 
101 static struct mcam_format_struct {
102 	__u32 pixelformat;
103 	int bpp;   /* Bytes per pixel */
104 	bool planar;
105 	u32 mbus_code;
106 } mcam_formats[] = {
107 	{
108 		.pixelformat	= V4L2_PIX_FMT_YUYV,
109 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
110 		.bpp		= 2,
111 		.planar		= false,
112 	},
113 	{
114 		.pixelformat	= V4L2_PIX_FMT_YVYU,
115 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
116 		.bpp		= 2,
117 		.planar		= false,
118 	},
119 	{
120 		.pixelformat	= V4L2_PIX_FMT_YUV420,
121 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
122 		.bpp		= 1,
123 		.planar		= true,
124 	},
125 	{
126 		.pixelformat	= V4L2_PIX_FMT_YVU420,
127 		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
128 		.bpp		= 1,
129 		.planar		= true,
130 	},
131 	{
132 		.pixelformat	= V4L2_PIX_FMT_XRGB444,
133 		.mbus_code	= MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
134 		.bpp		= 2,
135 		.planar		= false,
136 	},
137 	{
138 		.pixelformat	= V4L2_PIX_FMT_RGB565,
139 		.mbus_code	= MEDIA_BUS_FMT_RGB565_2X8_LE,
140 		.bpp		= 2,
141 		.planar		= false,
142 	},
143 	{
144 		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
145 		.mbus_code	= MEDIA_BUS_FMT_SBGGR8_1X8,
146 		.bpp		= 1,
147 		.planar		= false,
148 	},
149 };
150 #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
151 
152 static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
153 {
154 	unsigned i;
155 
156 	for (i = 0; i < N_MCAM_FMTS; i++)
157 		if (mcam_formats[i].pixelformat == pixelformat)
158 			return mcam_formats + i;
159 	/* Not found? Then return the first format. */
160 	return mcam_formats;
161 }
162 
163 /*
164  * The default format we use until somebody says otherwise.
165  */
166 static const struct v4l2_pix_format mcam_def_pix_format = {
167 	.width		= VGA_WIDTH,
168 	.height		= VGA_HEIGHT,
169 	.pixelformat	= V4L2_PIX_FMT_YUYV,
170 	.field		= V4L2_FIELD_NONE,
171 	.bytesperline	= VGA_WIDTH*2,
172 	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
173 	.colorspace	= V4L2_COLORSPACE_SRGB,
174 };
175 
176 static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
177 
178 
179 /*
180  * The two-word DMA descriptor format used by the Armada 610 and the like.  There
181  * is a three-word format as well (set C1_DESC_3WORD) where the third
182  * word is a pointer to the next descriptor, but we don't use it.  Two-word
183  * descriptors have to be contiguous in memory.
184  */
185 struct mcam_dma_desc {
186 	u32 dma_addr;
187 	u32 segment_len;
188 };
189 
190 /*
191  * Our buffer type for working with videobuf2.  Note that the vb2
192  * developers have decreed that struct vb2_v4l2_buffer must be at the
193  * beginning of this structure.
194  */
195 struct mcam_vb_buffer {
196 	struct vb2_v4l2_buffer vb_buf;
197 	struct list_head queue;
198 	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
199 	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
200 };
201 
202 static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_v4l2_buffer *vb)
203 {
204 	return container_of(vb, struct mcam_vb_buffer, vb_buf);
205 }
206 
207 /*
208  * Hand a completed buffer back to user space.
209  */
210 static void mcam_buffer_done(struct mcam_camera *cam, int frame,
211 		struct vb2_v4l2_buffer *vbuf)
212 {
213 	vbuf->vb2_buf.planes[0].bytesused = cam->pix_format.sizeimage;
214 	vbuf->sequence = cam->buf_seq[frame];
215 	vbuf->field = V4L2_FIELD_NONE;
216 	vbuf->vb2_buf.timestamp = ktime_get_ns();
217 	vb2_set_plane_payload(&vbuf->vb2_buf, 0, cam->pix_format.sizeimage);
218 	vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE);
219 }
220 
221 
222 
223 /*
224  * Debugging and related.
225  */
226 #define cam_err(cam, fmt, arg...) \
227 	dev_err((cam)->dev, fmt, ##arg);
228 #define cam_warn(cam, fmt, arg...) \
229 	dev_warn((cam)->dev, fmt, ##arg);
230 #define cam_dbg(cam, fmt, arg...) \
231 	dev_dbg((cam)->dev, fmt, ##arg);
232 
233 
234 /*
235  * Flag manipulation helpers
236  */
237 static void mcam_reset_buffers(struct mcam_camera *cam)
238 {
239 	int i;
240 
241 	cam->next_buf = -1;
242 	for (i = 0; i < cam->nbufs; i++) {
243 		clear_bit(i, &cam->flags);
244 		clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
245 	}
246 }
247 
248 static inline int mcam_needs_config(struct mcam_camera *cam)
249 {
250 	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
251 }
252 
253 static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
254 {
255 	if (needed)
256 		set_bit(CF_CONFIG_NEEDED, &cam->flags);
257 	else
258 		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
259 }
260 
261 /* ------------------------------------------------------------------- */
262 /*
263  * Make the controller start grabbing images.  Everything must
264  * be set up before doing this.
265  */
266 static void mcam_ctlr_start(struct mcam_camera *cam)
267 {
268 	/* set_bit performs a read, so no other barrier should be
269 	   needed here */
270 	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
271 }
272 
273 static void mcam_ctlr_stop(struct mcam_camera *cam)
274 {
275 	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
276 }
277 
278 static void mcam_enable_mipi(struct mcam_camera *mcam)
279 {
280 	/* Using MIPI mode; program the DPHY and enable MIPI */
281 	if (mcam->calc_dphy)
282 		mcam->calc_dphy(mcam);
283 	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
284 			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
285 	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
286 	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
287 	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);
288 
289 	if (!mcam->mipi_enabled) {
290 		if (mcam->lane > 4 || mcam->lane <= 0) {
291 			cam_warn(mcam, "lane number error\n");
292 			mcam->lane = 1;	/* set the default value */
293 		}
294 		/*
295 		 * 0x41 activates 1 lane
296 		 * 0x43 activates 2 lanes
297 		 * 0x45 activates 3 lanes (never happens)
298 		 * 0x47 activates 4 lanes
299 		 */
300 		mcam_reg_write(mcam, REG_CSI2_CTRL0,
301 			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
302 		mcam->mipi_enabled = true;
303 	}
304 }
305 
306 static void mcam_disable_mipi(struct mcam_camera *mcam)
307 {
308 	/* Using parallel mode, or MIPI needs to be disabled */
309 	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
310 	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
311 	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
312 	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
313 	mcam->mipi_enabled = false;
314 }
315 
316 static bool mcam_fmt_is_planar(__u32 pfmt)
317 {
318 	struct mcam_format_struct *f;
319 
320 	f = mcam_find_format(pfmt);
321 	return f->planar;
322 }
323 
324 static void mcam_write_yuv_bases(struct mcam_camera *cam,
325 				 unsigned frame, dma_addr_t base)
326 {
327 	struct v4l2_pix_format *fmt = &cam->pix_format;
328 	u32 pixel_count = fmt->width * fmt->height;
329 	dma_addr_t y, u = 0, v = 0;
330 
331 	y = base;
332 
333 	switch (fmt->pixelformat) {
334 	case V4L2_PIX_FMT_YUV420:
335 		u = y + pixel_count;
336 		v = u + pixel_count / 4;
337 		break;
338 	case V4L2_PIX_FMT_YVU420:
339 		v = y + pixel_count;
340 		u = v + pixel_count / 4;
341 		break;
342 	default:
343 		break;
344 	}
345 
346 	mcam_reg_write(cam, REG_Y0BAR + frame * 4, y);
347 	if (mcam_fmt_is_planar(fmt->pixelformat)) {
348 		mcam_reg_write(cam, REG_U0BAR + frame * 4, u);
349 		mcam_reg_write(cam, REG_V0BAR + frame * 4, v);
350 	}
351 }
352 
353 /* ------------------------------------------------------------------- */
354 
355 #ifdef MCAM_MODE_VMALLOC
356 /*
357  * Code specific to the vmalloc buffer mode.
358  */
359 
360 /*
361  * Allocate in-kernel DMA buffers for vmalloc mode.
362  */
363 static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
364 {
365 	int i;
366 
367 	mcam_set_config_needed(cam, 1);
368 	if (loadtime)
369 		cam->dma_buf_size = dma_buf_size;
370 	else
371 		cam->dma_buf_size = cam->pix_format.sizeimage;
372 	if (n_dma_bufs > 3)
373 		n_dma_bufs = 3;
374 
375 	cam->nbufs = 0;
376 	for (i = 0; i < n_dma_bufs; i++) {
377 		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
378 				cam->dma_buf_size, cam->dma_handles + i,
379 				GFP_KERNEL);
380 		if (cam->dma_bufs[i] == NULL) {
381 			cam_warn(cam, "Failed to allocate DMA buffer\n");
382 			break;
383 		}
384 		(cam->nbufs)++;
385 	}
386 
387 	switch (cam->nbufs) {
388 	case 1:
389 		dma_free_coherent(cam->dev, cam->dma_buf_size,
390 				cam->dma_bufs[0], cam->dma_handles[0]);
391 		cam->nbufs = 0;
392 		fallthrough;
393 	case 0:
394 		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
395 		return -ENOMEM;
396 
397 	case 2:
398 		if (n_dma_bufs > 2)
399 			cam_warn(cam, "Will limp along with only 2 buffers\n");
400 		break;
401 	}
402 	return 0;
403 }
404 
405 static void mcam_free_dma_bufs(struct mcam_camera *cam)
406 {
407 	int i;
408 
409 	for (i = 0; i < cam->nbufs; i++) {
410 		dma_free_coherent(cam->dev, cam->dma_buf_size,
411 				cam->dma_bufs[i], cam->dma_handles[i]);
412 		cam->dma_bufs[i] = NULL;
413 	}
414 	cam->nbufs = 0;
415 }
416 
417 
418 /*
419  * Set up DMA buffers when operating in vmalloc mode
420  */
421 static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
422 {
423 	/*
424 	 * Store the first two YUV buffers. Then either
425 	 * set the third if it exists, or tell the controller
426 	 * to just use two.
427 	 */
428 	mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
429 	mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
430 	if (cam->nbufs > 2) {
431 		mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
432 		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
433 	} else
434 		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
435 	if (cam->chip_id == MCAM_CAFE)
436 		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
437 }
438 
439 /*
440  * Copy data out to user space in the vmalloc case
441  */
442 static void mcam_frame_tasklet(struct tasklet_struct *t)
443 {
444 	struct mcam_camera *cam = from_tasklet(cam, t, s_tasklet);
445 	int i;
446 	unsigned long flags;
447 	struct mcam_vb_buffer *buf;
448 
449 	spin_lock_irqsave(&cam->dev_lock, flags);
450 	for (i = 0; i < cam->nbufs; i++) {
451 		int bufno = cam->next_buf;
452 
453 		if (cam->state != S_STREAMING || bufno < 0)
454 			break;  /* I/O got stopped */
455 		if (++(cam->next_buf) >= cam->nbufs)
456 			cam->next_buf = 0;
457 		if (!test_bit(bufno, &cam->flags))
458 			continue;
459 		if (list_empty(&cam->buffers)) {
460 			cam->frame_state.singles++;
461 			break;  /* Leave it valid, hope for better later */
462 		}
463 		cam->frame_state.delivered++;
464 		clear_bit(bufno, &cam->flags);
465 		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
466 				queue);
467 		list_del_init(&buf->queue);
468 		/*
469 		 * Drop the lock during the big copy.  This *should* be safe...
470 		 */
471 		spin_unlock_irqrestore(&cam->dev_lock, flags);
472 		memcpy(vb2_plane_vaddr(&buf->vb_buf.vb2_buf, 0),
473 				cam->dma_bufs[bufno],
474 				cam->pix_format.sizeimage);
475 		mcam_buffer_done(cam, bufno, &buf->vb_buf);
476 		spin_lock_irqsave(&cam->dev_lock, flags);
477 	}
478 	spin_unlock_irqrestore(&cam->dev_lock, flags);
479 }
480 
481 
482 /*
483  * Make sure our allocated buffers are up to the task.
484  */
485 static int mcam_check_dma_buffers(struct mcam_camera *cam)
486 {
487 	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
488 			mcam_free_dma_bufs(cam);
489 	if (cam->nbufs == 0)
490 		return mcam_alloc_dma_bufs(cam, 0);
491 	return 0;
492 }
493 
494 static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
495 {
496 	tasklet_schedule(&cam->s_tasklet);
497 }
498 
499 #else /* MCAM_MODE_VMALLOC */
500 
501 static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
502 {
503 	return 0;
504 }
505 
506 static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
507 {
508 	return;
509 }
510 
511 static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
512 {
513 	return 0;
514 }
515 
516 
517 
518 #endif /* MCAM_MODE_VMALLOC */
519 
520 
521 #ifdef MCAM_MODE_DMA_CONTIG
522 /* ---------------------------------------------------------------------- */
523 /*
524  * DMA-contiguous code.
525  */
526 
527 /*
528  * Set up a contiguous buffer for the given frame.  Here also is where
529  * the underrun strategy is set: if there is no buffer available, reuse
530  * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
531  * keep the interrupt handler from giving that buffer back to user
532  * space.  In this way, we always have a buffer to DMA to and don't
533  * have to try to play games stopping and restarting the controller.
534  */
535 static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
536 {
537 	struct mcam_vb_buffer *buf;
538 	dma_addr_t dma_handle;
539 	struct vb2_v4l2_buffer *vb;
540 
541 	/*
542 	 * If there are no available buffers, go into single mode
543 	 */
544 	if (list_empty(&cam->buffers)) {
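		/*
		 * No free buffer: recycle whatever is in the other BAR
		 * (frame ^ 0x1 flips between frame 0 and frame 1) and let
		 * CF_SINGLE_BUFFER keep the completion path from handing
		 * that buffer back to user space.
		 */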
545 		buf = cam->vb_bufs[frame ^ 0x1];
546 		set_bit(CF_SINGLE_BUFFER, &cam->flags);
547 		cam->frame_state.singles++;
548 	} else {
549 		/*
550 		 * OK, we have a buffer we can use.
551 		 */
552 		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
553 					queue);
554 		list_del_init(&buf->queue);
555 		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
556 	}
557 
558 	cam->vb_bufs[frame] = buf;
559 	vb = &buf->vb_buf;
560 
561 	dma_handle = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);
562 	mcam_write_yuv_bases(cam, frame, dma_handle);
563 }
564 
565 /*
566  * Initial B_DMA_contig setup.
567  */
568 static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
569 {
570 	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
571 	cam->nbufs = 2;
572 	mcam_set_contig_buffer(cam, 0);
573 	mcam_set_contig_buffer(cam, 1);
574 }
575 
576 /*
577  * Frame completion handling.
578  */
579 static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
580 {
581 	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
582 
583 	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
584 		cam->frame_state.delivered++;
585 		cam->vb_bufs[frame] = NULL;
586 		mcam_buffer_done(cam, frame, &buf->vb_buf);
587 	}
588 	mcam_set_contig_buffer(cam, frame);
589 }
590 
591 #endif /* MCAM_MODE_DMA_CONTIG */
592 
593 #ifdef MCAM_MODE_DMA_SG
594 /* ---------------------------------------------------------------------- */
595 /*
596  * Scatter/gather-specific code.
597  */
598 
599 /*
600  * Set up the next buffer for S/G I/O; caller should be sure that
601  * the controller is stopped and a buffer is available.
602  */
603 static void mcam_sg_next_buffer(struct mcam_camera *cam)
604 {
605 	struct mcam_vb_buffer *buf;
606 	struct sg_table *sg_table;
607 
608 	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
609 	list_del_init(&buf->queue);
610 	sg_table = vb2_dma_sg_plane_desc(&buf->vb_buf.vb2_buf, 0);
611 	/*
612 	 * Very Bad Not Good Things happen if you don't clear
613 	 * C1_DESC_ENA before making any descriptor changes.
614 	 */
615 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
616 	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
617 	mcam_reg_write(cam, REG_DESC_LEN_Y,
618 			sg_table->nents * sizeof(struct mcam_dma_desc));
619 	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
620 	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
621 	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
622 	cam->vb_bufs[0] = buf;
623 }
624 
625 /*
626  * Initial B_DMA_sg setup
627  */
628 static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
629 {
630 	/*
631 	 * The list-empty condition can hit us at resume time
632 	 * if the buffer list was empty when the system was suspended.
633 	 */
634 	if (list_empty(&cam->buffers)) {
635 		set_bit(CF_SG_RESTART, &cam->flags);
636 		return;
637 	}
638 
639 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
640 	mcam_sg_next_buffer(cam);
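	/*
	 * Only descriptor array 0 is ever used in S/G mode, but the
	 * interrupt code iterates over cam->nbufs frame slots, so
	 * pretend all three exist.
	 */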
641 	cam->nbufs = 3;
642 }
643 
644 
645 /*
646  * Frame completion with S/G is trickier.  We can't muck with
647  * a descriptor chain on the fly, since the controller buffers it
648  * internally.  So we have to actually stop and restart; Marvell
649  * says this is the way to do it.
650  *
651  * Of course, stopping is easier said than done; experience shows
652  * that the controller can start a frame *after* C0_ENABLE has been
653  * cleared.  So when running in S/G mode, the controller is "stopped"
654  * on receipt of the start-of-frame interrupt.  That means we can
655  * safely change the DMA descriptor array here and restart things
656  * (assuming there's another buffer waiting to go).
657  */
658 static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
659 {
660 	struct mcam_vb_buffer *buf = cam->vb_bufs[0];
661 
662 	/*
663 	 * If we're no longer supposed to be streaming, don't do anything.
664 	 */
665 	if (cam->state != S_STREAMING)
666 		return;
667 	/*
668 	 * If we have another buffer available, put it in and
669 	 * restart the engine.
670 	 */
671 	if (!list_empty(&cam->buffers)) {
672 		mcam_sg_next_buffer(cam);
673 		mcam_ctlr_start(cam);
674 	/*
675 	 * Otherwise set CF_SG_RESTART and the controller will
676 	 * be restarted once another buffer shows up.
677 	 */
678 	} else {
679 		set_bit(CF_SG_RESTART, &cam->flags);
680 		cam->frame_state.singles++;
681 		cam->vb_bufs[0] = NULL;
682 	}
683 	/*
684 	 * Now we can give the completed frame back to user space.
685 	 */
686 	cam->frame_state.delivered++;
687 	mcam_buffer_done(cam, frame, &buf->vb_buf);
688 }
689 
690 
691 /*
692  * Scatter/gather mode requires stopping the controller between
693  * frames so we can put in a new DMA descriptor array.  If no new
694  * buffer exists at frame completion, the controller is left stopped;
695  * this function is charged with getting things going again.
696  */
697 static void mcam_sg_restart(struct mcam_camera *cam)
698 {
699 	mcam_ctlr_dma_sg(cam);
700 	mcam_ctlr_start(cam);
701 	clear_bit(CF_SG_RESTART, &cam->flags);
702 }
703 
704 #else /* MCAM_MODE_DMA_SG */
705 
706 static inline void mcam_sg_restart(struct mcam_camera *cam)
707 {
708 	return;
709 }
710 
711 #endif /* MCAM_MODE_DMA_SG */
712 
713 /* ---------------------------------------------------------------------- */
714 /*
715  * Buffer-mode-independent controller code.
716  */
717 
718 /*
719  * Image format setup
720  */
721 static void mcam_ctlr_image(struct mcam_camera *cam)
722 {
723 	struct v4l2_pix_format *fmt = &cam->pix_format;
724 	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;
725 
726 	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
727 		fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
728 	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
729 	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;
730 
731 	switch (fmt->pixelformat) {
732 	case V4L2_PIX_FMT_YUYV:
733 	case V4L2_PIX_FMT_YVYU:
734 		widthy = fmt->width * 2;
735 		widthuv = 0;
736 		break;
737 	case V4L2_PIX_FMT_YUV420:
738 	case V4L2_PIX_FMT_YVU420:
739 		widthy = fmt->width;
740 		widthuv = fmt->width / 2;
741 		break;
742 	default:
743 		widthy = fmt->bytesperline;
744 		widthuv = 0;
745 		break;
746 	}
747 
748 	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
749 			IMGP_YP_MASK | IMGP_UVP_MASK);
750 	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
751 	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);
752 
753 	/*
754 	 * Tell the controller about the image format we are using.
755 	 */
756 	switch (fmt->pixelformat) {
757 	case V4L2_PIX_FMT_YUV420:
758 	case V4L2_PIX_FMT_YVU420:
759 		mcam_reg_write_mask(cam, REG_CTRL0,
760 			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
761 		break;
762 	case V4L2_PIX_FMT_YUYV:
763 		mcam_reg_write_mask(cam, REG_CTRL0,
764 			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
765 		break;
766 	case V4L2_PIX_FMT_YVYU:
767 		mcam_reg_write_mask(cam, REG_CTRL0,
768 			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
769 		break;
770 	case V4L2_PIX_FMT_XRGB444:
771 		mcam_reg_write_mask(cam, REG_CTRL0,
772 			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XBGR, C0_DF_MASK);
773 		break;
774 	case V4L2_PIX_FMT_RGB565:
775 		mcam_reg_write_mask(cam, REG_CTRL0,
776 			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
777 		break;
778 	case V4L2_PIX_FMT_SBGGR8:
779 		mcam_reg_write_mask(cam, REG_CTRL0,
780 			C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
781 		break;
782 	default:
783 		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
784 		break;
785 	}
786 
787 	/*
788 	 * Make sure it knows we want to use hsync/vsync.
789 	 */
790 	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
791 }
792 
793 
794 /*
795  * Configure the controller for operation; caller holds the
796  * device mutex.
797  */
798 static int mcam_ctlr_configure(struct mcam_camera *cam)
799 {
800 	unsigned long flags;
801 
802 	spin_lock_irqsave(&cam->dev_lock, flags);
803 	clear_bit(CF_SG_RESTART, &cam->flags);
804 	cam->dma_setup(cam);
805 	mcam_ctlr_image(cam);
806 	mcam_set_config_needed(cam, 0);
807 	spin_unlock_irqrestore(&cam->dev_lock, flags);
808 	return 0;
809 }
810 
811 static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
812 {
813 	/*
814 	 * Clear any pending interrupts, since we do not
815 	 * expect to have I/O active prior to enabling.
816 	 */
817 	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
818 	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
819 }
820 
821 static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
822 {
823 	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
824 }
825 
826 /*
827  * Stop the controller, and don't return until we're really sure that no
828  * further DMA is going on.
829  */
830 static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
831 {
832 	unsigned long flags;
833 
834 	/*
835 	 * Theory: stop the camera controller (whether it is operating
836 	 * or not).  Delay briefly just in case we race with the SOF
837 	 * interrupt, then wait until no DMA is active.
838 	 */
839 	spin_lock_irqsave(&cam->dev_lock, flags);
840 	clear_bit(CF_SG_RESTART, &cam->flags);
841 	mcam_ctlr_stop(cam);
842 	cam->state = S_IDLE;
843 	spin_unlock_irqrestore(&cam->dev_lock, flags);
844 	/*
845 	 * This is a brutally long sleep, but experience shows that
846 	 * it can take the controller a while to get the message that
847 	 * it needs to stop grabbing frames.  In particular, we can
848 	 * sometimes (on mmp) get a frame at the end WITHOUT the
849 	 * start-of-frame indication.
850 	 */
851 	msleep(150);
852 	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
853 		cam_err(cam, "Timeout waiting for DMA to end\n");
854 		/* This would be bad news - what now? */
855 	spin_lock_irqsave(&cam->dev_lock, flags);
856 	mcam_ctlr_irq_disable(cam);
857 	spin_unlock_irqrestore(&cam->dev_lock, flags);
858 }
859 
860 /*
861  * Power up and down.
862  */
863 static int mcam_ctlr_power_up(struct mcam_camera *cam)
864 {
865 	unsigned long flags;
866 	int ret;
867 
868 	spin_lock_irqsave(&cam->dev_lock, flags);
869 	if (cam->plat_power_up) {
870 		ret = cam->plat_power_up(cam);
871 		if (ret) {
872 			spin_unlock_irqrestore(&cam->dev_lock, flags);
873 			return ret;
874 		}
875 	}
876 	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
877 	spin_unlock_irqrestore(&cam->dev_lock, flags);
878 	return 0;
879 }
880 
881 static void mcam_ctlr_power_down(struct mcam_camera *cam)
882 {
883 	unsigned long flags;
884 
885 	spin_lock_irqsave(&cam->dev_lock, flags);
886 	/*
887 	 * School of hard knocks department: be sure we do any register
888 	 * twiddling on the controller *before* calling the platform
889 	 * power down routine.
890 	 */
891 	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
892 	if (cam->plat_power_down)
893 		cam->plat_power_down(cam);
894 	spin_unlock_irqrestore(&cam->dev_lock, flags);
895 }
896 
897 /* ---------------------------------------------------------------------- */
898 /*
899  * Master sensor clock.
900  */
901 static int mclk_prepare(struct clk_hw *hw)
902 {
903 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
904 
905 	clk_prepare(cam->clk[0]);
906 	return 0;
907 }
908 
909 static void mclk_unprepare(struct clk_hw *hw)
910 {
911 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
912 
913 	clk_unprepare(cam->clk[0]);
914 }
915 
916 static int mclk_enable(struct clk_hw *hw)
917 {
918 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
919 	int mclk_src;
920 	int mclk_div;
921 
922 	/*
923 	 * Clock the sensor appropriately.  Controller clock should
924 	 * be 48MHz, sensor "typical" value is half that.
925 	 */
926 	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY) {
927 		mclk_src = cam->mclk_src;
928 		mclk_div = cam->mclk_div;
929 	} else {
930 		mclk_src = 3;
931 		mclk_div = 2;
932 	}
933 
934 	clk_enable(cam->clk[0]);
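	/*
	 * CLKCTRL layout as used here: the clock-source select sits in
	 * the top bits (shifted up by 29) and the divider in the low bits.
	 */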
935 	mcam_reg_write(cam, REG_CLKCTRL, (mclk_src << 29) | mclk_div);
936 	mcam_ctlr_power_up(cam);
937 
938 	return 0;
939 }
940 
941 static void mclk_disable(struct clk_hw *hw)
942 {
943 	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
944 
945 	mcam_ctlr_power_down(cam);
946 	clk_disable(cam->clk[0]);
947 }
948 
949 static unsigned long mclk_recalc_rate(struct clk_hw *hw,
950 				unsigned long parent_rate)
951 {
952 	return 48000000;
953 }
954 
955 static const struct clk_ops mclk_ops = {
956 	.prepare = mclk_prepare,
957 	.unprepare = mclk_unprepare,
958 	.enable = mclk_enable,
959 	.disable = mclk_disable,
960 	.recalc_rate = mclk_recalc_rate,
961 };
962 
963 /* -------------------------------------------------------------------- */
964 /*
965  * Communications with the sensor.
966  */
967 
968 static int __mcam_cam_reset(struct mcam_camera *cam)
969 {
970 	return sensor_call(cam, core, reset, 0);
971 }
972 
973 /*
974  * We have found the sensor on the i2c.  Let's try to have a
975  * conversation.
976  */
977 static int mcam_cam_init(struct mcam_camera *cam)
978 {
979 	int ret;
980 
981 	if (cam->state != S_NOTREADY)
982 		cam_warn(cam, "Cam init with device in funky state %d",
983 				cam->state);
984 	ret = __mcam_cam_reset(cam);
985 	/* Get/set parameters? */
986 	cam->state = S_IDLE;
987 	return ret;
988 }
989 
990 /*
991  * Configure the sensor to match the parameters we have.  Caller should
992  * hold s_mutex
993  */
994 static int mcam_cam_set_flip(struct mcam_camera *cam)
995 {
996 	struct v4l2_control ctrl;
997 
998 	memset(&ctrl, 0, sizeof(ctrl));
999 	ctrl.id = V4L2_CID_VFLIP;
1000 	ctrl.value = flip;
1001 	return v4l2_s_ctrl(NULL, cam->sensor->ctrl_handler, &ctrl);
1002 }
1003 
1004 
1005 static int mcam_cam_configure(struct mcam_camera *cam)
1006 {
1007 	struct v4l2_subdev_format format = {
1008 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1009 	};
1010 	int ret;
1011 
1012 	v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
1013 	ret = sensor_call(cam, core, init, 0);
1014 	if (ret == 0)
1015 		ret = sensor_call(cam, pad, set_fmt, NULL, &format);
1016 	/*
1017 	 * OV7670 does weird things if flip is set *before* format...
1018 	 */
1019 	ret += mcam_cam_set_flip(cam);
1020 	return ret;
1021 }
1022 
1023 /*
1024  * Get everything ready, and start grabbing frames.
1025  */
1026 static int mcam_read_setup(struct mcam_camera *cam)
1027 {
1028 	int ret;
1029 	unsigned long flags;
1030 
1031 	/*
1032 	 * Configuration.  If we still don't have DMA buffers,
1033 	 * make one last, desperate attempt.
1034 	 */
1035 	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
1036 			mcam_alloc_dma_bufs(cam, 0))
1037 		return -ENOMEM;
1038 
1039 	if (mcam_needs_config(cam)) {
1040 		mcam_cam_configure(cam);
1041 		ret = mcam_ctlr_configure(cam);
1042 		if (ret)
1043 			return ret;
1044 	}
1045 
1046 	/*
1047 	 * Turn it loose.
1048 	 */
1049 	spin_lock_irqsave(&cam->dev_lock, flags);
1050 	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1051 	mcam_reset_buffers(cam);
1052 	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY)
1053 		mcam_enable_mipi(cam);
1054 	else
1055 		mcam_disable_mipi(cam);
1056 	mcam_ctlr_irq_enable(cam);
1057 	cam->state = S_STREAMING;
1058 	if (!test_bit(CF_SG_RESTART, &cam->flags))
1059 		mcam_ctlr_start(cam);
1060 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1061 	return 0;
1062 }
1063 
1064 /* ----------------------------------------------------------------------- */
1065 /*
1066  * Videobuf2 interface code.
1067  */
1068 
1069 static int mcam_vb_queue_setup(struct vb2_queue *vq,
1070 		unsigned int *nbufs,
1071 		unsigned int *num_planes, unsigned int sizes[],
1072 		struct device *alloc_devs[])
1073 {
1074 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1075 	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1076 	unsigned size = cam->pix_format.sizeimage;
1077 
1078 	if (*nbufs < minbufs)
1079 		*nbufs = minbufs;
1080 
1081 	if (*num_planes)
1082 		return sizes[0] < size ? -EINVAL : 0;
1083 	sizes[0] = size;
1084 	*num_planes = 1; /* Someday we have to support planar formats... */
1085 	return 0;
1086 }
1087 
1088 
1089 static void mcam_vb_buf_queue(struct vb2_buffer *vb)
1090 {
1091 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1092 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1093 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1094 	unsigned long flags;
1095 	int start;
1096 
1097 	spin_lock_irqsave(&cam->dev_lock, flags);
1098 	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
1099 	list_add(&mvb->queue, &cam->buffers);
1100 	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
1101 		mcam_sg_restart(cam);
1102 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1103 	if (start)
1104 		mcam_read_setup(cam);
1105 }
1106 
1107 static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
1108 				 enum vb2_buffer_state state)
1109 {
1110 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1111 	struct mcam_vb_buffer *buf, *node;
1112 	unsigned long flags;
1113 	unsigned i;
1114 
1115 	spin_lock_irqsave(&cam->dev_lock, flags);
1116 	list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
1117 		vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1118 		list_del(&buf->queue);
1119 	}
1120 	for (i = 0; i < MAX_DMA_BUFS; i++) {
1121 		buf = cam->vb_bufs[i];
1122 
1123 		if (buf) {
1124 			vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1125 			cam->vb_bufs[i] = NULL;
1126 		}
1127 	}
1128 	spin_unlock_irqrestore(&cam->dev_lock, flags);
1129 }
1130 
1131 /*
1132  * These need to be called with the mutex held from vb2
1133  */
1134 static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
1135 {
1136 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1137 	unsigned int frame;
1138 	int ret;
1139 
1140 	if (cam->state != S_IDLE) {
1141 		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
1142 		return -EINVAL;
1143 	}
1144 	cam->frame_state.frames = 0;
1145 	cam->frame_state.singles = 0;
1146 	cam->frame_state.delivered = 0;
1147 	cam->sequence = 0;
1148 	/*
1149 	 * Videobuf2 sneakily hoards all the buffers and won't
1150 	 * give them to us until *after* streaming starts.  But
1151 	 * we can't actually start streaming until we have a
1152 	 * destination.  So go into a wait state and hope they
1153 	 * give us buffers soon.
1154 	 */
1155 	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
1156 		cam->state = S_BUFWAIT;
1157 		return 0;
1158 	}
1159 
1160 	/*
1161 	 * Make sure any leftover frame flags are cleared
1162 	 * before we really start streaming.
1163 	 */
1164 	for (frame = 0; frame < cam->nbufs; frame++)
1165 		clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1166 
1167 	ret = mcam_read_setup(cam);
1168 	if (ret)
1169 		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
1170 	return ret;
1171 }
1172 
1173 static void mcam_vb_stop_streaming(struct vb2_queue *vq)
1174 {
1175 	struct mcam_camera *cam = vb2_get_drv_priv(vq);
1176 
1177 	cam_dbg(cam, "stop_streaming: %d frames, %d singles, %d delivered\n",
1178 			cam->frame_state.frames, cam->frame_state.singles,
1179 			cam->frame_state.delivered);
1180 	if (cam->state == S_BUFWAIT) {
1181 		/* They never gave us buffers */
1182 		cam->state = S_IDLE;
1183 		return;
1184 	}
1185 	if (cam->state != S_STREAMING)
1186 		return;
1187 	mcam_ctlr_stop_dma(cam);
1188 	/*
1189 	 * VB2 reclaims the buffers, so we need to forget
1190 	 * about them.
1191 	 */
1192 	mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_ERROR);
1193 }
1194 
1195 
1196 static const struct vb2_ops mcam_vb2_ops = {
1197 	.queue_setup		= mcam_vb_queue_setup,
1198 	.buf_queue		= mcam_vb_buf_queue,
1199 	.start_streaming	= mcam_vb_start_streaming,
1200 	.stop_streaming		= mcam_vb_stop_streaming,
1201 	.wait_prepare		= vb2_ops_wait_prepare,
1202 	.wait_finish		= vb2_ops_wait_finish,
1203 };
1204 
1205 
1206 #ifdef MCAM_MODE_DMA_SG
1207 /*
1208  * Scatter/gather mode uses all of the above functions plus a
1209  * few extras to deal with DMA mapping.
1210  */
1211 static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
1212 {
1213 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1214 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1215 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1216 	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1217 
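	/*
	 * Worst case is one two-word descriptor per page of image data,
	 * plus one spare; the descriptor array itself must be contiguous
	 * in memory, hence the coherent allocation below.
	 */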
1218 	mvb->dma_desc = dma_alloc_coherent(cam->dev,
1219 			ndesc * sizeof(struct mcam_dma_desc),
1220 			&mvb->dma_desc_pa, GFP_KERNEL);
1221 	if (mvb->dma_desc == NULL) {
1222 		cam_err(cam, "Unable to get DMA descriptor array\n");
1223 		return -ENOMEM;
1224 	}
1225 	return 0;
1226 }
1227 
1228 static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1229 {
1230 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1231 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1232 	struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1233 	struct mcam_dma_desc *desc = mvb->dma_desc;
1234 	struct scatterlist *sg;
1235 	int i;
1236 
1237 	for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
1238 		desc->dma_addr = sg_dma_address(sg);
1239 		desc->segment_len = sg_dma_len(sg);
1240 		desc++;
1241 	}
1242 	return 0;
1243 }
1244 
1245 static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
1246 {
1247 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1248 	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1249 	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1250 	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1251 
1252 	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
1253 			mvb->dma_desc, mvb->dma_desc_pa);
1254 }
1255 
1256 
1257 static const struct vb2_ops mcam_vb2_sg_ops = {
1258 	.queue_setup		= mcam_vb_queue_setup,
1259 	.buf_init		= mcam_vb_sg_buf_init,
1260 	.buf_prepare		= mcam_vb_sg_buf_prepare,
1261 	.buf_queue		= mcam_vb_buf_queue,
1262 	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
1263 	.start_streaming	= mcam_vb_start_streaming,
1264 	.stop_streaming		= mcam_vb_stop_streaming,
1265 	.wait_prepare		= vb2_ops_wait_prepare,
1266 	.wait_finish		= vb2_ops_wait_finish,
1267 };
1268 
1269 #endif /* MCAM_MODE_DMA_SG */
1270 
1271 static int mcam_setup_vb2(struct mcam_camera *cam)
1272 {
1273 	struct vb2_queue *vq = &cam->vb_queue;
1274 
1275 	memset(vq, 0, sizeof(*vq));
1276 	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1277 	vq->drv_priv = cam;
1278 	vq->lock = &cam->s_mutex;
1279 	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
1280 	vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
1281 	vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1282 	vq->dev = cam->dev;
1283 	INIT_LIST_HEAD(&cam->buffers);
1284 	switch (cam->buffer_mode) {
1285 	case B_DMA_contig:
1286 #ifdef MCAM_MODE_DMA_CONTIG
1287 		vq->ops = &mcam_vb2_ops;
1288 		vq->mem_ops = &vb2_dma_contig_memops;
1289 		cam->dma_setup = mcam_ctlr_dma_contig;
1290 		cam->frame_complete = mcam_dma_contig_done;
1291 #endif
1292 		break;
1293 	case B_DMA_sg:
1294 #ifdef MCAM_MODE_DMA_SG
1295 		vq->ops = &mcam_vb2_sg_ops;
1296 		vq->mem_ops = &vb2_dma_sg_memops;
1297 		cam->dma_setup = mcam_ctlr_dma_sg;
1298 		cam->frame_complete = mcam_dma_sg_done;
1299 #endif
1300 		break;
1301 	case B_vmalloc:
1302 #ifdef MCAM_MODE_VMALLOC
1303 		tasklet_setup(&cam->s_tasklet, mcam_frame_tasklet);
1304 		vq->ops = &mcam_vb2_ops;
1305 		vq->mem_ops = &vb2_vmalloc_memops;
1306 		cam->dma_setup = mcam_ctlr_dma_vmalloc;
1307 		cam->frame_complete = mcam_vmalloc_done;
1308 #endif
1309 		break;
1310 	}
1311 	return vb2_queue_init(vq);
1312 }
1313 
1314 
1315 /* ---------------------------------------------------------------------- */
1316 /*
1317  * The long list of V4L2 ioctl() operations.
1318  */
1319 
1320 static int mcam_vidioc_querycap(struct file *file, void *priv,
1321 		struct v4l2_capability *cap)
1322 {
1323 	struct mcam_camera *cam = video_drvdata(file);
1324 
1325 	strscpy(cap->driver, "marvell_ccic", sizeof(cap->driver));
1326 	strscpy(cap->card, "marvell_ccic", sizeof(cap->card));
1327 	strscpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
1328 	return 0;
1329 }
1330 
1331 
1332 static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1333 		void *priv, struct v4l2_fmtdesc *fmt)
1334 {
1335 	if (fmt->index >= N_MCAM_FMTS)
1336 		return -EINVAL;
1337 	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1338 	return 0;
1339 }
1340 
1341 static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
1342 		struct v4l2_format *fmt)
1343 {
1344 	struct mcam_camera *cam = video_drvdata(filp);
1345 	struct mcam_format_struct *f;
1346 	struct v4l2_pix_format *pix = &fmt->fmt.pix;
1347 	struct v4l2_subdev_pad_config pad_cfg;
1348 	struct v4l2_subdev_format format = {
1349 		.which = V4L2_SUBDEV_FORMAT_TRY,
1350 	};
1351 	int ret;
1352 
1353 	f = mcam_find_format(pix->pixelformat);
1354 	pix->pixelformat = f->pixelformat;
1355 	v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
1356 	ret = sensor_call(cam, pad, set_fmt, &pad_cfg, &format);
1357 	v4l2_fill_pix_format(pix, &format.format);
1358 	pix->bytesperline = pix->width * f->bpp;
1359 	switch (f->pixelformat) {
1360 	case V4L2_PIX_FMT_YUV420:
1361 	case V4L2_PIX_FMT_YVU420:
1362 		pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
1363 		break;
1364 	default:
1365 		pix->sizeimage = pix->height * pix->bytesperline;
1366 		break;
1367 	}
1368 	pix->colorspace = V4L2_COLORSPACE_SRGB;
1369 	return ret;
1370 }
1371 
1372 static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
1373 		struct v4l2_format *fmt)
1374 {
1375 	struct mcam_camera *cam = video_drvdata(filp);
1376 	struct mcam_format_struct *f;
1377 	int ret;
1378 
1379 	/*
1380 	 * Can't do anything if the device is not idle
1381 	 * Also can't if there are streaming buffers in place.
1382 	 */
1383 	if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
1384 		return -EBUSY;
1385 
1386 	f = mcam_find_format(fmt->fmt.pix.pixelformat);
1387 
1388 	/*
1389 	 * See if the formatting works in principle.
1390 	 */
1391 	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
1392 	if (ret)
1393 		return ret;
1394 	/*
1395 	 * Now we start to change things for real, so let's do it
1396 	 * under lock.
1397 	 */
1398 	cam->pix_format = fmt->fmt.pix;
1399 	cam->mbus_code = f->mbus_code;
1400 
1401 	/*
1402 	 * Make sure we have appropriate DMA buffers.
1403 	 */
1404 	if (cam->buffer_mode == B_vmalloc) {
1405 		ret = mcam_check_dma_buffers(cam);
1406 		if (ret)
1407 			goto out;
1408 	}
1409 	mcam_set_config_needed(cam, 1);
1410 out:
1411 	return ret;
1412 }
1413 
1414 /*
1415  * Return our stored notion of how the camera is/should be configured.
1416  * The V4L2 spec wants us to be smarter, and actually get this from
1417  * the camera (and not mess with it at open time).  Someday.
1418  */
1419 static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
1420 		struct v4l2_format *f)
1421 {
1422 	struct mcam_camera *cam = video_drvdata(filp);
1423 
1424 	f->fmt.pix = cam->pix_format;
1425 	return 0;
1426 }
1427 
1428 /*
1429  * We only have one input - the sensor - so minimize the nonsense here.
1430  */
1431 static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1432 		struct v4l2_input *input)
1433 {
1434 	if (input->index != 0)
1435 		return -EINVAL;
1436 
1437 	input->type = V4L2_INPUT_TYPE_CAMERA;
1438 	strscpy(input->name, "Camera", sizeof(input->name));
1439 	return 0;
1440 }
1441 
1442 static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
1443 {
1444 	*i = 0;
1445 	return 0;
1446 }
1447 
1448 static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1449 {
1450 	if (i != 0)
1451 		return -EINVAL;
1452 	return 0;
1453 }
1454 
1455 /*
1456  * G/S_PARM.  Most of this is done by the sensor, but we are
1457  * the level which controls the number of read buffers.
1458  */
1459 static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1460 		struct v4l2_streamparm *a)
1461 {
1462 	struct mcam_camera *cam = video_drvdata(filp);
1463 	int ret;
1464 
1465 	ret = v4l2_g_parm_cap(video_devdata(filp), cam->sensor, a);
1466 	a->parm.capture.readbuffers = n_dma_bufs;
1467 	return ret;
1468 }
1469 
1470 static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1471 		struct v4l2_streamparm *a)
1472 {
1473 	struct mcam_camera *cam = video_drvdata(filp);
1474 	int ret;
1475 
1476 	ret = v4l2_s_parm_cap(video_devdata(filp), cam->sensor, a);
1477 	a->parm.capture.readbuffers = n_dma_bufs;
1478 	return ret;
1479 }
1480 
1481 static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
1482 		struct v4l2_frmsizeenum *sizes)
1483 {
1484 	struct mcam_camera *cam = video_drvdata(filp);
1485 	struct mcam_format_struct *f;
1486 	struct v4l2_subdev_frame_size_enum fse = {
1487 		.index = sizes->index,
1488 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1489 	};
1490 	int ret;
1491 
1492 	f = mcam_find_format(sizes->pixel_format);
1493 	if (f->pixelformat != sizes->pixel_format)
1494 		return -EINVAL;
1495 	fse.code = f->mbus_code;
1496 	ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
1497 	if (ret)
1498 		return ret;
1499 	if (fse.min_width == fse.max_width &&
1500 	    fse.min_height == fse.max_height) {
1501 		sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1502 		sizes->discrete.width = fse.min_width;
1503 		sizes->discrete.height = fse.min_height;
1504 		return 0;
1505 	}
1506 	sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
1507 	sizes->stepwise.min_width = fse.min_width;
1508 	sizes->stepwise.max_width = fse.max_width;
1509 	sizes->stepwise.min_height = fse.min_height;
1510 	sizes->stepwise.max_height = fse.max_height;
1511 	sizes->stepwise.step_width = 1;
1512 	sizes->stepwise.step_height = 1;
1513 	return 0;
1514 }
1515 
1516 static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
1517 		struct v4l2_frmivalenum *interval)
1518 {
1519 	struct mcam_camera *cam = video_drvdata(filp);
1520 	struct mcam_format_struct *f;
1521 	struct v4l2_subdev_frame_interval_enum fie = {
1522 		.index = interval->index,
1523 		.width = interval->width,
1524 		.height = interval->height,
1525 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1526 	};
1527 	int ret;
1528 
1529 	f = mcam_find_format(interval->pixel_format);
1530 	if (f->pixelformat != interval->pixel_format)
1531 		return -EINVAL;
1532 	fie.code = f->mbus_code;
1533 	ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
1534 	if (ret)
1535 		return ret;
1536 	interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1537 	interval->discrete = fie.interval;
1538 	return 0;
1539 }
1540 
1541 #ifdef CONFIG_VIDEO_ADV_DEBUG
1542 static int mcam_vidioc_g_register(struct file *file, void *priv,
1543 		struct v4l2_dbg_register *reg)
1544 {
1545 	struct mcam_camera *cam = video_drvdata(file);
1546 
1547 	if (reg->reg > cam->regs_size - 4)
1548 		return -EINVAL;
1549 	reg->val = mcam_reg_read(cam, reg->reg);
1550 	reg->size = 4;
1551 	return 0;
1552 }
1553 
1554 static int mcam_vidioc_s_register(struct file *file, void *priv,
1555 		const struct v4l2_dbg_register *reg)
1556 {
1557 	struct mcam_camera *cam = video_drvdata(file);
1558 
1559 	if (reg->reg > cam->regs_size - 4)
1560 		return -EINVAL;
1561 	mcam_reg_write(cam, reg->reg, reg->val);
1562 	return 0;
1563 }
1564 #endif
1565 
1566 static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
1567 	.vidioc_querycap	= mcam_vidioc_querycap,
1568 	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
1569 	.vidioc_try_fmt_vid_cap	= mcam_vidioc_try_fmt_vid_cap,
1570 	.vidioc_s_fmt_vid_cap	= mcam_vidioc_s_fmt_vid_cap,
1571 	.vidioc_g_fmt_vid_cap	= mcam_vidioc_g_fmt_vid_cap,
1572 	.vidioc_enum_input	= mcam_vidioc_enum_input,
1573 	.vidioc_g_input		= mcam_vidioc_g_input,
1574 	.vidioc_s_input		= mcam_vidioc_s_input,
1575 	.vidioc_reqbufs		= vb2_ioctl_reqbufs,
1576 	.vidioc_create_bufs	= vb2_ioctl_create_bufs,
1577 	.vidioc_querybuf	= vb2_ioctl_querybuf,
1578 	.vidioc_qbuf		= vb2_ioctl_qbuf,
1579 	.vidioc_dqbuf		= vb2_ioctl_dqbuf,
1580 	.vidioc_expbuf		= vb2_ioctl_expbuf,
1581 	.vidioc_streamon	= vb2_ioctl_streamon,
1582 	.vidioc_streamoff	= vb2_ioctl_streamoff,
1583 	.vidioc_g_parm		= mcam_vidioc_g_parm,
1584 	.vidioc_s_parm		= mcam_vidioc_s_parm,
1585 	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
1586 	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
1587 	.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
1588 	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1589 #ifdef CONFIG_VIDEO_ADV_DEBUG
1590 	.vidioc_g_register	= mcam_vidioc_g_register,
1591 	.vidioc_s_register	= mcam_vidioc_s_register,
1592 #endif
1593 };
1594 
1595 /* ---------------------------------------------------------------------- */
1596 /*
1597  * Our various file operations.
1598  */
1599 static int mcam_v4l_open(struct file *filp)
1600 {
1601 	struct mcam_camera *cam = video_drvdata(filp);
1602 	int ret;
1603 
1604 	mutex_lock(&cam->s_mutex);
1605 	ret = v4l2_fh_open(filp);
1606 	if (ret)
1607 		goto out;
1608 	if (v4l2_fh_is_singular_file(filp)) {
1609 		ret = sensor_call(cam, core, s_power, 1);
1610 		if (ret)
1611 			goto out;
1612 		pm_runtime_get_sync(cam->dev);
1613 		__mcam_cam_reset(cam);
1614 		mcam_set_config_needed(cam, 1);
1615 	}
1616 out:
1617 	mutex_unlock(&cam->s_mutex);
1618 	if (ret)
1619 		v4l2_fh_release(filp);
1620 	return ret;
1621 }
1622 
1623 
1624 static int mcam_v4l_release(struct file *filp)
1625 {
1626 	struct mcam_camera *cam = video_drvdata(filp);
1627 	bool last_open;
1628 
1629 	mutex_lock(&cam->s_mutex);
1630 	last_open = v4l2_fh_is_singular_file(filp);
1631 	_vb2_fop_release(filp, NULL);
1632 	if (last_open) {
1633 		mcam_disable_mipi(cam);
1634 		sensor_call(cam, core, s_power, 0);
1635 		pm_runtime_put(cam->dev);
1636 		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
1637 			mcam_free_dma_bufs(cam);
1638 	}
1639 
1640 	mutex_unlock(&cam->s_mutex);
1641 	return 0;
1642 }
1643 
1644 static const struct v4l2_file_operations mcam_v4l_fops = {
1645 	.owner = THIS_MODULE,
1646 	.open = mcam_v4l_open,
1647 	.release = mcam_v4l_release,
1648 	.read = vb2_fop_read,
1649 	.poll = vb2_fop_poll,
1650 	.mmap = vb2_fop_mmap,
1651 	.unlocked_ioctl = video_ioctl2,
1652 };
1653 
1654 
1655 /*
1656  * This template device holds all of those v4l2 methods; we
1657  * clone it for specific real devices.
1658  */
1659 static const struct video_device mcam_v4l_template = {
1660 	.name = "mcam",
1661 	.fops = &mcam_v4l_fops,
1662 	.ioctl_ops = &mcam_v4l_ioctl_ops,
1663 	.release = video_device_release_empty,
1664 	.device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_READWRITE |
1665 		       V4L2_CAP_STREAMING,
1666 };
1667 
1668 /* ---------------------------------------------------------------------- */
1669 /*
1670  * Interrupt handler stuff
1671  */
1672 static void mcam_frame_complete(struct mcam_camera *cam, int frame)
1673 {
1674 	/*
1675 	 * Basic frame housekeeping.
1676 	 */
1677 	set_bit(frame, &cam->flags);
1678 	clear_bit(CF_DMA_ACTIVE, &cam->flags);
1679 	cam->next_buf = frame;
1680 	cam->buf_seq[frame] = cam->sequence++;
1681 	cam->frame_state.frames++;
1682 	/*
1683 	 * "This should never happen"
1684 	 */
1685 	if (cam->state != S_STREAMING)
1686 		return;
1687 	/*
1688 	 * Process the frame and set up the next one.
1689 	 */
1690 	cam->frame_complete(cam, frame);
1691 }
1692 
1693 
1694 /*
1695  * The interrupt handler; this needs to be called from the
1696  * platform irq handler with the lock held.
1697  */
1698 int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1699 {
1700 	unsigned int frame, handled = 0;
1701 
1702 	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1703 	/*
1704 	 * Handle any frame completions.  There really should
1705 	 * not be more than one of these, or we have fallen
1706 	 * far behind.
1707 	 *
1708 	 * When running in S/G mode, the frame number lacks any
1709 	 * real meaning - there's only one descriptor array - but
1710 	 * the controller still picks a different one to signal
1711 	 * each time.
1712 	 */
1713 	for (frame = 0; frame < cam->nbufs; frame++)
1714 		if (irqs & (IRQ_EOF0 << frame) &&
1715 			test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1716 			mcam_frame_complete(cam, frame);
1717 			handled = 1;
1718 			clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1719 			if (cam->buffer_mode == B_DMA_sg)
1720 				break;
1721 		}
1722 	/*
1723 	 * If a frame starts, note that we have DMA active.  This
1724 	 * code assumes that we won't get multiple frame interrupts
1725 	 * at once; may want to rethink that.
1726 	 */
1727 	for (frame = 0; frame < cam->nbufs; frame++) {
1728 		if (irqs & (IRQ_SOF0 << frame)) {
1729 			set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1730 			handled = IRQ_HANDLED;
1731 		}
1732 	}
1733 
1734 	if (handled == IRQ_HANDLED) {
1735 		set_bit(CF_DMA_ACTIVE, &cam->flags);
1736 		if (cam->buffer_mode == B_DMA_sg)
1737 			mcam_ctlr_stop(cam);
1738 	}
1739 	return handled;
1740 }
1741 EXPORT_SYMBOL_GPL(mccic_irq);
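
/*
 * A rough sketch of the expected calling convention from platform code.
 * This assumes the glue driver reads REG_IRQSTAT itself and serializes
 * with the mcam_camera dev_lock spinlock (as the comment above requires);
 * the function and variable names on the platform side are illustrative
 * only:
 *
 *	static irqreturn_t platform_ccic_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *mcam = data;
 *		unsigned int irqs, handled;
 *
 *		spin_lock(&mcam->dev_lock);
 *		irqs = mcam_reg_read(mcam, REG_IRQSTAT);
 *		handled = mccic_irq(mcam, irqs);
 *		spin_unlock(&mcam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */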

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */

static int mccic_notify_bound(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->sensor) {
		cam_err(cam, "sensor already bound\n");
		ret = -EBUSY;
		goto out;
	}

	v4l2_set_subdev_hostdata(subdev, cam);
	cam->sensor = subdev;

	ret = mcam_cam_init(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	ret = mcam_setup_vb2(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam->vdev = mcam_v4l_template;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	cam->vdev.lock = &cam->s_mutex;
	cam->vdev.queue = &cam->vb_queue;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_VIDEO, -1);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam_dbg(cam, "sensor %s bound\n", subdev->name);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static void mccic_notify_unbind(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);

	mutex_lock(&cam->s_mutex);
	if (cam->sensor != subdev) {
		cam_err(cam, "sensor %s not bound\n", subdev->name);
		goto out;
	}

	video_unregister_device(&cam->vdev);
	cam->sensor = NULL;
	cam_dbg(cam, "sensor %s unbound\n", subdev->name);

out:
	mutex_unlock(&cam->s_mutex);
}

static int mccic_notify_complete(struct v4l2_async_notifier *notifier)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	/*
	 * Get the v4l2 setup done.
	 */
	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
	if (!ret)
		cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;

	return ret;
}

static const struct v4l2_async_notifier_operations mccic_notify_ops = {
	.bound = mccic_notify_bound,
	.unbind = mccic_notify_unbind,
	.complete = mccic_notify_complete,
};
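
/*
 * The notifier waits for the single sensor subdev described by cam->asd,
 * which the platform driver fills in before calling mccic_register().
 * A hedged sketch, assuming fwnode matching on the sensor endpoint (a
 * glue driver could equally match by I2C address):
 *
 *	cam->asd.match_type = V4L2_ASYNC_MATCH_FWNODE;
 *	cam->asd.match.fwnode = fwnode_graph_get_remote_port_parent(ep);
 */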

int mccic_register(struct mcam_camera *cam)
{
	struct clk_init_data mclk_init = { };
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}

	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		ret = -EINVAL;
		goto out;
	}

	/*
	 * Register with V4L
	 */
	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
	if (ret)
		goto out;

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;

	/*
	 * Register sensor notifier.
	 */
	v4l2_async_notifier_init(&cam->notifier);
	ret = v4l2_async_notifier_add_subdev(&cam->notifier, &cam->asd);
	if (ret) {
		cam_warn(cam, "failed to add subdev to a notifier");
		goto out;
	}

	cam->notifier.ops = &mccic_notify_ops;
	ret = v4l2_async_notifier_register(&cam->v4l2_dev, &cam->notifier);
	if (ret < 0) {
		cam_warn(cam, "failed to register a sensor notifier");
		goto out;
	}

	/*
	 * Register sensor master clock.
	 */
	mclk_init.parent_names = NULL;
	mclk_init.num_parents = 0;
	mclk_init.ops = &mclk_ops;
	mclk_init.name = "mclk";

	of_property_read_string(cam->dev->of_node, "clock-output-names",
							&mclk_init.name);

	cam->mclk_hw.init = &mclk_init;

	cam->mclk = devm_clk_register(cam->dev, &cam->mclk_hw);
	if (IS_ERR(cam->mclk)) {
		ret = PTR_ERR(cam->mclk);
		dev_err(cam->dev, "can't register clock\n");
		goto out;
	}

	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load; will try again later.");
	}

	return 0;

out:
	v4l2_async_notifier_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
	v4l2_async_notifier_cleanup(&cam->notifier);
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_register);
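
/*
 * A hedged sketch of how a platform driver's probe path typically feeds
 * this routine; the chip-id and buffer-mode values are illustrative, and
 * anything not defined in mcam-core.h is an assumption here:
 *
 *	mcam->dev = &pdev->dev;
 *	mcam->chip_id = MCAM_ARMADA610;
 *	mcam->buffer_mode = B_DMA_sg;
 *	// map registers, request the IRQ, fill in mcam->asd, then:
 *	ret = mccic_register(mcam);
 *	if (ret)
 *		return ret;
 */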

void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down.  Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (!list_empty(&cam->vdev.fh_list)) {
		cam_warn(cam, "Removing a device with users!\n");
		sensor_call(cam, core, s_power, 0);
	}
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_async_notifier_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
	v4l2_async_notifier_cleanup(&cam->notifier);
}
EXPORT_SYMBOL_GPL(mccic_shutdown);

/*
 * Power management
 */
void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		sensor_call(cam, core, s_power, 0);
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}
EXPORT_SYMBOL_GPL(mccic_suspend);

int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		ret = sensor_call(cam, core, s_power, 1);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		sensor_call(cam, core, s_power, 0);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_resume);
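
/*
 * These two are meant to be called from the platform driver's system
 * sleep callbacks.  A sketch with assumed wrapper names, assuming the
 * glue driver stashed its mcam_camera in drvdata (the real glue drivers
 * also save and restore their own register state around these):
 *
 *	static int __maybe_unused platform_ccic_suspend(struct device *dev)
 *	{
 *		struct mcam_camera *mcam = dev_get_drvdata(dev);
 *
 *		mccic_suspend(mcam);
 *		return 0;
 *	}
 *
 *	static int __maybe_unused platform_ccic_resume(struct device *dev)
 *	{
 *		return mccic_resume(dev_get_drvdata(dev));
 *	}
 */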

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Jonathan Corbet <corbet@lwn.net>");