[media] marvell-ccic: webcam drivers shouldn't support g/s_std
deliverable/linux.git: drivers/media/platform/marvell-ccic/mcam-core.c
1 /*
2 * The Marvell camera core. This device appears in a number of settings,
3 * so it needs platform-specific support outside of the core.
4 *
5 * Copyright 2011 Jonathan Corbet corbet@lwn.net
6 */
7 #include <linux/kernel.h>
8 #include <linux/module.h>
9 #include <linux/fs.h>
10 #include <linux/mm.h>
11 #include <linux/i2c.h>
12 #include <linux/interrupt.h>
13 #include <linux/spinlock.h>
14 #include <linux/slab.h>
15 #include <linux/device.h>
16 #include <linux/wait.h>
17 #include <linux/list.h>
18 #include <linux/dma-mapping.h>
19 #include <linux/delay.h>
20 #include <linux/vmalloc.h>
21 #include <linux/io.h>
22 #include <linux/clk.h>
23 #include <linux/videodev2.h>
24 #include <media/v4l2-device.h>
25 #include <media/v4l2-ioctl.h>
26 #include <media/v4l2-ctrls.h>
27 #include <media/ov7670.h>
28 #include <media/videobuf2-vmalloc.h>
29 #include <media/videobuf2-dma-contig.h>
30 #include <media/videobuf2-dma-sg.h>
31
32 #include "mcam-core.h"
33
34 #ifdef MCAM_MODE_VMALLOC
35 /*
36 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
37 * we must have physically contiguous buffers to bring frames into.
38 * These parameters control how many buffers we use, whether we
39 * allocate them at load time (better chance of success, but nails down
40 * memory) or when somebody tries to use the camera (riskier), and,
41 * for load-time allocation, how big they should be.
42 *
43 * The controller can cycle through three buffers. We could use
44 * more by flipping pointers around, but it probably makes little
45 * sense.
46 */
47
48 static bool alloc_bufs_at_read;
49 module_param(alloc_bufs_at_read, bool, 0444);
50 MODULE_PARM_DESC(alloc_bufs_at_read,
51 "Non-zero value causes DMA buffers to be allocated when the "
52 "video capture device is read, rather than at module load "
53 "time. This saves memory, but decreases the chances of "
54 "successfully getting those buffers. This parameter is "
55 "only used in the vmalloc buffer mode");
56
57 static int n_dma_bufs = 3;
58 module_param(n_dma_bufs, uint, 0644);
59 MODULE_PARM_DESC(n_dma_bufs,
60 "The number of DMA buffers to allocate. Can be either two "
61 "(saves memory, makes timing tighter) or three.");
62
63 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */
64 module_param(dma_buf_size, uint, 0444);
65 MODULE_PARM_DESC(dma_buf_size,
66 "The size of the allocated DMA buffers. If actual operating "
67 "parameters require larger buffers, an attempt to reallocate "
68 "will be made.");
69 #else /* MCAM_MODE_VMALLOC */
70 static const bool alloc_bufs_at_read;
71 static const int n_dma_bufs = 3; /* Used by S/G_PARM */
72 #endif /* MCAM_MODE_VMALLOC */
73
74 static bool flip;
75 module_param(flip, bool, 0444);
76 MODULE_PARM_DESC(flip,
77 "If set, the sensor will be instructed to flip the image "
78 "vertically.");
79
80 static int buffer_mode = -1;
81 module_param(buffer_mode, int, 0444);
82 MODULE_PARM_DESC(buffer_mode,
83 "Set the buffer mode to be used; default is to go with what "
84 "the platform driver asks for. Set to 0 for vmalloc, 1 for "
85 "DMA contiguous.");
86
87 /*
88 * Status flags. Always manipulated with bit operations.
89 */
90 #define CF_BUF0_VALID 0 /* Buffers valid - first three */
91 #define CF_BUF1_VALID 1
92 #define CF_BUF2_VALID 2
93 #define CF_DMA_ACTIVE 3 /* A frame is incoming */
94 #define CF_CONFIG_NEEDED 4 /* Must configure hardware */
95 #define CF_SINGLE_BUFFER 5 /* Running with a single buffer */
96 #define CF_SG_RESTART 6 /* SG restart needed */
97 #define CF_FRAME_SOF0 7 /* Frame 0 started */
98 #define CF_FRAME_SOF1 8
99 #define CF_FRAME_SOF2 9
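/*
 * Note that the rest of the code relies on this layout: CF_BUF0_VALID
 * through CF_BUF2_VALID are 0..2, so buffer index n maps directly to bit n
 * (see mcam_reset_buffers()), and the start-of-frame bit for frame n is
 * CF_FRAME_SOF0 + n.
 */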
100
101 #define sensor_call(cam, o, f, args...) \
102 v4l2_subdev_call(cam->sensor, o, f, ##args)
103
104 static struct mcam_format_struct {
105 __u8 *desc;
106 __u32 pixelformat;
107 int bpp; /* Bytes per pixel */
108 bool planar;
109 u32 mbus_code;
110 } mcam_formats[] = {
111 {
112 .desc = "YUYV 4:2:2",
113 .pixelformat = V4L2_PIX_FMT_YUYV,
114 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
115 .bpp = 2,
116 .planar = false,
117 },
118 {
119 .desc = "UYVY 4:2:2",
120 .pixelformat = V4L2_PIX_FMT_UYVY,
121 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
122 .bpp = 2,
123 .planar = false,
124 },
125 {
126 .desc = "YUV 4:2:2 PLANAR",
127 .pixelformat = V4L2_PIX_FMT_YUV422P,
128 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
129 .bpp = 2,
130 .planar = true,
131 },
132 {
133 .desc = "YUV 4:2:0 PLANAR",
134 .pixelformat = V4L2_PIX_FMT_YUV420,
135 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
136 .bpp = 2,
137 .planar = true,
138 },
139 {
140 .desc = "YVU 4:2:0 PLANAR",
141 .pixelformat = V4L2_PIX_FMT_YVU420,
142 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
143 .bpp = 2,
144 .planar = true,
145 },
146 {
147 .desc = "RGB 444",
148 .pixelformat = V4L2_PIX_FMT_RGB444,
149 .mbus_code = MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
150 .bpp = 2,
151 .planar = false,
152 },
153 {
154 .desc = "RGB 565",
155 .pixelformat = V4L2_PIX_FMT_RGB565,
156 .mbus_code = MEDIA_BUS_FMT_RGB565_2X8_LE,
157 .bpp = 2,
158 .planar = false,
159 },
160 {
161 .desc = "Raw RGB Bayer",
162 .pixelformat = V4L2_PIX_FMT_SBGGR8,
163 .mbus_code = MEDIA_BUS_FMT_SBGGR8_1X8,
164 .bpp = 1,
165 .planar = false,
166 },
167 };
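/*
 * All of the YUV entries above share MEDIA_BUS_FMT_YUYV8_2X8: the sensor
 * side always runs packed YUYV, and the controller reorders or splits the
 * data into the requested memory layout (see the C0_YUVE_* and C0_YUV_*
 * settings programmed in mcam_ctlr_image()).
 */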
168 #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
169
170 static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
171 {
172 unsigned i;
173
174 for (i = 0; i < N_MCAM_FMTS; i++)
175 if (mcam_formats[i].pixelformat == pixelformat)
176 return mcam_formats + i;
177 /* Not found? Then return the first format. */
178 return mcam_formats;
179 }
180
181 /*
182 * The default format we use until somebody says otherwise.
183 */
184 static const struct v4l2_pix_format mcam_def_pix_format = {
185 .width = VGA_WIDTH,
186 .height = VGA_HEIGHT,
187 .pixelformat = V4L2_PIX_FMT_YUYV,
188 .field = V4L2_FIELD_NONE,
189 .bytesperline = VGA_WIDTH*2,
190 .sizeimage = VGA_WIDTH*VGA_HEIGHT*2,
191 };
192
193 static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
194
195
196 /*
197 * The two-word DMA descriptor format used by the Armada 610 and the like.
198 * There is a three-word format as well (set C1_DESC_3WORD) where the third
199 * word is a pointer to the next descriptor, but we don't use it. Two-word
200 * descriptors have to be contiguous in memory.
201 */
202 struct mcam_dma_desc {
203 u32 dma_addr;
204 u32 segment_len;
205 };
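/*
 * For scatter/gather I/O the driver builds one contiguous array of these
 * descriptors per buffer, one entry per DMA segment. The array's physical
 * address goes into REG_DMA_DESC_Y and its length in bytes (nent *
 * sizeof(struct mcam_dma_desc)) into REG_DESC_LEN_Y; see
 * mcam_vb_sg_buf_prepare() and mcam_sg_next_buffer() below.
 */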
206
207 struct yuv_pointer_t {
208 dma_addr_t y;
209 dma_addr_t u;
210 dma_addr_t v;
211 };
212
213 /*
214 * Our buffer type for working with videobuf2. Note that the vb2
215 * developers have decreed that struct vb2_buffer must be at the
216 * beginning of this structure.
217 */
218 struct mcam_vb_buffer {
219 struct vb2_buffer vb_buf;
220 struct list_head queue;
221 struct mcam_dma_desc *dma_desc; /* Descriptor virtual address */
222 dma_addr_t dma_desc_pa; /* Descriptor physical address */
223 int dma_desc_nent; /* Number of mapped descriptors */
224 struct yuv_pointer_t yuv_p;
225 };
226
227 static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
228 {
229 return container_of(vb, struct mcam_vb_buffer, vb_buf);
230 }
231
232 /*
233 * Hand a completed buffer back to user space.
234 */
235 static void mcam_buffer_done(struct mcam_camera *cam, int frame,
236 struct vb2_buffer *vbuf)
237 {
238 vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
239 vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
240 vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
241 vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
242 }
243
244
245
246 /*
247 * Debugging and related.
248 */
249 #define cam_err(cam, fmt, arg...) \
250 dev_err((cam)->dev, fmt, ##arg);
251 #define cam_warn(cam, fmt, arg...) \
252 dev_warn((cam)->dev, fmt, ##arg);
253 #define cam_dbg(cam, fmt, arg...) \
254 dev_dbg((cam)->dev, fmt, ##arg);
255
256
257 /*
258 * Flag manipulation helpers
259 */
260 static void mcam_reset_buffers(struct mcam_camera *cam)
261 {
262 int i;
263
264 cam->next_buf = -1;
265 for (i = 0; i < cam->nbufs; i++) {
266 clear_bit(i, &cam->flags);
267 clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
268 }
269 }
270
271 static inline int mcam_needs_config(struct mcam_camera *cam)
272 {
273 return test_bit(CF_CONFIG_NEEDED, &cam->flags);
274 }
275
276 static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
277 {
278 if (needed)
279 set_bit(CF_CONFIG_NEEDED, &cam->flags);
280 else
281 clear_bit(CF_CONFIG_NEEDED, &cam->flags);
282 }
283
284 /* ------------------------------------------------------------------- */
285 /*
286 * Make the controller start grabbing images. Everything must
287 * be set up before doing this.
288 */
289 static void mcam_ctlr_start(struct mcam_camera *cam)
290 {
291 /* set_bit performs a read, so no other barrier should be
292 needed here */
293 mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
294 }
295
296 static void mcam_ctlr_stop(struct mcam_camera *cam)
297 {
298 mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
299 }
300
301 static void mcam_enable_mipi(struct mcam_camera *mcam)
302 {
303 /* Using MIPI mode: program the DPHY and enable the interface */
304 cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
305 mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
306 mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
307 mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
308 mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);
309
310 if (!mcam->mipi_enabled) {
311 if (mcam->lane > 4 || mcam->lane <= 0) {
312 cam_warn(mcam, "lane number error\n");
313 mcam->lane = 1; /* set the default value */
314 }
315 /*
316 * 0x41 activates 1 lane
317 * 0x43 activates 2 lanes
318 * 0x45 activates 3 lanes (never happens)
319 * 0x47 activates 4 lanes
320 */
321 mcam_reg_write(mcam, REG_CSI2_CTRL0,
322 CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
323 mcam_reg_write(mcam, REG_CLKCTRL,
324 (mcam->mclk_src << 29) | mcam->mclk_div);
325
326 mcam->mipi_enabled = true;
327 }
328 }
329
330 static void mcam_disable_mipi(struct mcam_camera *mcam)
331 {
332 /* Using parallel mode; make sure MIPI is disabled */
333 mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
334 mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
335 mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
336 mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
337 mcam->mipi_enabled = false;
338 }
339
340 /* ------------------------------------------------------------------- */
341
342 #ifdef MCAM_MODE_VMALLOC
343 /*
344 * Code specific to the vmalloc buffer mode.
345 */
346
347 /*
348 * Allocate in-kernel DMA buffers for vmalloc mode.
349 */
350 static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
351 {
352 int i;
353
354 mcam_set_config_needed(cam, 1);
355 if (loadtime)
356 cam->dma_buf_size = dma_buf_size;
357 else
358 cam->dma_buf_size = cam->pix_format.sizeimage;
359 if (n_dma_bufs > 3)
360 n_dma_bufs = 3;
361
362 cam->nbufs = 0;
363 for (i = 0; i < n_dma_bufs; i++) {
364 cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
365 cam->dma_buf_size, cam->dma_handles + i,
366 GFP_KERNEL);
367 if (cam->dma_bufs[i] == NULL) {
368 cam_warn(cam, "Failed to allocate DMA buffer\n");
369 break;
370 }
371 (cam->nbufs)++;
372 }
373
374 switch (cam->nbufs) {
375 case 1:
376 dma_free_coherent(cam->dev, cam->dma_buf_size,
377 cam->dma_bufs[0], cam->dma_handles[0]);
378 cam->nbufs = 0;
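/* fall through */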
379 case 0:
380 cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
381 return -ENOMEM;
382
383 case 2:
384 if (n_dma_bufs > 2)
385 cam_warn(cam, "Will limp along with only 2 buffers\n");
386 break;
387 }
388 return 0;
389 }
390
391 static void mcam_free_dma_bufs(struct mcam_camera *cam)
392 {
393 int i;
394
395 for (i = 0; i < cam->nbufs; i++) {
396 dma_free_coherent(cam->dev, cam->dma_buf_size,
397 cam->dma_bufs[i], cam->dma_handles[i]);
398 cam->dma_bufs[i] = NULL;
399 }
400 cam->nbufs = 0;
401 }
402
403
404 /*
405 * Set up DMA buffers when operating in vmalloc mode
406 */
407 static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
408 {
409 /*
410 * Store the first two Y buffers (we aren't supporting
411 * planar formats for now, so no UV bufs). Then either
412 * set the third if it exists, or tell the controller
413 * to just use two.
414 */
415 mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
416 mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
417 if (cam->nbufs > 2) {
418 mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
419 mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
420 } else
421 mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
422 if (cam->chip_id == MCAM_CAFE)
423 mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
424 }
425
426 /*
427 * Copy data out to user space in the vmalloc case
428 */
429 static void mcam_frame_tasklet(unsigned long data)
430 {
431 struct mcam_camera *cam = (struct mcam_camera *) data;
432 int i;
433 unsigned long flags;
434 struct mcam_vb_buffer *buf;
435
436 spin_lock_irqsave(&cam->dev_lock, flags);
437 for (i = 0; i < cam->nbufs; i++) {
438 int bufno = cam->next_buf;
439
440 if (cam->state != S_STREAMING || bufno < 0)
441 break; /* I/O got stopped */
442 if (++(cam->next_buf) >= cam->nbufs)
443 cam->next_buf = 0;
444 if (!test_bit(bufno, &cam->flags))
445 continue;
446 if (list_empty(&cam->buffers)) {
447 cam->frame_state.singles++;
448 break; /* Leave it valid, hope for better later */
449 }
450 cam->frame_state.delivered++;
451 clear_bit(bufno, &cam->flags);
452 buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
453 queue);
454 list_del_init(&buf->queue);
455 /*
456 * Drop the lock during the big copy. This *should* be safe...
457 */
458 spin_unlock_irqrestore(&cam->dev_lock, flags);
459 memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
460 cam->pix_format.sizeimage);
461 mcam_buffer_done(cam, bufno, &buf->vb_buf);
462 spin_lock_irqsave(&cam->dev_lock, flags);
463 }
464 spin_unlock_irqrestore(&cam->dev_lock, flags);
465 }
466
467
468 /*
469 * Make sure our allocated buffers are up to the task.
470 */
471 static int mcam_check_dma_buffers(struct mcam_camera *cam)
472 {
473 if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
474 mcam_free_dma_bufs(cam);
475 if (cam->nbufs == 0)
476 return mcam_alloc_dma_bufs(cam, 0);
477 return 0;
478 }
479
480 static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
481 {
482 tasklet_schedule(&cam->s_tasklet);
483 }
484
485 #else /* MCAM_MODE_VMALLOC */
486
487 static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
488 {
489 return 0;
490 }
491
492 static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
493 {
494 return;
495 }
496
497 static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
498 {
499 return 0;
500 }
501
502
503
504 #endif /* MCAM_MODE_VMALLOC */
505
506
507 #ifdef MCAM_MODE_DMA_CONTIG
508 /* ---------------------------------------------------------------------- */
509 /*
510 * DMA-contiguous code.
511 */
512
513 static bool mcam_fmt_is_planar(__u32 pfmt)
514 {
515 struct mcam_format_struct *f;
516
517 f = mcam_find_format(pfmt);
518 return f->planar;
519 }
520
521 /*
522 * Set up a contiguous buffer for the given frame. Here also is where
523 * the underrun strategy is set: if there is no buffer available, reuse
524 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
525 * keep the interrupt handler from giving that buffer back to user
526 * space. In this way, we always have a buffer to DMA to and don't
527 * have to try to play games stopping and restarting the controller.
528 */
529 static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
530 {
531 struct mcam_vb_buffer *buf;
532 struct v4l2_pix_format *fmt = &cam->pix_format;
533 dma_addr_t dma_handle;
534 u32 pixel_count = fmt->width * fmt->height;
535 struct vb2_buffer *vb;
536
537 /*
538 * If there are no available buffers, go into single mode
539 */
540 if (list_empty(&cam->buffers)) {
541 buf = cam->vb_bufs[frame ^ 0x1];
542 set_bit(CF_SINGLE_BUFFER, &cam->flags);
543 cam->frame_state.singles++;
544 } else {
545 /*
546 * OK, we have a buffer we can use.
547 */
548 buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
549 queue);
550 list_del_init(&buf->queue);
551 clear_bit(CF_SINGLE_BUFFER, &cam->flags);
552 }
553
554 cam->vb_bufs[frame] = buf;
555 vb = &buf->vb_buf;
556
557 dma_handle = vb2_dma_contig_plane_dma_addr(vb, 0);
558 buf->yuv_p.y = dma_handle;
559
560 switch (cam->pix_format.pixelformat) {
561 case V4L2_PIX_FMT_YUV422P:
562 buf->yuv_p.u = buf->yuv_p.y + pixel_count;
563 buf->yuv_p.v = buf->yuv_p.u + pixel_count / 2;
564 break;
565 case V4L2_PIX_FMT_YUV420:
566 buf->yuv_p.u = buf->yuv_p.y + pixel_count;
567 buf->yuv_p.v = buf->yuv_p.u + pixel_count / 4;
568 break;
569 case V4L2_PIX_FMT_YVU420:
570 buf->yuv_p.v = buf->yuv_p.y + pixel_count;
571 buf->yuv_p.u = buf->yuv_p.v + pixel_count / 4;
572 break;
573 default:
574 break;
575 }
576
577 mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR, buf->yuv_p.y);
578 if (mcam_fmt_is_planar(fmt->pixelformat)) {
579 mcam_reg_write(cam, frame == 0 ?
580 REG_U0BAR : REG_U1BAR, buf->yuv_p.u);
581 mcam_reg_write(cam, frame == 0 ?
582 REG_V0BAR : REG_V1BAR, buf->yuv_p.v);
583 }
584 }
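/*
 * For example, with a 640x480 YUV420 buffer: pixel_count is 307200, so the
 * U plane starts at offset 307200 and the V plane at 307200 + 76800 = 384000,
 * for a total of 460800 bytes (width * height * 3 / 2).
 */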
585
586 /*
587 * Initial B_DMA_contig setup.
588 */
589 static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
590 {
591 mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
592 cam->nbufs = 2;
593 mcam_set_contig_buffer(cam, 0);
594 mcam_set_contig_buffer(cam, 1);
595 }
596
597 /*
598 * Frame completion handling.
599 */
600 static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
601 {
602 struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
603
604 if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
605 cam->frame_state.delivered++;
606 mcam_buffer_done(cam, frame, &buf->vb_buf);
607 }
608 mcam_set_contig_buffer(cam, frame);
609 }
610
611 #endif /* MCAM_MODE_DMA_CONTIG */
612
613 #ifdef MCAM_MODE_DMA_SG
614 /* ---------------------------------------------------------------------- */
615 /*
616 * Scatter/gather-specific code.
617 */
618
619 /*
620 * Set up the next buffer for S/G I/O; caller should be sure that
621 * the controller is stopped and a buffer is available.
622 */
623 static void mcam_sg_next_buffer(struct mcam_camera *cam)
624 {
625 struct mcam_vb_buffer *buf;
626
627 buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
628 list_del_init(&buf->queue);
629 /*
630 * Very Bad Not Good Things happen if you don't clear
631 * C1_DESC_ENA before making any descriptor changes.
632 */
633 mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
634 mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
635 mcam_reg_write(cam, REG_DESC_LEN_Y,
636 buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
637 mcam_reg_write(cam, REG_DESC_LEN_U, 0);
638 mcam_reg_write(cam, REG_DESC_LEN_V, 0);
639 mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
640 cam->vb_bufs[0] = buf;
641 }
642
643 /*
644 * Initial B_DMA_sg setup
645 */
646 static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
647 {
648 /*
649 * The list-empty condition can hit us at resume time
650 * if the buffer list was empty when the system was suspended.
651 */
652 if (list_empty(&cam->buffers)) {
653 set_bit(CF_SG_RESTART, &cam->flags);
654 return;
655 }
656
657 mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
658 mcam_sg_next_buffer(cam);
659 cam->nbufs = 3;
660 }
661
662
663 /*
664 * Frame completion with S/G is trickier. We can't muck with
665 * a descriptor chain on the fly, since the controller buffers it
666 * internally. So we have to actually stop and restart; Marvell
667 * says this is the way to do it.
668 *
669 * Of course, stopping is easier said than done; experience shows
670 * that the controller can start a frame *after* C0_ENABLE has been
671 * cleared. So when running in S/G mode, the controller is "stopped"
672 * on receipt of the start-of-frame interrupt. That means we can
673 * safely change the DMA descriptor array here and restart things
674 * (assuming there's another buffer waiting to go).
675 */
676 static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
677 {
678 struct mcam_vb_buffer *buf = cam->vb_bufs[0];
679
680 /*
681 * If we're no longer supposed to be streaming, don't do anything.
682 */
683 if (cam->state != S_STREAMING)
684 return;
685 /*
686 * If we have another buffer available, put it in and
687 * restart the engine.
688 */
689 if (!list_empty(&cam->buffers)) {
690 mcam_sg_next_buffer(cam);
691 mcam_ctlr_start(cam);
692 /*
693 * Otherwise set CF_SG_RESTART and the controller will
694 * be restarted once another buffer shows up.
695 */
696 } else {
697 set_bit(CF_SG_RESTART, &cam->flags);
698 cam->frame_state.singles++;
699 cam->vb_bufs[0] = NULL;
700 }
701 /*
702 * Now we can give the completed frame back to user space.
703 */
704 cam->frame_state.delivered++;
705 mcam_buffer_done(cam, frame, &buf->vb_buf);
706 }
707
708
709 /*
710 * Scatter/gather mode requires stopping the controller between
711 * frames so we can put in a new DMA descriptor array. If no new
712 * buffer exists at frame completion, the controller is left stopped;
713 * this function is charged with getting things going again.
714 */
715 static void mcam_sg_restart(struct mcam_camera *cam)
716 {
717 mcam_ctlr_dma_sg(cam);
718 mcam_ctlr_start(cam);
719 clear_bit(CF_SG_RESTART, &cam->flags);
720 }
721
722 #else /* MCAM_MODE_DMA_SG */
723
724 static inline void mcam_sg_restart(struct mcam_camera *cam)
725 {
726 return;
727 }
728
729 #endif /* MCAM_MODE_DMA_SG */
730
731 /* ---------------------------------------------------------------------- */
732 /*
733 * Buffer-mode-independent controller code.
734 */
735
736 /*
737 * Image format setup
738 */
739 static void mcam_ctlr_image(struct mcam_camera *cam)
740 {
741 struct v4l2_pix_format *fmt = &cam->pix_format;
742 u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;
743
744 cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
745 fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
746 imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
747 imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;
748
749 switch (fmt->pixelformat) {
750 case V4L2_PIX_FMT_YUYV:
751 case V4L2_PIX_FMT_UYVY:
752 widthy = fmt->width * 2;
753 widthuv = 0;
754 break;
755 case V4L2_PIX_FMT_JPEG:
756 imgsz_h = (fmt->sizeimage / fmt->bytesperline) << IMGSZ_V_SHIFT;
757 widthy = fmt->bytesperline;
758 widthuv = 0;
759 break;
760 case V4L2_PIX_FMT_YUV422P:
761 case V4L2_PIX_FMT_YUV420:
762 case V4L2_PIX_FMT_YVU420:
763 widthy = fmt->width;
764 widthuv = fmt->width / 2;
765 break;
766 default:
767 widthy = fmt->bytesperline;
768 widthuv = 0;
769 }
770
771 mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
772 IMGP_YP_MASK | IMGP_UVP_MASK);
773 mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
774 mcam_reg_write(cam, REG_IMGOFFSET, 0x0);
775
776 /*
777 * Tell the controller about the image format we are using.
778 */
779 switch (fmt->pixelformat) {
780 case V4L2_PIX_FMT_YUV422P:
781 mcam_reg_write_mask(cam, REG_CTRL0,
782 C0_DF_YUV | C0_YUV_PLANAR | C0_YUVE_YVYU, C0_DF_MASK);
783 break;
784 case V4L2_PIX_FMT_YUV420:
785 case V4L2_PIX_FMT_YVU420:
786 mcam_reg_write_mask(cam, REG_CTRL0,
787 C0_DF_YUV | C0_YUV_420PL | C0_YUVE_YVYU, C0_DF_MASK);
788 break;
789 case V4L2_PIX_FMT_YUYV:
790 mcam_reg_write_mask(cam, REG_CTRL0,
791 C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_UYVY, C0_DF_MASK);
792 break;
793 case V4L2_PIX_FMT_UYVY:
794 mcam_reg_write_mask(cam, REG_CTRL0,
795 C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_YUYV, C0_DF_MASK);
796 break;
797 case V4L2_PIX_FMT_JPEG:
798 mcam_reg_write_mask(cam, REG_CTRL0,
799 C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_YUYV, C0_DF_MASK);
800 break;
801 case V4L2_PIX_FMT_RGB444:
802 mcam_reg_write_mask(cam, REG_CTRL0,
803 C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XRGB, C0_DF_MASK);
804 /* Alpha value? */
805 break;
806 case V4L2_PIX_FMT_RGB565:
807 mcam_reg_write_mask(cam, REG_CTRL0,
808 C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
809 break;
810 default:
811 cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
812 break;
813 }
814
815 /*
816 * Make sure it knows we want to use hsync/vsync.
817 */
818 mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
819 /*
820 * This field controls the generation of EOF (DVP only)
821 */
822 if (cam->bus_type != V4L2_MBUS_CSI2)
823 mcam_reg_set_bit(cam, REG_CTRL0,
824 C0_EOF_VSYNC | C0_VEDGE_CTRL);
825 }
826
827
828 /*
829 * Configure the controller for operation; caller holds the
830 * device mutex.
831 */
832 static int mcam_ctlr_configure(struct mcam_camera *cam)
833 {
834 unsigned long flags;
835
836 spin_lock_irqsave(&cam->dev_lock, flags);
837 clear_bit(CF_SG_RESTART, &cam->flags);
838 cam->dma_setup(cam);
839 mcam_ctlr_image(cam);
840 mcam_set_config_needed(cam, 0);
841 spin_unlock_irqrestore(&cam->dev_lock, flags);
842 return 0;
843 }
844
845 static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
846 {
847 /*
848 * Clear any pending interrupts, since we do not
849 * expect to have I/O active prior to enabling.
850 */
851 mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
852 mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
853 }
854
855 static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
856 {
857 mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
858 }
859
860
861
862 static void mcam_ctlr_init(struct mcam_camera *cam)
863 {
864 unsigned long flags;
865
866 spin_lock_irqsave(&cam->dev_lock, flags);
867 /*
868 * Make sure it's not powered down.
869 */
870 mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
871 /*
872 * Turn off the enable bit. It sure should be off anyway,
873 * but it's good to be sure.
874 */
875 mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
876 /*
877 * Clock the sensor appropriately. Controller clock should
878 * be 48MHz, sensor "typical" value is half that.
879 */
880 mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
881 spin_unlock_irqrestore(&cam->dev_lock, flags);
882 }
883
884
885 /*
886 * Stop the controller, and don't return until we're really sure that no
887 * further DMA is going on.
888 */
889 static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
890 {
891 unsigned long flags;
892
893 /*
894 * Theory: stop the camera controller (whether it is operating
895 * or not). Delay briefly just in case we race with the SOF
896 * interrupt, then wait until no DMA is active.
897 */
898 spin_lock_irqsave(&cam->dev_lock, flags);
899 clear_bit(CF_SG_RESTART, &cam->flags);
900 mcam_ctlr_stop(cam);
901 cam->state = S_IDLE;
902 spin_unlock_irqrestore(&cam->dev_lock, flags);
903 /*
904 * This is a brutally long sleep, but experience shows that
905 * it can take the controller a while to get the message that
906 * it needs to stop grabbing frames. In particular, we can
907 * sometimes (on mmp) get a frame at the end WITHOUT the
908 * start-of-frame indication.
909 */
910 msleep(150);
911 if (test_bit(CF_DMA_ACTIVE, &cam->flags))
912 cam_err(cam, "Timeout waiting for DMA to end\n");
913 /* This would be bad news - what now? */
914 spin_lock_irqsave(&cam->dev_lock, flags);
915 mcam_ctlr_irq_disable(cam);
916 spin_unlock_irqrestore(&cam->dev_lock, flags);
917 }
918
919 /*
920 * Power up and down.
921 */
922 static int mcam_ctlr_power_up(struct mcam_camera *cam)
923 {
924 unsigned long flags;
925 int ret;
926
927 spin_lock_irqsave(&cam->dev_lock, flags);
928 ret = cam->plat_power_up(cam);
929 if (ret) {
930 spin_unlock_irqrestore(&cam->dev_lock, flags);
931 return ret;
932 }
933 mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
934 spin_unlock_irqrestore(&cam->dev_lock, flags);
935 msleep(5); /* Just to be sure */
936 return 0;
937 }
938
939 static void mcam_ctlr_power_down(struct mcam_camera *cam)
940 {
941 unsigned long flags;
942
943 spin_lock_irqsave(&cam->dev_lock, flags);
944 /*
945 * School of hard knocks department: be sure we do any register
946 * twiddling on the controller *before* calling the platform
947 * power down routine.
948 */
949 mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
950 cam->plat_power_down(cam);
951 spin_unlock_irqrestore(&cam->dev_lock, flags);
952 }
953
954 /* -------------------------------------------------------------------- */
955 /*
956 * Communications with the sensor.
957 */
958
959 static int __mcam_cam_reset(struct mcam_camera *cam)
960 {
961 return sensor_call(cam, core, reset, 0);
962 }
963
964 /*
965 * We have found the sensor on the I2C bus. Let's try to have a
966 * conversation.
967 */
968 static int mcam_cam_init(struct mcam_camera *cam)
969 {
970 int ret;
971
972 mutex_lock(&cam->s_mutex);
973 if (cam->state != S_NOTREADY)
974 cam_warn(cam, "Cam init with device in funky state %d",
975 cam->state);
976 ret = __mcam_cam_reset(cam);
977 /* Get/set parameters? */
978 cam->state = S_IDLE;
979 mcam_ctlr_power_down(cam);
980 mutex_unlock(&cam->s_mutex);
981 return ret;
982 }
983
984 /*
985 * Configure the sensor to match the parameters we have. Caller should
986 * hold s_mutex
987 */
988 static int mcam_cam_set_flip(struct mcam_camera *cam)
989 {
990 struct v4l2_control ctrl;
991
992 memset(&ctrl, 0, sizeof(ctrl));
993 ctrl.id = V4L2_CID_VFLIP;
994 ctrl.value = flip;
995 return sensor_call(cam, core, s_ctrl, &ctrl);
996 }
997
998
999 static int mcam_cam_configure(struct mcam_camera *cam)
1000 {
1001 struct v4l2_mbus_framefmt mbus_fmt;
1002 int ret;
1003
1004 v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
1005 ret = sensor_call(cam, core, init, 0);
1006 if (ret == 0)
1007 ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
1008 /*
1009 * OV7670 does weird things if flip is set *before* format...
1010 */
1011 ret += mcam_cam_set_flip(cam);
1012 return ret;
1013 }
1014
1015 /*
1016 * Get everything ready, and start grabbing frames.
1017 */
1018 static int mcam_read_setup(struct mcam_camera *cam)
1019 {
1020 int ret;
1021 unsigned long flags;
1022
1023 /*
1024 * Configuration. If we still don't have DMA buffers,
1025 * make one last, desperate attempt.
1026 */
1027 if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
1028 mcam_alloc_dma_bufs(cam, 0))
1029 return -ENOMEM;
1030
1031 if (mcam_needs_config(cam)) {
1032 mcam_cam_configure(cam);
1033 ret = mcam_ctlr_configure(cam);
1034 if (ret)
1035 return ret;
1036 }
1037
1038 /*
1039 * Turn it loose.
1040 */
1041 spin_lock_irqsave(&cam->dev_lock, flags);
1042 clear_bit(CF_DMA_ACTIVE, &cam->flags);
1043 mcam_reset_buffers(cam);
1044 /*
1045 * Update CSI2_DPHY value
1046 */
1047 if (cam->calc_dphy)
1048 cam->calc_dphy(cam);
1049 cam_dbg(cam, "camera: DPHY sets: dphy3=0x%x, dphy5=0x%x, dphy6=0x%x\n",
1050 cam->dphy[0], cam->dphy[1], cam->dphy[2]);
1051 if (cam->bus_type == V4L2_MBUS_CSI2)
1052 mcam_enable_mipi(cam);
1053 else
1054 mcam_disable_mipi(cam);
1055 mcam_ctlr_irq_enable(cam);
1056 cam->state = S_STREAMING;
1057 if (!test_bit(CF_SG_RESTART, &cam->flags))
1058 mcam_ctlr_start(cam);
1059 spin_unlock_irqrestore(&cam->dev_lock, flags);
1060 return 0;
1061 }
1062
1063 /* ----------------------------------------------------------------------- */
1064 /*
1065 * Videobuf2 interface code.
1066 */
1067
1068 static int mcam_vb_queue_setup(struct vb2_queue *vq,
1069 const struct v4l2_format *fmt, unsigned int *nbufs,
1070 unsigned int *num_planes, unsigned int sizes[],
1071 void *alloc_ctxs[])
1072 {
1073 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1074 int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1075
1076 sizes[0] = cam->pix_format.sizeimage;
1077 *num_planes = 1; /* Someday we have to support planar formats... */
1078 if (*nbufs < minbufs)
1079 *nbufs = minbufs;
1080 if (cam->buffer_mode == B_DMA_contig)
1081 alloc_ctxs[0] = cam->vb_alloc_ctx;
1082 else if (cam->buffer_mode == B_DMA_sg)
1083 alloc_ctxs[0] = cam->vb_alloc_ctx_sg;
1084 return 0;
1085 }
1086
1087
1088 static void mcam_vb_buf_queue(struct vb2_buffer *vb)
1089 {
1090 struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1091 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1092 unsigned long flags;
1093 int start;
1094
1095 spin_lock_irqsave(&cam->dev_lock, flags);
1096 start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
1097 list_add(&mvb->queue, &cam->buffers);
1098 if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
1099 mcam_sg_restart(cam);
1100 spin_unlock_irqrestore(&cam->dev_lock, flags);
1101 if (start)
1102 mcam_read_setup(cam);
1103 }
1104
1105 /*
1106 * These need to be called with the mutex held from vb2
1107 */
1108 static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
1109 {
1110 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1111 unsigned int frame;
1112
1113 if (cam->state != S_IDLE) {
1114 INIT_LIST_HEAD(&cam->buffers);
1115 return -EINVAL;
1116 }
1117 cam->sequence = 0;
1118 /*
1119 * Videobuf2 sneakily hoards all the buffers and won't
1120 * give them to us until *after* streaming starts. But
1121 * we can't actually start streaming until we have a
1122 * destination. So go into a wait state and hope they
1123 * give us buffers soon.
1124 */
1125 if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
1126 cam->state = S_BUFWAIT;
1127 return 0;
1128 }
1129
1130 /*
1131 * Ensure the leftover frame flags are cleared
1132 * before we really start streaming.
1133 */
1134 for (frame = 0; frame < cam->nbufs; frame++)
1135 clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1136
1137 return mcam_read_setup(cam);
1138 }
1139
1140 static void mcam_vb_stop_streaming(struct vb2_queue *vq)
1141 {
1142 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1143 unsigned long flags;
1144
1145 if (cam->state == S_BUFWAIT) {
1146 /* They never gave us buffers */
1147 cam->state = S_IDLE;
1148 return;
1149 }
1150 if (cam->state != S_STREAMING)
1151 return;
1152 mcam_ctlr_stop_dma(cam);
1153 /*
1154 * Reset the CCIC PHY after stopping streaming,
1155 * otherwise the CCIC may be unstable.
1156 */
1157 if (cam->ctlr_reset)
1158 cam->ctlr_reset(cam);
1159 /*
1160 * VB2 reclaims the buffers, so we need to forget
1161 * about them.
1162 */
1163 spin_lock_irqsave(&cam->dev_lock, flags);
1164 INIT_LIST_HEAD(&cam->buffers);
1165 spin_unlock_irqrestore(&cam->dev_lock, flags);
1166 }
1167
1168
1169 static const struct vb2_ops mcam_vb2_ops = {
1170 .queue_setup = mcam_vb_queue_setup,
1171 .buf_queue = mcam_vb_buf_queue,
1172 .start_streaming = mcam_vb_start_streaming,
1173 .stop_streaming = mcam_vb_stop_streaming,
1174 .wait_prepare = vb2_ops_wait_prepare,
1175 .wait_finish = vb2_ops_wait_finish,
1176 };
1177
1178
1179 #ifdef MCAM_MODE_DMA_SG
1180 /*
1181 * Scatter/gather mode uses all of the above functions plus a
1182 * few extras to deal with DMA mapping.
1183 */
1184 static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
1185 {
1186 struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1187 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1188 int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1189
1190 mvb->dma_desc = dma_alloc_coherent(cam->dev,
1191 ndesc * sizeof(struct mcam_dma_desc),
1192 &mvb->dma_desc_pa, GFP_KERNEL);
1193 if (mvb->dma_desc == NULL) {
1194 cam_err(cam, "Unable to get DMA descriptor array\n");
1195 return -ENOMEM;
1196 }
1197 return 0;
1198 }
1199
1200 static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1201 {
1202 struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1203 struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1204 struct mcam_dma_desc *desc = mvb->dma_desc;
1205 struct scatterlist *sg;
1206 int i;
1207
1208 for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
1209 desc->dma_addr = sg_dma_address(sg);
1210 desc->segment_len = sg_dma_len(sg);
1211 desc++;
1212 }
1213 return 0;
1214 }
1215
1216 static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
1217 {
1218 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1219 struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
1220 int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1221
1222 dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
1223 mvb->dma_desc, mvb->dma_desc_pa);
1224 }
1225
1226
1227 static const struct vb2_ops mcam_vb2_sg_ops = {
1228 .queue_setup = mcam_vb_queue_setup,
1229 .buf_init = mcam_vb_sg_buf_init,
1230 .buf_prepare = mcam_vb_sg_buf_prepare,
1231 .buf_queue = mcam_vb_buf_queue,
1232 .buf_cleanup = mcam_vb_sg_buf_cleanup,
1233 .start_streaming = mcam_vb_start_streaming,
1234 .stop_streaming = mcam_vb_stop_streaming,
1235 .wait_prepare = vb2_ops_wait_prepare,
1236 .wait_finish = vb2_ops_wait_finish,
1237 };
1238
1239 #endif /* MCAM_MODE_DMA_SG */
1240
1241 static int mcam_setup_vb2(struct mcam_camera *cam)
1242 {
1243 struct vb2_queue *vq = &cam->vb_queue;
1244
1245 memset(vq, 0, sizeof(*vq));
1246 vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1247 vq->drv_priv = cam;
1248 vq->lock = &cam->s_mutex;
1249 vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
1250 INIT_LIST_HEAD(&cam->buffers);
1251 switch (cam->buffer_mode) {
1252 case B_DMA_contig:
1253 #ifdef MCAM_MODE_DMA_CONTIG
1254 vq->ops = &mcam_vb2_ops;
1255 vq->mem_ops = &vb2_dma_contig_memops;
1256 vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1257 vq->io_modes = VB2_MMAP | VB2_USERPTR;
1258 cam->dma_setup = mcam_ctlr_dma_contig;
1259 cam->frame_complete = mcam_dma_contig_done;
1260 cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
1261 if (IS_ERR(cam->vb_alloc_ctx))
1262 return PTR_ERR(cam->vb_alloc_ctx);
1263 #endif
1264 break;
1265 case B_DMA_sg:
1266 #ifdef MCAM_MODE_DMA_SG
1267 vq->ops = &mcam_vb2_sg_ops;
1268 vq->mem_ops = &vb2_dma_sg_memops;
1269 vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1270 vq->io_modes = VB2_MMAP | VB2_USERPTR;
1271 cam->dma_setup = mcam_ctlr_dma_sg;
1272 cam->frame_complete = mcam_dma_sg_done;
1273 cam->vb_alloc_ctx_sg = vb2_dma_sg_init_ctx(cam->dev);
1274 if (IS_ERR(cam->vb_alloc_ctx_sg))
1275 return PTR_ERR(cam->vb_alloc_ctx_sg);
1276 #endif
1277 break;
1278 case B_vmalloc:
1279 #ifdef MCAM_MODE_VMALLOC
1280 tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
1281 (unsigned long) cam);
1282 vq->ops = &mcam_vb2_ops;
1283 vq->mem_ops = &vb2_vmalloc_memops;
1284 vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
1285 vq->io_modes = VB2_MMAP;
1286 cam->dma_setup = mcam_ctlr_dma_vmalloc;
1287 cam->frame_complete = mcam_vmalloc_done;
1288 #endif
1289 break;
1290 }
1291 return vb2_queue_init(vq);
1292 }
1293
1294 static void mcam_cleanup_vb2(struct mcam_camera *cam)
1295 {
1296 vb2_queue_release(&cam->vb_queue);
1297 #ifdef MCAM_MODE_DMA_CONTIG
1298 if (cam->buffer_mode == B_DMA_contig)
1299 vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
1300 #endif
1301 #ifdef MCAM_MODE_DMA_SG
1302 if (cam->buffer_mode == B_DMA_sg)
1303 vb2_dma_sg_cleanup_ctx(cam->vb_alloc_ctx_sg);
1304 #endif
1305 }
1306
1307
1308 /* ---------------------------------------------------------------------- */
1309 /*
1310 * The long list of V4L2 ioctl() operations.
1311 */
1312
1313 static int mcam_vidioc_streamon(struct file *filp, void *priv,
1314 enum v4l2_buf_type type)
1315 {
1316 struct mcam_camera *cam = filp->private_data;
1317 int ret;
1318
1319 mutex_lock(&cam->s_mutex);
1320 ret = vb2_streamon(&cam->vb_queue, type);
1321 mutex_unlock(&cam->s_mutex);
1322 return ret;
1323 }
1324
1325
1326 static int mcam_vidioc_streamoff(struct file *filp, void *priv,
1327 enum v4l2_buf_type type)
1328 {
1329 struct mcam_camera *cam = filp->private_data;
1330 int ret;
1331
1332 mutex_lock(&cam->s_mutex);
1333 ret = vb2_streamoff(&cam->vb_queue, type);
1334 mutex_unlock(&cam->s_mutex);
1335 return ret;
1336 }
1337
1338
1339 static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
1340 struct v4l2_requestbuffers *req)
1341 {
1342 struct mcam_camera *cam = filp->private_data;
1343 int ret;
1344
1345 mutex_lock(&cam->s_mutex);
1346 ret = vb2_reqbufs(&cam->vb_queue, req);
1347 mutex_unlock(&cam->s_mutex);
1348 return ret;
1349 }
1350
1351
1352 static int mcam_vidioc_querybuf(struct file *filp, void *priv,
1353 struct v4l2_buffer *buf)
1354 {
1355 struct mcam_camera *cam = filp->private_data;
1356 int ret;
1357
1358 mutex_lock(&cam->s_mutex);
1359 ret = vb2_querybuf(&cam->vb_queue, buf);
1360 mutex_unlock(&cam->s_mutex);
1361 return ret;
1362 }
1363
1364 static int mcam_vidioc_qbuf(struct file *filp, void *priv,
1365 struct v4l2_buffer *buf)
1366 {
1367 struct mcam_camera *cam = filp->private_data;
1368 int ret;
1369
1370 mutex_lock(&cam->s_mutex);
1371 ret = vb2_qbuf(&cam->vb_queue, buf);
1372 mutex_unlock(&cam->s_mutex);
1373 return ret;
1374 }
1375
1376 static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
1377 struct v4l2_buffer *buf)
1378 {
1379 struct mcam_camera *cam = filp->private_data;
1380 int ret;
1381
1382 mutex_lock(&cam->s_mutex);
1383 ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
1384 mutex_unlock(&cam->s_mutex);
1385 return ret;
1386 }
1387
1388 static int mcam_vidioc_querycap(struct file *file, void *priv,
1389 struct v4l2_capability *cap)
1390 {
1391 struct mcam_camera *cam = priv;
1392
1393 strcpy(cap->driver, "marvell_ccic");
1394 strcpy(cap->card, "marvell_ccic");
1395 strlcpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
1396 cap->device_caps = V4L2_CAP_VIDEO_CAPTURE |
1397 V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
1398 cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
1399 return 0;
1400 }
1401
1402
1403 static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1404 void *priv, struct v4l2_fmtdesc *fmt)
1405 {
1406 if (fmt->index >= N_MCAM_FMTS)
1407 return -EINVAL;
1408 strlcpy(fmt->description, mcam_formats[fmt->index].desc,
1409 sizeof(fmt->description));
1410 fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1411 return 0;
1412 }
1413
1414 static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
1415 struct v4l2_format *fmt)
1416 {
1417 struct mcam_camera *cam = priv;
1418 struct mcam_format_struct *f;
1419 struct v4l2_pix_format *pix = &fmt->fmt.pix;
1420 struct v4l2_mbus_framefmt mbus_fmt;
1421 int ret;
1422
1423 f = mcam_find_format(pix->pixelformat);
1424 pix->pixelformat = f->pixelformat;
1425 v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
1426 mutex_lock(&cam->s_mutex);
1427 ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
1428 mutex_unlock(&cam->s_mutex);
1429 v4l2_fill_pix_format(pix, &mbus_fmt);
1430 switch (f->pixelformat) {
1431 case V4L2_PIX_FMT_YUV420:
1432 case V4L2_PIX_FMT_YVU420:
1433 pix->bytesperline = pix->width * 3 / 2;
1434 break;
1435 default:
1436 pix->bytesperline = pix->width * f->bpp;
1437 break;
1438 }
1439 pix->sizeimage = pix->height * pix->bytesperline;
1440 return ret;
1441 }
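/*
 * For example, a 640x480 request: YUYV yields bytesperline = 640 * 2 = 1280
 * and sizeimage = 1280 * 480 = 614400, while YUV420/YVU420 yield
 * bytesperline = 640 * 3 / 2 = 960 and sizeimage = 960 * 480 = 460800.
 */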
1442
1443 static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
1444 struct v4l2_format *fmt)
1445 {
1446 struct mcam_camera *cam = priv;
1447 struct mcam_format_struct *f;
1448 int ret;
1449
1450 /*
1451 * Can't do anything if the device is not idle.
1452 * Also can't if there are streaming buffers in place.
1453 */
1454 if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
1455 return -EBUSY;
1456
1457 f = mcam_find_format(fmt->fmt.pix.pixelformat);
1458
1459 /*
1460 * See if the formatting works in principle.
1461 */
1462 ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
1463 if (ret)
1464 return ret;
1465 /*
1466 * Now we start to change things for real, so let's do it
1467 * under lock.
1468 */
1469 mutex_lock(&cam->s_mutex);
1470 cam->pix_format = fmt->fmt.pix;
1471 cam->mbus_code = f->mbus_code;
1472
1473 /*
1474 * Make sure we have appropriate DMA buffers.
1475 */
1476 if (cam->buffer_mode == B_vmalloc) {
1477 ret = mcam_check_dma_buffers(cam);
1478 if (ret)
1479 goto out;
1480 }
1481 mcam_set_config_needed(cam, 1);
1482 out:
1483 mutex_unlock(&cam->s_mutex);
1484 return ret;
1485 }
1486
1487 /*
1488 * Return our stored notion of how the camera is/should be configured.
1489 * The V4L2 spec wants us to be smarter, and actually get this from
1490 * the camera (and not mess with it at open time). Someday.
1491 */
1492 static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
1493 struct v4l2_format *f)
1494 {
1495 struct mcam_camera *cam = priv;
1496
1497 f->fmt.pix = cam->pix_format;
1498 return 0;
1499 }
1500
1501 /*
1502 * We only have one input - the sensor - so minimize the nonsense here.
1503 */
1504 static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1505 struct v4l2_input *input)
1506 {
1507 if (input->index != 0)
1508 return -EINVAL;
1509
1510 input->type = V4L2_INPUT_TYPE_CAMERA;
1511 strcpy(input->name, "Camera");
1512 return 0;
1513 }
1514
1515 static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
1516 {
1517 *i = 0;
1518 return 0;
1519 }
1520
1521 static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1522 {
1523 if (i != 0)
1524 return -EINVAL;
1525 return 0;
1526 }
1527
1528 /*
1529 * G/S_PARM. Most of this is done by the sensor, but we are
1530 * the level which controls the number of read buffers.
1531 */
1532 static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1533 struct v4l2_streamparm *parms)
1534 {
1535 struct mcam_camera *cam = priv;
1536 int ret;
1537
1538 mutex_lock(&cam->s_mutex);
1539 ret = sensor_call(cam, video, g_parm, parms);
1540 mutex_unlock(&cam->s_mutex);
1541 parms->parm.capture.readbuffers = n_dma_bufs;
1542 return ret;
1543 }
1544
1545 static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1546 struct v4l2_streamparm *parms)
1547 {
1548 struct mcam_camera *cam = priv;
1549 int ret;
1550
1551 mutex_lock(&cam->s_mutex);
1552 ret = sensor_call(cam, video, s_parm, parms);
1553 mutex_unlock(&cam->s_mutex);
1554 parms->parm.capture.readbuffers = n_dma_bufs;
1555 return ret;
1556 }
1557
1558 static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
1559 struct v4l2_frmsizeenum *sizes)
1560 {
1561 struct mcam_camera *cam = priv;
1562 struct mcam_format_struct *f;
1563 struct v4l2_subdev_frame_size_enum fse = {
1564 .index = sizes->index,
1565 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1566 };
1567 int ret;
1568
1569 f = mcam_find_format(sizes->pixel_format);
1570 if (f->pixelformat != sizes->pixel_format)
1571 return -EINVAL;
1572 fse.code = f->mbus_code;
1573 mutex_lock(&cam->s_mutex);
1574 ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
1575 mutex_unlock(&cam->s_mutex);
1576 if (ret)
1577 return ret;
1578 if (fse.min_width == fse.max_width &&
1579 fse.min_height == fse.max_height) {
1580 sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1581 sizes->discrete.width = fse.min_width;
1582 sizes->discrete.height = fse.min_height;
1583 return 0;
1584 }
1585 sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
1586 sizes->stepwise.min_width = fse.min_width;
1587 sizes->stepwise.max_width = fse.max_width;
1588 sizes->stepwise.min_height = fse.min_height;
1589 sizes->stepwise.max_height = fse.max_height;
1590 sizes->stepwise.step_width = 1;
1591 sizes->stepwise.step_height = 1;
1592 return 0;
1593 }
1594
1595 static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
1596 struct v4l2_frmivalenum *interval)
1597 {
1598 struct mcam_camera *cam = priv;
1599 struct mcam_format_struct *f;
1600 struct v4l2_subdev_frame_interval_enum fie = {
1601 .index = interval->index,
1602 .width = interval->width,
1603 .height = interval->height,
1604 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1605 };
1606 int ret;
1607
1608 f = mcam_find_format(interval->pixel_format);
1609 if (f->pixelformat != interval->pixel_format)
1610 return -EINVAL;
1611 fie.code = f->mbus_code;
1612 mutex_lock(&cam->s_mutex);
1613 ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
1614 mutex_unlock(&cam->s_mutex);
1615 if (ret)
1616 return ret;
1617 interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1618 interval->discrete = fie.interval;
1619 return 0;
1620 }
1621
1622 #ifdef CONFIG_VIDEO_ADV_DEBUG
1623 static int mcam_vidioc_g_register(struct file *file, void *priv,
1624 struct v4l2_dbg_register *reg)
1625 {
1626 struct mcam_camera *cam = priv;
1627
1628 if (reg->reg > cam->regs_size - 4)
1629 return -EINVAL;
1630 reg->val = mcam_reg_read(cam, reg->reg);
1631 reg->size = 4;
1632 return 0;
1633 }
1634
1635 static int mcam_vidioc_s_register(struct file *file, void *priv,
1636 const struct v4l2_dbg_register *reg)
1637 {
1638 struct mcam_camera *cam = priv;
1639
1640 if (reg->reg > cam->regs_size - 4)
1641 return -EINVAL;
1642 mcam_reg_write(cam, reg->reg, reg->val);
1643 return 0;
1644 }
1645 #endif
1646
1647 static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
1648 .vidioc_querycap = mcam_vidioc_querycap,
1649 .vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
1650 .vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
1651 .vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
1652 .vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
1653 .vidioc_enum_input = mcam_vidioc_enum_input,
1654 .vidioc_g_input = mcam_vidioc_g_input,
1655 .vidioc_s_input = mcam_vidioc_s_input,
1656 .vidioc_reqbufs = mcam_vidioc_reqbufs,
1657 .vidioc_querybuf = mcam_vidioc_querybuf,
1658 .vidioc_qbuf = mcam_vidioc_qbuf,
1659 .vidioc_dqbuf = mcam_vidioc_dqbuf,
1660 .vidioc_streamon = mcam_vidioc_streamon,
1661 .vidioc_streamoff = mcam_vidioc_streamoff,
1662 .vidioc_g_parm = mcam_vidioc_g_parm,
1663 .vidioc_s_parm = mcam_vidioc_s_parm,
1664 .vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
1665 .vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
1666 #ifdef CONFIG_VIDEO_ADV_DEBUG
1667 .vidioc_g_register = mcam_vidioc_g_register,
1668 .vidioc_s_register = mcam_vidioc_s_register,
1669 #endif
1670 };
1671
1672 /* ---------------------------------------------------------------------- */
1673 /*
1674 * Our various file operations.
1675 */
1676 static int mcam_v4l_open(struct file *filp)
1677 {
1678 struct mcam_camera *cam = video_drvdata(filp);
1679 int ret = 0;
1680
1681 filp->private_data = cam;
1682
1683 cam->frame_state.frames = 0;
1684 cam->frame_state.singles = 0;
1685 cam->frame_state.delivered = 0;
1686 mutex_lock(&cam->s_mutex);
1687 if (cam->users == 0) {
1688 ret = mcam_setup_vb2(cam);
1689 if (ret)
1690 goto out;
1691 ret = mcam_ctlr_power_up(cam);
1692 if (ret)
1693 goto out;
1694 __mcam_cam_reset(cam);
1695 mcam_set_config_needed(cam, 1);
1696 }
1697 (cam->users)++;
1698 out:
1699 mutex_unlock(&cam->s_mutex);
1700 return ret;
1701 }
1702
1703
1704 static int mcam_v4l_release(struct file *filp)
1705 {
1706 struct mcam_camera *cam = filp->private_data;
1707
1708 cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
1709 cam->frame_state.frames, cam->frame_state.singles,
1710 cam->frame_state.delivered);
1711 mutex_lock(&cam->s_mutex);
1712 (cam->users)--;
1713 if (cam->users == 0) {
1714 mcam_ctlr_stop_dma(cam);
1715 mcam_cleanup_vb2(cam);
1716 mcam_disable_mipi(cam);
1717 mcam_ctlr_power_down(cam);
1718 if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
1719 mcam_free_dma_bufs(cam);
1720 }
1721
1722 mutex_unlock(&cam->s_mutex);
1723 return 0;
1724 }
1725
1726 static ssize_t mcam_v4l_read(struct file *filp,
1727 char __user *buffer, size_t len, loff_t *pos)
1728 {
1729 struct mcam_camera *cam = filp->private_data;
1730 int ret;
1731
1732 mutex_lock(&cam->s_mutex);
1733 ret = vb2_read(&cam->vb_queue, buffer, len, pos,
1734 filp->f_flags & O_NONBLOCK);
1735 mutex_unlock(&cam->s_mutex);
1736 return ret;
1737 }
1738
1739
1740
1741 static unsigned int mcam_v4l_poll(struct file *filp,
1742 struct poll_table_struct *pt)
1743 {
1744 struct mcam_camera *cam = filp->private_data;
1745 int ret;
1746
1747 mutex_lock(&cam->s_mutex);
1748 ret = vb2_poll(&cam->vb_queue, filp, pt);
1749 mutex_unlock(&cam->s_mutex);
1750 return ret;
1751 }
1752
1753
1754 static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
1755 {
1756 struct mcam_camera *cam = filp->private_data;
1757 int ret;
1758
1759 mutex_lock(&cam->s_mutex);
1760 ret = vb2_mmap(&cam->vb_queue, vma);
1761 mutex_unlock(&cam->s_mutex);
1762 return ret;
1763 }
1764
1765
1766
1767 static const struct v4l2_file_operations mcam_v4l_fops = {
1768 .owner = THIS_MODULE,
1769 .open = mcam_v4l_open,
1770 .release = mcam_v4l_release,
1771 .read = mcam_v4l_read,
1772 .poll = mcam_v4l_poll,
1773 .mmap = mcam_v4l_mmap,
1774 .unlocked_ioctl = video_ioctl2,
1775 };
1776
1777
1778 /*
1779 * This template device holds all of those v4l2 methods; we
1780 * clone it for specific real devices.
1781 */
1782 static struct video_device mcam_v4l_template = {
1783 .name = "mcam",
1784 .tvnorms = V4L2_STD_NTSC_M,
1785
1786 .fops = &mcam_v4l_fops,
1787 .ioctl_ops = &mcam_v4l_ioctl_ops,
1788 .release = video_device_release_empty,
1789 };
1790
1791 /* ---------------------------------------------------------------------- */
1792 /*
1793 * Interrupt handler stuff
1794 */
1795 static void mcam_frame_complete(struct mcam_camera *cam, int frame)
1796 {
1797 /*
1798 * Basic frame housekeeping.
1799 */
1800 set_bit(frame, &cam->flags);
1801 clear_bit(CF_DMA_ACTIVE, &cam->flags);
1802 cam->next_buf = frame;
1803 cam->buf_seq[frame] = ++(cam->sequence);
1804 cam->frame_state.frames++;
1805 /*
1806 * "This should never happen"
1807 */
1808 if (cam->state != S_STREAMING)
1809 return;
1810 /*
1811 * Process the frame and set up the next one.
1812 */
1813 cam->frame_complete(cam, frame);
1814 }
1815
1816
1817 /*
1818 * The interrupt handler; this needs to be called from the
1819 * platform irq handler with the lock held.
1820 */
1821 int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1822 {
1823 unsigned int frame, handled = 0;
1824
1825 mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1826 /*
1827 * Handle any frame completions. There really should
1828 * not be more than one of these, or we have fallen
1829 * far behind.
1830 *
1831 * When running in S/G mode, the frame number lacks any
1832 * real meaning - there's only one descriptor array - but
1833 * the controller still picks a different one to signal
1834 * each time.
1835 */
1836 for (frame = 0; frame < cam->nbufs; frame++)
1837 if (irqs & (IRQ_EOF0 << frame) &&
1838 test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1839 mcam_frame_complete(cam, frame);
1840 handled = 1;
1841 clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1842 if (cam->buffer_mode == B_DMA_sg)
1843 break;
1844 }
1845 /*
1846 * If a frame starts, note that we have DMA active. This
1847 * code assumes that we won't get multiple frame interrupts
1848 * at once; may want to rethink that.
1849 */
1850 for (frame = 0; frame < cam->nbufs; frame++) {
1851 if (irqs & (IRQ_SOF0 << frame)) {
1852 set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1853 handled = IRQ_HANDLED;
1854 }
1855 }
1856
1857 if (handled == IRQ_HANDLED) {
1858 set_bit(CF_DMA_ACTIVE, &cam->flags);
1859 if (cam->buffer_mode == B_DMA_sg)
1860 mcam_ctlr_stop(cam);
1861 }
1862 return handled;
1863 }
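/*
 * A minimal sketch of the expected caller, loosely modeled on the mainline
 * platform drivers; the handler name and the way the mcam_camera pointer is
 * recovered from the irq cookie are illustrative assumptions:
 *
 *	static irqreturn_t my_cam_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *mcam = data;
 *		unsigned int irqs;
 *		int handled;
 *
 *		spin_lock(&mcam->dev_lock);
 *		irqs = mcam_reg_read(mcam, REG_IRQSTAT);
 *		handled = mccic_irq(mcam, irqs);
 *		spin_unlock(&mcam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */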
1864
1865 /* ---------------------------------------------------------------------- */
1866 /*
1867 * Registration and such.
1868 */
1869 static struct ov7670_config sensor_cfg = {
1870 /*
1871 * Exclude QCIF mode, because it only captures a tiny portion
1872 * of the sensor FOV
1873 */
1874 .min_width = 320,
1875 .min_height = 240,
1876 };
1877
1878
1879 int mccic_register(struct mcam_camera *cam)
1880 {
1881 struct i2c_board_info ov7670_info = {
1882 .type = "ov7670",
1883 .addr = 0x42 >> 1,
1884 .platform_data = &sensor_cfg,
1885 };
1886 int ret;
1887
1888 /*
1889 * Validate the requested buffer mode.
1890 */
1891 if (buffer_mode >= 0)
1892 cam->buffer_mode = buffer_mode;
1893 if (cam->buffer_mode == B_DMA_sg &&
1894 cam->chip_id == MCAM_CAFE) {
1895 printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, "
1896 "attempting vmalloc mode instead\n");
1897 cam->buffer_mode = B_vmalloc;
1898 }
1899 if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
1900 printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
1901 cam->buffer_mode);
1902 return -EINVAL;
1903 }
1904 /*
1905 * Register with V4L
1906 */
1907 ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
1908 if (ret)
1909 return ret;
1910
1911 mutex_init(&cam->s_mutex);
1912 cam->state = S_NOTREADY;
1913 mcam_set_config_needed(cam, 1);
1914 cam->pix_format = mcam_def_pix_format;
1915 cam->mbus_code = mcam_def_mbus_code;
1916 INIT_LIST_HEAD(&cam->buffers);
1917 mcam_ctlr_init(cam);
1918
1919 /*
1920 * Try to find the sensor.
1921 */
1922 sensor_cfg.clock_speed = cam->clock_speed;
1923 sensor_cfg.use_smbus = cam->use_smbus;
1924 cam->sensor_addr = ov7670_info.addr;
1925 cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
1926 cam->i2c_adapter, &ov7670_info, NULL);
1927 if (cam->sensor == NULL) {
1928 ret = -ENODEV;
1929 goto out_unregister;
1930 }
1931
1932 ret = mcam_cam_init(cam);
1933 if (ret)
1934 goto out_unregister;
1935 /*
1936 * Get the v4l2 setup done.
1937 */
1938 ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
1939 if (ret)
1940 goto out_unregister;
1941 cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
1942
1943 mutex_lock(&cam->s_mutex);
1944 cam->vdev = mcam_v4l_template;
1945 cam->vdev.v4l2_dev = &cam->v4l2_dev;
1946 video_set_drvdata(&cam->vdev, cam);
1947 ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
1948 if (ret)
1949 goto out;
1950
1951 /*
1952 * If so requested, try to get our DMA buffers now.
1953 */
1954 if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
1955 if (mcam_alloc_dma_bufs(cam, 1))
1956 cam_warn(cam, "Unable to alloc DMA buffers at load; "
1957 "will try again later.");
1958 }
1959
1960 out:
1961 v4l2_ctrl_handler_free(&cam->ctrl_handler);
1962 mutex_unlock(&cam->s_mutex);
1963 return ret;
1964 out_unregister:
1965 v4l2_device_unregister(&cam->v4l2_dev);
1966 return ret;
1967 }
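/*
 * A rough sketch of how a platform driver is expected to drive this core;
 * names that do not appear in this file (priv, pdev, my_power_up,
 * my_power_down, my_i2c_adapter, MCAM_ARMADA610) are illustrative
 * assumptions, not part of the core's API:
 *
 *	struct mcam_camera *mcam = &priv->mcam;
 *
 *	mcam->dev = &pdev->dev;
 *	mcam->chip_id = MCAM_ARMADA610;
 *	mcam->buffer_mode = B_DMA_contig;
 *	mcam->bus_type = V4L2_MBUS_CSI2;
 *	mcam->plat_power_up = my_power_up;
 *	mcam->plat_power_down = my_power_down;
 *	mcam->i2c_adapter = my_i2c_adapter;
 *	spin_lock_init(&mcam->dev_lock);
 *	ret = mccic_register(mcam);
 *
 * The platform driver's interrupt handler then feeds status bits to
 * mccic_irq() (see the sketch above), and teardown goes through
 * mccic_shutdown().
 */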
1968
1969
1970 void mccic_shutdown(struct mcam_camera *cam)
1971 {
1972 /*
1973 * If we have no users (and we really, really should have no
1974 * users) the device will already be powered down. Trying to
1975 * take it down again will wedge the machine, which is frowned
1976 * upon.
1977 */
1978 if (cam->users > 0) {
1979 cam_warn(cam, "Removing a device with users!\n");
1980 mcam_ctlr_power_down(cam);
1981 }
1982 vb2_queue_release(&cam->vb_queue);
1983 if (cam->buffer_mode == B_vmalloc)
1984 mcam_free_dma_bufs(cam);
1985 video_unregister_device(&cam->vdev);
1986 v4l2_ctrl_handler_free(&cam->ctrl_handler);
1987 v4l2_device_unregister(&cam->v4l2_dev);
1988 }
1989
1990 /*
1991 * Power management
1992 */
1993 #ifdef CONFIG_PM
1994
1995 void mccic_suspend(struct mcam_camera *cam)
1996 {
1997 mutex_lock(&cam->s_mutex);
1998 if (cam->users > 0) {
1999 enum mcam_state cstate = cam->state;
2000
2001 mcam_ctlr_stop_dma(cam);
2002 mcam_ctlr_power_down(cam);
2003 cam->state = cstate;
2004 }
2005 mutex_unlock(&cam->s_mutex);
2006 }
2007
2008 int mccic_resume(struct mcam_camera *cam)
2009 {
2010 int ret = 0;
2011
2012 mutex_lock(&cam->s_mutex);
2013 if (cam->users > 0) {
2014 ret = mcam_ctlr_power_up(cam);
2015 if (ret) {
2016 mutex_unlock(&cam->s_mutex);
2017 return ret;
2018 }
2019 __mcam_cam_reset(cam);
2020 } else {
2021 mcam_ctlr_power_down(cam);
2022 }
2023 mutex_unlock(&cam->s_mutex);
2024
2025 set_bit(CF_CONFIG_NEEDED, &cam->flags);
2026 if (cam->state == S_STREAMING) {
2027 /*
2028 * If there was a buffer in the DMA engine at suspend
2029 * time, put it back on the queue or we'll forget about it.
2030 */
2031 if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
2032 list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
2033 ret = mcam_read_setup(cam);
2034 }
2035 return ret;
2036 }
2037 #endif /* CONFIG_PM */