[media] v4l: omap4iss: Remove iss_video streaming field
drivers/staging/media/omap4iss/iss_video.c
/*
 * TI OMAP4 ISS V4L2 Driver - Generic video node
 *
 * Copyright (C) 2012 Texas Instruments, Inc.
 *
 * Author: Sergio Aguirre <sergio.a.aguirre@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <asm/cacheflush.h>
#include <linux/clk.h>
#include <linux/mm.h>
#include <linux/pagemap.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>
#include <linux/module.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-ioctl.h>

#include "iss_video.h"
#include "iss.h"


/* -----------------------------------------------------------------------------
 * Helper functions
 */

static struct iss_format_info formats[] = {
        { V4L2_MBUS_FMT_Y8_1X8, V4L2_MBUS_FMT_Y8_1X8,
          V4L2_MBUS_FMT_Y8_1X8, V4L2_MBUS_FMT_Y8_1X8,
          V4L2_PIX_FMT_GREY, 8, },
        { V4L2_MBUS_FMT_Y10_1X10, V4L2_MBUS_FMT_Y10_1X10,
          V4L2_MBUS_FMT_Y10_1X10, V4L2_MBUS_FMT_Y8_1X8,
          V4L2_PIX_FMT_Y10, 10, },
        { V4L2_MBUS_FMT_Y12_1X12, V4L2_MBUS_FMT_Y10_1X10,
          V4L2_MBUS_FMT_Y12_1X12, V4L2_MBUS_FMT_Y8_1X8,
          V4L2_PIX_FMT_Y12, 12, },
        { V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_MBUS_FMT_SBGGR8_1X8,
          V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_MBUS_FMT_SBGGR8_1X8,
          V4L2_PIX_FMT_SBGGR8, 8, },
        { V4L2_MBUS_FMT_SGBRG8_1X8, V4L2_MBUS_FMT_SGBRG8_1X8,
          V4L2_MBUS_FMT_SGBRG8_1X8, V4L2_MBUS_FMT_SGBRG8_1X8,
          V4L2_PIX_FMT_SGBRG8, 8, },
        { V4L2_MBUS_FMT_SGRBG8_1X8, V4L2_MBUS_FMT_SGRBG8_1X8,
          V4L2_MBUS_FMT_SGRBG8_1X8, V4L2_MBUS_FMT_SGRBG8_1X8,
          V4L2_PIX_FMT_SGRBG8, 8, },
        { V4L2_MBUS_FMT_SRGGB8_1X8, V4L2_MBUS_FMT_SRGGB8_1X8,
          V4L2_MBUS_FMT_SRGGB8_1X8, V4L2_MBUS_FMT_SRGGB8_1X8,
          V4L2_PIX_FMT_SRGGB8, 8, },
        { V4L2_MBUS_FMT_SGRBG10_DPCM8_1X8, V4L2_MBUS_FMT_SGRBG10_DPCM8_1X8,
          V4L2_MBUS_FMT_SGRBG10_1X10, 0,
          V4L2_PIX_FMT_SGRBG10DPCM8, 8, },
        { V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_MBUS_FMT_SBGGR10_1X10,
          V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_MBUS_FMT_SBGGR8_1X8,
          V4L2_PIX_FMT_SBGGR10, 10, },
        { V4L2_MBUS_FMT_SGBRG10_1X10, V4L2_MBUS_FMT_SGBRG10_1X10,
          V4L2_MBUS_FMT_SGBRG10_1X10, V4L2_MBUS_FMT_SGBRG8_1X8,
          V4L2_PIX_FMT_SGBRG10, 10, },
        { V4L2_MBUS_FMT_SGRBG10_1X10, V4L2_MBUS_FMT_SGRBG10_1X10,
          V4L2_MBUS_FMT_SGRBG10_1X10, V4L2_MBUS_FMT_SGRBG8_1X8,
          V4L2_PIX_FMT_SGRBG10, 10, },
        { V4L2_MBUS_FMT_SRGGB10_1X10, V4L2_MBUS_FMT_SRGGB10_1X10,
          V4L2_MBUS_FMT_SRGGB10_1X10, V4L2_MBUS_FMT_SRGGB8_1X8,
          V4L2_PIX_FMT_SRGGB10, 10, },
        { V4L2_MBUS_FMT_SBGGR12_1X12, V4L2_MBUS_FMT_SBGGR10_1X10,
          V4L2_MBUS_FMT_SBGGR12_1X12, V4L2_MBUS_FMT_SBGGR8_1X8,
          V4L2_PIX_FMT_SBGGR12, 12, },
        { V4L2_MBUS_FMT_SGBRG12_1X12, V4L2_MBUS_FMT_SGBRG10_1X10,
          V4L2_MBUS_FMT_SGBRG12_1X12, V4L2_MBUS_FMT_SGBRG8_1X8,
          V4L2_PIX_FMT_SGBRG12, 12, },
        { V4L2_MBUS_FMT_SGRBG12_1X12, V4L2_MBUS_FMT_SGRBG10_1X10,
          V4L2_MBUS_FMT_SGRBG12_1X12, V4L2_MBUS_FMT_SGRBG8_1X8,
          V4L2_PIX_FMT_SGRBG12, 12, },
        { V4L2_MBUS_FMT_SRGGB12_1X12, V4L2_MBUS_FMT_SRGGB10_1X10,
          V4L2_MBUS_FMT_SRGGB12_1X12, V4L2_MBUS_FMT_SRGGB8_1X8,
          V4L2_PIX_FMT_SRGGB12, 12, },
        { V4L2_MBUS_FMT_UYVY8_1X16, V4L2_MBUS_FMT_UYVY8_1X16,
          V4L2_MBUS_FMT_UYVY8_1X16, 0,
          V4L2_PIX_FMT_UYVY, 16, },
        { V4L2_MBUS_FMT_YUYV8_1X16, V4L2_MBUS_FMT_YUYV8_1X16,
          V4L2_MBUS_FMT_YUYV8_1X16, 0,
          V4L2_PIX_FMT_YUYV, 16, },
        { V4L2_MBUS_FMT_YUYV8_1_5X8, V4L2_MBUS_FMT_YUYV8_1_5X8,
          V4L2_MBUS_FMT_YUYV8_1_5X8, 0,
          V4L2_PIX_FMT_NV12, 8, },
};

const struct iss_format_info *
omap4iss_video_format_info(enum v4l2_mbus_pixelcode code)
{
        unsigned int i;

        for (i = 0; i < ARRAY_SIZE(formats); ++i) {
                if (formats[i].code == code)
                        return &formats[i];
        }

        return NULL;
}

/*
 * iss_video_mbus_to_pix - Convert v4l2_mbus_framefmt to v4l2_pix_format
 * @video: ISS video instance
 * @mbus: v4l2_mbus_framefmt format (input)
 * @pix: v4l2_pix_format format (output)
 *
 * Fill the output pix structure with information from the input mbus format.
 * The bytesperline and sizeimage fields are computed from the requested bytes
 * per line value in the pix format and information from the video instance.
 *
 * Return the number of padding bytes at end of line.
 */
static unsigned int iss_video_mbus_to_pix(const struct iss_video *video,
                                          const struct v4l2_mbus_framefmt *mbus,
                                          struct v4l2_pix_format *pix)
{
        unsigned int bpl = pix->bytesperline;
        unsigned int min_bpl;
        unsigned int i;

        memset(pix, 0, sizeof(*pix));
        pix->width = mbus->width;
        pix->height = mbus->height;

        /* Skip the last format in the loop so that it will be selected if no
         * match is found.
         */
        for (i = 0; i < ARRAY_SIZE(formats) - 1; ++i) {
                if (formats[i].code == mbus->code)
                        break;
        }

        min_bpl = pix->width * ALIGN(formats[i].bpp, 8) / 8;

        /* Clamp the requested bytes per line value. If the maximum bytes per
         * line value is zero, the module doesn't support user configurable line
         * sizes. Override the requested value with the minimum in that case.
         */
        if (video->bpl_max)
                bpl = clamp(bpl, min_bpl, video->bpl_max);
        else
                bpl = min_bpl;

        if (!video->bpl_zero_padding || bpl != min_bpl)
                bpl = ALIGN(bpl, video->bpl_alignment);

        pix->pixelformat = formats[i].pixelformat;
        pix->bytesperline = bpl;
        pix->sizeimage = pix->bytesperline * pix->height;
        pix->colorspace = mbus->colorspace;
        pix->field = mbus->field;

        /* FIXME: Special case for NV12! We should make this nicer... */
        if (pix->pixelformat == V4L2_PIX_FMT_NV12)
                pix->sizeimage += (pix->bytesperline * pix->height) / 2;

        return bpl - min_bpl;
}

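/*
 * iss_video_pix_to_mbus - Convert v4l2_pix_format to v4l2_mbus_framefmt
 * @pix: v4l2_pix_format format (input)
 * @mbus: v4l2_mbus_framefmt format (output)
 *
 * Fill the output mbus structure with information from the input pix format.
 * The media bus code is looked up from the pixel format in the formats table.
 */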
static void iss_video_pix_to_mbus(const struct v4l2_pix_format *pix,
                                  struct v4l2_mbus_framefmt *mbus)
{
        unsigned int i;

        memset(mbus, 0, sizeof(*mbus));
        mbus->width = pix->width;
        mbus->height = pix->height;

        for (i = 0; i < ARRAY_SIZE(formats); ++i) {
                if (formats[i].pixelformat == pix->pixelformat)
                        break;
        }

        if (WARN_ON(i == ARRAY_SIZE(formats)))
                return;

        mbus->code = formats[i].code;
        mbus->colorspace = pix->colorspace;
        mbus->field = pix->field;
}

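/*
 * iss_video_remote_subdev - Return the subdev connected to the video node
 * @video: ISS video instance
 * @pad: pointer filled with the index of the remote pad, may be NULL
 *
 * Return the V4L2 subdevice at the other end of the link connected to the
 * video node's pad, or NULL if the video node isn't connected to a subdev.
 */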
static struct v4l2_subdev *
iss_video_remote_subdev(struct iss_video *video, u32 *pad)
{
        struct media_pad *remote;

        remote = media_entity_remote_pad(&video->pad);

        if (remote == NULL ||
            media_entity_type(remote->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
                return NULL;

        if (pad)
                *pad = remote->index;

        return media_entity_to_v4l2_subdev(remote->entity);
}

/* Return a pointer to the ISS video instance at the far end of the pipeline. */
static struct iss_video *
iss_video_far_end(struct iss_video *video)
{
        struct media_entity_graph graph;
        struct media_entity *entity = &video->video.entity;
        struct media_device *mdev = entity->parent;
        struct iss_video *far_end = NULL;

        mutex_lock(&mdev->graph_mutex);
        media_entity_graph_walk_start(&graph, entity);

        while ((entity = media_entity_graph_walk_next(&graph))) {
                if (entity == &video->video.entity)
                        continue;

                if (media_entity_type(entity) != MEDIA_ENT_T_DEVNODE)
                        continue;

                far_end = to_iss_video(media_entity_to_video_device(entity));
                if (far_end->type != video->type)
                        break;

                far_end = NULL;
        }

        mutex_unlock(&mdev->graph_mutex);
        return far_end;
}

static int
__iss_video_get_format(struct iss_video *video, struct v4l2_format *format)
{
        struct v4l2_subdev_format fmt;
        struct v4l2_subdev *subdev;
        u32 pad;
        int ret;

        subdev = iss_video_remote_subdev(video, &pad);
        if (subdev == NULL)
                return -EINVAL;

        mutex_lock(&video->mutex);

        fmt.pad = pad;
        fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
        ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
        if (ret == -ENOIOCTLCMD)
                ret = -EINVAL;

        mutex_unlock(&video->mutex);

        if (ret)
                return ret;

        format->type = video->type;
        return iss_video_mbus_to_pix(video, &fmt.format, &format->fmt.pix);
}

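/*
 * iss_video_check_format - Verify the file handle format against the pipeline
 * @video: ISS video instance
 * @vfh: ISS video file handle
 *
 * Compare the format stored in the file handle with the active format of the
 * connected subdev pad. Return the number of padding bytes at end of line on
 * success, -EINVAL if the formats don't match, or a negative error code if the
 * subdev format can't be retrieved.
 */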
static int
iss_video_check_format(struct iss_video *video, struct iss_video_fh *vfh)
{
        struct v4l2_format format;
        int ret;

        memcpy(&format, &vfh->format, sizeof(format));
        ret = __iss_video_get_format(video, &format);
        if (ret < 0)
                return ret;

        if (vfh->format.fmt.pix.pixelformat != format.fmt.pix.pixelformat ||
            vfh->format.fmt.pix.height != format.fmt.pix.height ||
            vfh->format.fmt.pix.width != format.fmt.pix.width ||
            vfh->format.fmt.pix.bytesperline != format.fmt.pix.bytesperline ||
            vfh->format.fmt.pix.sizeimage != format.fmt.pix.sizeimage)
                return -EINVAL;

        return ret;
}

/* -----------------------------------------------------------------------------
 * Video queue operations
 */

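/*
 * iss_video_queue_setup - Negotiate buffer count and plane size with vb2
 *
 * A single plane sized to the current format's sizeimage is used for all
 * formats. The buffer count is clamped so that the total allocation doesn't
 * exceed the video node's capture memory limit (video->capture_mem).
 */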
static int iss_video_queue_setup(struct vb2_queue *vq, const struct v4l2_format *fmt,
                                 unsigned int *count, unsigned int *num_planes,
                                 unsigned int sizes[], void *alloc_ctxs[])
{
        struct iss_video_fh *vfh = vb2_get_drv_priv(vq);
        struct iss_video *video = vfh->video;

        /* Revisit multi-planar support for NV12 */
        *num_planes = 1;

        sizes[0] = vfh->format.fmt.pix.sizeimage;
        if (sizes[0] == 0)
                return -EINVAL;

        alloc_ctxs[0] = video->alloc_ctx;

        *count = min(*count, (unsigned int)(video->capture_mem / PAGE_ALIGN(sizes[0])));

        return 0;
}

static void iss_video_buf_cleanup(struct vb2_buffer *vb)
{
        struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb);

        if (buffer->iss_addr)
                buffer->iss_addr = 0;
}

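/*
 * iss_video_buf_prepare - Validate a buffer before it is queued
 *
 * Check that the plane is large enough for the current format and that the
 * DMA address is aligned to a 32-byte boundary, then record the address in
 * the buffer structure for later use by the DMA engine.
 */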
static int iss_video_buf_prepare(struct vb2_buffer *vb)
{
        struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue);
        struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb);
        struct iss_video *video = vfh->video;
        unsigned long size = vfh->format.fmt.pix.sizeimage;
        dma_addr_t addr;

        if (vb2_plane_size(vb, 0) < size)
                return -ENOBUFS;

        addr = vb2_dma_contig_plane_dma_addr(vb, 0);
        if (!IS_ALIGNED(addr, 32)) {
                dev_dbg(video->iss->dev,
                        "Buffer address must be aligned to 32 bytes boundary.\n");
                return -EINVAL;
        }

        vb2_set_plane_payload(vb, 0, size);
        buffer->iss_addr = addr;
        return 0;
}

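/*
 * iss_video_buf_queue - Add a buffer to the DMA queue
 *
 * If the DMA queue was empty, also queue the buffer to the hardware, update
 * the pipeline state and, when the whole pipeline is ready, start it in
 * single-shot mode.
 */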
static void iss_video_buf_queue(struct vb2_buffer *vb)
{
        struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue);
        struct iss_video *video = vfh->video;
        struct iss_buffer *buffer = container_of(vb, struct iss_buffer, vb);
        struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
        unsigned int empty;
        unsigned long flags;

        spin_lock_irqsave(&video->qlock, flags);
        empty = list_empty(&video->dmaqueue);
        list_add_tail(&buffer->list, &video->dmaqueue);
        spin_unlock_irqrestore(&video->qlock, flags);

        if (empty) {
                enum iss_pipeline_state state;
                unsigned int start;

                if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
                        state = ISS_PIPELINE_QUEUE_OUTPUT;
                else
                        state = ISS_PIPELINE_QUEUE_INPUT;

                spin_lock_irqsave(&pipe->lock, flags);
                pipe->state |= state;
                video->ops->queue(video, buffer);
                video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_QUEUED;

                start = iss_pipeline_ready(pipe);
                if (start)
                        pipe->state |= ISS_PIPELINE_STREAM;
                spin_unlock_irqrestore(&pipe->lock, flags);

                if (start)
                        omap4iss_pipeline_set_stream(pipe,
                                        ISS_PIPELINE_STREAM_SINGLESHOT);
        }
}

static struct vb2_ops iss_video_vb2ops = {
        .queue_setup = iss_video_queue_setup,
        .buf_prepare = iss_video_buf_prepare,
        .buf_queue = iss_video_buf_queue,
        .buf_cleanup = iss_video_buf_cleanup,
};

/*
 * omap4iss_video_buffer_next - Complete the current buffer and return the next
 * @video: ISS video object
 *
 * Remove the current video buffer from the DMA queue and fill its timestamp,
 * field count and state fields before waking up its completion handler.
 *
 * For capture video nodes, the buffer state is set to VB2_BUF_STATE_DONE if no
 * error has been flagged in the pipeline, or to VB2_BUF_STATE_ERROR otherwise.
 *
 * The DMA queue is expected to contain at least one buffer.
 *
 * Return a pointer to the next buffer in the DMA queue, or NULL if the queue is
 * empty.
 */
struct iss_buffer *omap4iss_video_buffer_next(struct iss_video *video)
{
        struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
        enum iss_pipeline_state state;
        struct iss_buffer *buf;
        unsigned long flags;
        struct timespec ts;

        spin_lock_irqsave(&video->qlock, flags);
        if (WARN_ON(list_empty(&video->dmaqueue))) {
                spin_unlock_irqrestore(&video->qlock, flags);
                return NULL;
        }

        buf = list_first_entry(&video->dmaqueue, struct iss_buffer,
                               list);
        list_del(&buf->list);
        spin_unlock_irqrestore(&video->qlock, flags);

        ktime_get_ts(&ts);
        buf->vb.v4l2_buf.timestamp.tv_sec = ts.tv_sec;
        buf->vb.v4l2_buf.timestamp.tv_usec = ts.tv_nsec / NSEC_PER_USEC;

        /* Do frame number propagation only if this is the output video node.
         * Frame number either comes from the CSI receivers or it gets
         * incremented here if H3A is not active.
         * Note: There is no guarantee that the output buffer will finish
         * first, so the input number might lag behind by 1 in some cases.
         */
        if (video == pipe->output && !pipe->do_propagation)
                buf->vb.v4l2_buf.sequence = atomic_inc_return(&pipe->frame_number);
        else
                buf->vb.v4l2_buf.sequence = atomic_read(&pipe->frame_number);

        vb2_buffer_done(&buf->vb, pipe->error ? VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
        pipe->error = false;

        spin_lock_irqsave(&video->qlock, flags);
        if (list_empty(&video->dmaqueue)) {
                spin_unlock_irqrestore(&video->qlock, flags);
                if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
                        state = ISS_PIPELINE_QUEUE_OUTPUT
                              | ISS_PIPELINE_STREAM;
                else
                        state = ISS_PIPELINE_QUEUE_INPUT
                              | ISS_PIPELINE_STREAM;

                spin_lock_irqsave(&pipe->lock, flags);
                pipe->state &= ~state;
                if (video->pipe.stream_state == ISS_PIPELINE_STREAM_CONTINUOUS)
                        video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN;
                spin_unlock_irqrestore(&pipe->lock, flags);
                return NULL;
        }

        if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE && pipe->input != NULL) {
                spin_lock_irqsave(&pipe->lock, flags);
                pipe->state &= ~ISS_PIPELINE_STREAM;
                spin_unlock_irqrestore(&pipe->lock, flags);
        }

        buf = list_first_entry(&video->dmaqueue, struct iss_buffer,
                               list);
        spin_unlock_irqrestore(&video->qlock, flags);
        buf->vb.state = VB2_BUF_STATE_ACTIVE;
        return buf;
}

/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */

static int
iss_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
{
        struct iss_video *video = video_drvdata(file);

        strlcpy(cap->driver, ISS_VIDEO_DRIVER_NAME, sizeof(cap->driver));
        strlcpy(cap->card, video->video.name, sizeof(cap->card));
        strlcpy(cap->bus_info, "media", sizeof(cap->bus_info));

        if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
                cap->device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING;
        else
                cap->device_caps = V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_STREAMING;

        cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
                          | V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT;

        return 0;
}

static int
iss_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);
        struct iss_video *video = video_drvdata(file);

        if (format->type != video->type)
                return -EINVAL;

        mutex_lock(&video->mutex);
        *format = vfh->format;
        mutex_unlock(&video->mutex);

        return 0;
}

static int
iss_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);
        struct iss_video *video = video_drvdata(file);
        struct v4l2_mbus_framefmt fmt;

        if (format->type != video->type)
                return -EINVAL;

        mutex_lock(&video->mutex);

        /* Fill the bytesperline and sizeimage fields by converting to media bus
         * format and back to pixel format.
         */
        iss_video_pix_to_mbus(&format->fmt.pix, &fmt);
        iss_video_mbus_to_pix(video, &fmt, &format->fmt.pix);

        vfh->format = *format;

        mutex_unlock(&video->mutex);
        return 0;
}

static int
iss_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
{
        struct iss_video *video = video_drvdata(file);
        struct v4l2_subdev_format fmt;
        struct v4l2_subdev *subdev;
        u32 pad;
        int ret;

        if (format->type != video->type)
                return -EINVAL;

        subdev = iss_video_remote_subdev(video, &pad);
        if (subdev == NULL)
                return -EINVAL;

        iss_video_pix_to_mbus(&format->fmt.pix, &fmt.format);

        fmt.pad = pad;
        fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
        ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
        if (ret)
                return ret == -ENOIOCTLCMD ? -EINVAL : ret;

        iss_video_mbus_to_pix(video, &fmt.format, &format->fmt.pix);
        return 0;
}

static int
iss_video_cropcap(struct file *file, void *fh, struct v4l2_cropcap *cropcap)
{
        struct iss_video *video = video_drvdata(file);
        struct v4l2_subdev *subdev;
        int ret;

        subdev = iss_video_remote_subdev(video, NULL);
        if (subdev == NULL)
                return -EINVAL;

        mutex_lock(&video->mutex);
        ret = v4l2_subdev_call(subdev, video, cropcap, cropcap);
        mutex_unlock(&video->mutex);

        return ret == -ENOIOCTLCMD ? -EINVAL : ret;
}

static int
iss_video_get_crop(struct file *file, void *fh, struct v4l2_crop *crop)
{
        struct iss_video *video = video_drvdata(file);
        struct v4l2_subdev_format format;
        struct v4l2_subdev *subdev;
        u32 pad;
        int ret;

        subdev = iss_video_remote_subdev(video, &pad);
        if (subdev == NULL)
                return -EINVAL;

        /* Try the get crop operation first and fall back to get format if not
         * implemented.
         */
        ret = v4l2_subdev_call(subdev, video, g_crop, crop);
        if (ret != -ENOIOCTLCMD)
                return ret;

        format.pad = pad;
        format.which = V4L2_SUBDEV_FORMAT_ACTIVE;
        ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &format);
        if (ret < 0)
                return ret == -ENOIOCTLCMD ? -EINVAL : ret;

        crop->c.left = 0;
        crop->c.top = 0;
        crop->c.width = format.format.width;
        crop->c.height = format.format.height;

        return 0;
}

static int
iss_video_set_crop(struct file *file, void *fh, const struct v4l2_crop *crop)
{
        struct iss_video *video = video_drvdata(file);
        struct v4l2_subdev *subdev;
        int ret;

        subdev = iss_video_remote_subdev(video, NULL);
        if (subdev == NULL)
                return -EINVAL;

        mutex_lock(&video->mutex);
        ret = v4l2_subdev_call(subdev, video, s_crop, crop);
        mutex_unlock(&video->mutex);

        return ret == -ENOIOCTLCMD ? -EINVAL : ret;
}

static int
iss_video_get_param(struct file *file, void *fh, struct v4l2_streamparm *a)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);
        struct iss_video *video = video_drvdata(file);

        if (video->type != V4L2_BUF_TYPE_VIDEO_OUTPUT ||
            video->type != a->type)
                return -EINVAL;

        memset(a, 0, sizeof(*a));
        a->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
        a->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
        a->parm.output.timeperframe = vfh->timeperframe;

        return 0;
}

static int
iss_video_set_param(struct file *file, void *fh, struct v4l2_streamparm *a)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);
        struct iss_video *video = video_drvdata(file);

        if (video->type != V4L2_BUF_TYPE_VIDEO_OUTPUT ||
            video->type != a->type)
                return -EINVAL;

        if (a->parm.output.timeperframe.denominator == 0)
                a->parm.output.timeperframe.denominator = 1;

        vfh->timeperframe = a->parm.output.timeperframe;

        return 0;
}

static int
iss_video_reqbufs(struct file *file, void *fh, struct v4l2_requestbuffers *rb)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);

        return vb2_reqbufs(&vfh->queue, rb);
}

static int
iss_video_querybuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);

        return vb2_querybuf(&vfh->queue, b);
}

static int
iss_video_qbuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);

        return vb2_qbuf(&vfh->queue, b);
}

static int
iss_video_dqbuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);

        return vb2_dqbuf(&vfh->queue, b, file->f_flags & O_NONBLOCK);
}

/*
 * Stream management
 *
 * Every ISS pipeline has a single input and a single output. The input can be
 * either a sensor or a video node. The output is always a video node.
 *
 * As every pipeline has an output video node, the ISS video object at the
 * pipeline output stores the pipeline state. It tracks the streaming state of
 * both the input and output, as well as the availability of buffers.
 *
 * In sensor-to-memory mode, frames are always available at the pipeline input.
 * Starting the sensor usually requires I2C transfers and must be done in
 * interruptible context. The pipeline is started and stopped synchronously
 * to the stream on/off commands. All modules in the pipeline will get their
 * subdev set stream handler called. The module at the end of the pipeline must
 * delay starting the hardware until buffers are available at its output.
 *
 * In memory-to-memory mode, starting/stopping the stream requires
 * synchronization between the input and output. ISS modules can't be stopped
 * in the middle of a frame, and at least some of the modules seem to become
 * busy as soon as they're started, even if they don't receive a frame start
 * event. For that reason frames need to be processed in single-shot mode. The
 * driver needs to wait until a frame is completely processed and written to
 * memory before restarting the pipeline for the next frame. Pipelined
 * processing might be possible but requires more testing.
 *
 * Stream start must be delayed until buffers are available at both the input
 * and output. The pipeline must be started in the videobuf queue callback with
 * the buffers queue spinlock held. The modules' subdev set stream operation
 * must not sleep.
 */
static int
iss_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);
        struct iss_video *video = video_drvdata(file);
        enum iss_pipeline_state state;
        struct iss_pipeline *pipe;
        struct iss_video *far_end;
        unsigned long flags;
        int ret;

        if (type != video->type)
                return -EINVAL;

        mutex_lock(&video->stream_lock);

        /* Start streaming on the pipeline. No link touching an entity in the
         * pipeline can be activated or deactivated once streaming is started.
         */
        pipe = video->video.entity.pipe
             ? to_iss_pipeline(&video->video.entity) : &video->pipe;
        pipe->external = NULL;
        pipe->external_rate = 0;
        pipe->external_bpp = 0;

        if (video->iss->pdata->set_constraints)
                video->iss->pdata->set_constraints(video->iss, true);

        ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe);
        if (ret < 0)
                goto err_media_entity_pipeline_start;

        /* Verify that the currently configured format matches the output of
         * the connected subdev.
         */
        ret = iss_video_check_format(video, vfh);
        if (ret < 0)
                goto err_iss_video_check_format;

        video->bpl_padding = ret;
        video->bpl_value = vfh->format.fmt.pix.bytesperline;

        /* Find the ISS video node connected at the far end of the pipeline and
         * update the pipeline.
         */
        far_end = iss_video_far_end(video);

        if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
                state = ISS_PIPELINE_STREAM_OUTPUT | ISS_PIPELINE_IDLE_OUTPUT;
                pipe->input = far_end;
                pipe->output = video;
        } else {
                if (far_end == NULL) {
                        ret = -EPIPE;
                        goto err_iss_video_check_format;
                }

                state = ISS_PIPELINE_STREAM_INPUT | ISS_PIPELINE_IDLE_INPUT;
                pipe->input = video;
                pipe->output = far_end;
        }

        spin_lock_irqsave(&pipe->lock, flags);
        pipe->state &= ~ISS_PIPELINE_STREAM;
        pipe->state |= state;
        spin_unlock_irqrestore(&pipe->lock, flags);

        /* Set the maximum time per frame as the value requested by userspace.
         * This is a soft limit that can be overridden if the hardware doesn't
         * support the requested limit.
         */
        if (video->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
                pipe->max_timeperframe = vfh->timeperframe;

        video->queue = &vfh->queue;
        INIT_LIST_HEAD(&video->dmaqueue);
        spin_lock_init(&video->qlock);
        atomic_set(&pipe->frame_number, -1);

        ret = vb2_streamon(&vfh->queue, type);
        if (ret < 0)
                goto err_iss_video_check_format;

        /* In sensor-to-memory mode, the stream can be started synchronously
         * to the stream on command. In memory-to-memory mode, it will be
         * started when buffers are queued on both the input and output.
         */
        if (pipe->input == NULL) {
                unsigned long flags;

                ret = omap4iss_pipeline_set_stream(pipe,
                                        ISS_PIPELINE_STREAM_CONTINUOUS);
                if (ret < 0)
                        goto err_omap4iss_set_stream;
                spin_lock_irqsave(&video->qlock, flags);
                if (list_empty(&video->dmaqueue))
                        video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN;
                spin_unlock_irqrestore(&video->qlock, flags);
        }

        if (ret < 0) {
err_omap4iss_set_stream:
                vb2_streamoff(&vfh->queue, type);
err_iss_video_check_format:
                media_entity_pipeline_stop(&video->video.entity);
err_media_entity_pipeline_start:
                if (video->iss->pdata->set_constraints)
                        video->iss->pdata->set_constraints(video->iss, false);
                video->queue = NULL;
        }

        mutex_unlock(&video->stream_lock);
        return ret;
}

static int
iss_video_streamoff(struct file *file, void *fh, enum v4l2_buf_type type)
{
        struct iss_video_fh *vfh = to_iss_video_fh(fh);
        struct iss_video *video = video_drvdata(file);
        struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
        enum iss_pipeline_state state;
        unsigned long flags;

        if (type != video->type)
                return -EINVAL;

        mutex_lock(&video->stream_lock);

        if (!vb2_is_streaming(&vfh->queue))
                goto done;

        /* Update the pipeline state. */
        if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
                state = ISS_PIPELINE_STREAM_OUTPUT
                      | ISS_PIPELINE_QUEUE_OUTPUT;
        else
                state = ISS_PIPELINE_STREAM_INPUT
                      | ISS_PIPELINE_QUEUE_INPUT;

        spin_lock_irqsave(&pipe->lock, flags);
        pipe->state &= ~state;
        spin_unlock_irqrestore(&pipe->lock, flags);

        /* Stop the stream. */
        omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_STOPPED);
        vb2_streamoff(&vfh->queue, type);
        video->queue = NULL;

        if (video->iss->pdata->set_constraints)
                video->iss->pdata->set_constraints(video->iss, false);
        media_entity_pipeline_stop(&video->video.entity);

done:
        mutex_unlock(&video->stream_lock);
        return 0;
}

static int
iss_video_enum_input(struct file *file, void *fh, struct v4l2_input *input)
{
        if (input->index > 0)
                return -EINVAL;

        strlcpy(input->name, "camera", sizeof(input->name));
        input->type = V4L2_INPUT_TYPE_CAMERA;

        return 0;
}

static int
iss_video_g_input(struct file *file, void *fh, unsigned int *input)
{
        *input = 0;

        return 0;
}

static const struct v4l2_ioctl_ops iss_video_ioctl_ops = {
        .vidioc_querycap = iss_video_querycap,
        .vidioc_g_fmt_vid_cap = iss_video_get_format,
        .vidioc_s_fmt_vid_cap = iss_video_set_format,
        .vidioc_try_fmt_vid_cap = iss_video_try_format,
        .vidioc_g_fmt_vid_out = iss_video_get_format,
        .vidioc_s_fmt_vid_out = iss_video_set_format,
        .vidioc_try_fmt_vid_out = iss_video_try_format,
        .vidioc_cropcap = iss_video_cropcap,
        .vidioc_g_crop = iss_video_get_crop,
        .vidioc_s_crop = iss_video_set_crop,
        .vidioc_g_parm = iss_video_get_param,
        .vidioc_s_parm = iss_video_set_param,
        .vidioc_reqbufs = iss_video_reqbufs,
        .vidioc_querybuf = iss_video_querybuf,
        .vidioc_qbuf = iss_video_qbuf,
        .vidioc_dqbuf = iss_video_dqbuf,
        .vidioc_streamon = iss_video_streamon,
        .vidioc_streamoff = iss_video_streamoff,
        .vidioc_enum_input = iss_video_enum_input,
        .vidioc_g_input = iss_video_g_input,
};

/* -----------------------------------------------------------------------------
 * V4L2 file operations
 */

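/*
 * iss_video_open - Open a video device node
 *
 * Allocate and register a file handle, power up the ISS, mark the pipeline
 * entities as in use for power management, create the DMA contiguous
 * allocation context and initialize the vb2 queue and default format for
 * this file handle.
 */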
static int iss_video_open(struct file *file)
{
        struct iss_video *video = video_drvdata(file);
        struct iss_video_fh *handle;
        struct vb2_queue *q;
        int ret = 0;

        handle = kzalloc(sizeof(*handle), GFP_KERNEL);
        if (handle == NULL)
                return -ENOMEM;

        v4l2_fh_init(&handle->vfh, &video->video);
        v4l2_fh_add(&handle->vfh);

        /* If this is the first user, initialise the pipeline. */
        if (omap4iss_get(video->iss) == NULL) {
                ret = -EBUSY;
                goto done;
        }

        ret = omap4iss_pipeline_pm_use(&video->video.entity, 1);
        if (ret < 0) {
                omap4iss_put(video->iss);
                goto done;
        }

        video->alloc_ctx = vb2_dma_contig_init_ctx(video->iss->dev);
        if (IS_ERR(video->alloc_ctx)) {
                ret = PTR_ERR(video->alloc_ctx);
                omap4iss_put(video->iss);
                goto done;
        }

        q = &handle->queue;

        q->type = video->type;
        q->io_modes = VB2_MMAP;
        q->drv_priv = handle;
        q->ops = &iss_video_vb2ops;
        q->mem_ops = &vb2_dma_contig_memops;
        q->buf_struct_size = sizeof(struct iss_buffer);

        ret = vb2_queue_init(q);
        if (ret) {
                omap4iss_put(video->iss);
                goto done;
        }

        memset(&handle->format, 0, sizeof(handle->format));
        handle->format.type = video->type;
        handle->timeperframe.denominator = 1;

        handle->video = video;
        file->private_data = &handle->vfh;

done:
        if (ret < 0) {
                v4l2_fh_del(&handle->vfh);
                kfree(handle);
        }

        return ret;
}

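/*
 * iss_video_release - Release a video device node
 *
 * Stop streaming if needed, release the pipeline power management reference,
 * release the vb2 queue and the file handle, and power down the ISS.
 */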
static int iss_video_release(struct file *file)
{
        struct iss_video *video = video_drvdata(file);
        struct v4l2_fh *vfh = file->private_data;
        struct iss_video_fh *handle = to_iss_video_fh(vfh);

        /* Disable streaming and free the buffers queue resources. */
        iss_video_streamoff(file, vfh, video->type);

        omap4iss_pipeline_pm_use(&video->video.entity, 0);

        /* Release the videobuf2 queue */
        vb2_queue_release(&handle->queue);

        /* Release the file handle. */
        v4l2_fh_del(vfh);
        kfree(handle);
        file->private_data = NULL;

        omap4iss_put(video->iss);

        return 0;
}

static unsigned int iss_video_poll(struct file *file, poll_table *wait)
{
        struct iss_video_fh *vfh = to_iss_video_fh(file->private_data);

        return vb2_poll(&vfh->queue, file, wait);
}

static int iss_video_mmap(struct file *file, struct vm_area_struct *vma)
{
        struct iss_video_fh *vfh = to_iss_video_fh(file->private_data);

        return vb2_mmap(&vfh->queue, vma);
}

static struct v4l2_file_operations iss_video_fops = {
        .owner = THIS_MODULE,
        .unlocked_ioctl = video_ioctl2,
        .open = iss_video_open,
        .release = iss_video_release,
        .poll = iss_video_poll,
        .mmap = iss_video_mmap,
};

/* -----------------------------------------------------------------------------
 * ISS video core
 */

static const struct iss_video_operations iss_video_dummy_ops = {
};

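/*
 * omap4iss_video_init - Initialize an ISS video node
 * @video: ISS video instance
 * @name: name used to build the video device node name
 *
 * Initialize the media entity, the serialization locks and the embedded
 * video_device. The node is registered separately with
 * omap4iss_video_register().
 */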
int omap4iss_video_init(struct iss_video *video, const char *name)
{
        const char *direction;
        int ret;

        switch (video->type) {
        case V4L2_BUF_TYPE_VIDEO_CAPTURE:
                direction = "output";
                video->pad.flags = MEDIA_PAD_FL_SINK;
                break;
        case V4L2_BUF_TYPE_VIDEO_OUTPUT:
                direction = "input";
                video->pad.flags = MEDIA_PAD_FL_SOURCE;
                break;

        default:
                return -EINVAL;
        }

        ret = media_entity_init(&video->video.entity, 1, &video->pad, 0);
        if (ret < 0)
                return ret;

        mutex_init(&video->mutex);
        atomic_set(&video->active, 0);

        spin_lock_init(&video->pipe.lock);
        mutex_init(&video->stream_lock);

        /* Initialize the video device. */
        if (video->ops == NULL)
                video->ops = &iss_video_dummy_ops;

        video->video.fops = &iss_video_fops;
        snprintf(video->video.name, sizeof(video->video.name),
                 "OMAP4 ISS %s %s", name, direction);
        video->video.vfl_type = VFL_TYPE_GRABBER;
        video->video.release = video_device_release_empty;
        video->video.ioctl_ops = &iss_video_ioctl_ops;
        video->pipe.stream_state = ISS_PIPELINE_STREAM_STOPPED;

        video_set_drvdata(&video->video, video);

        return 0;
}

void omap4iss_video_cleanup(struct iss_video *video)
{
        media_entity_cleanup(&video->video.entity);
        mutex_destroy(&video->stream_lock);
        mutex_destroy(&video->mutex);
}

int omap4iss_video_register(struct iss_video *video, struct v4l2_device *vdev)
{
        int ret;

        video->video.v4l2_dev = vdev;

        ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1);
        if (ret < 0)
                printk(KERN_ERR "%s: could not register video device (%d)\n",
                       __func__, ret);

        return ret;
}

void omap4iss_video_unregister(struct iss_video *video)
{
        if (video_is_registered(&video->video))
                video_unregister_device(&video->video);
}