/*
 * Copyright (c) 2011 - 2012 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung EXYNOS5 SoC series G-Scaler driver
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 2 of the License,
 * or (at your option) any later version.
 */
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bug.h>
#include <linux/interrupt.h>
#include <linux/workqueue.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/list.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/clk.h>

#include <media/v4l2-ioctl.h>

#include "gsc-core.h"
31 static int gsc_m2m_ctx_stop_req(struct gsc_ctx
*ctx
)
33 struct gsc_ctx
*curr_ctx
;
34 struct gsc_dev
*gsc
= ctx
->gsc_dev
;
37 curr_ctx
= v4l2_m2m_get_curr_priv(gsc
->m2m
.m2m_dev
);
38 if (!gsc_m2m_pending(gsc
) || (curr_ctx
!= ctx
))
41 gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ
, ctx
);
42 ret
= wait_event_timeout(gsc
->irq_queue
,
43 !gsc_ctx_state_is_set(GSC_CTX_STOP_REQ
, ctx
),
44 GSC_SHUTDOWN_TIMEOUT
);
46 return ret
== 0 ? -ETIMEDOUT
: ret
;
49 static int gsc_m2m_start_streaming(struct vb2_queue
*q
, unsigned int count
)
51 struct gsc_ctx
*ctx
= q
->drv_priv
;
54 ret
= pm_runtime_get_sync(&ctx
->gsc_dev
->pdev
->dev
);
55 return ret
> 0 ? 0 : ret
;
58 static int gsc_m2m_stop_streaming(struct vb2_queue
*q
)
60 struct gsc_ctx
*ctx
= q
->drv_priv
;
63 ret
= gsc_m2m_ctx_stop_req(ctx
);
64 if (ret
== -ETIMEDOUT
)
65 gsc_m2m_job_finish(ctx
, VB2_BUF_STATE_ERROR
);
67 pm_runtime_put(&ctx
->gsc_dev
->pdev
->dev
);
72 void gsc_m2m_job_finish(struct gsc_ctx
*ctx
, int vb_state
)
74 struct vb2_buffer
*src_vb
, *dst_vb
;
76 if (!ctx
|| !ctx
->m2m_ctx
)
79 src_vb
= v4l2_m2m_src_buf_remove(ctx
->m2m_ctx
);
80 dst_vb
= v4l2_m2m_dst_buf_remove(ctx
->m2m_ctx
);
82 if (src_vb
&& dst_vb
) {
83 v4l2_m2m_buf_done(src_vb
, vb_state
);
84 v4l2_m2m_buf_done(dst_vb
, vb_state
);
86 v4l2_m2m_job_finish(ctx
->gsc_dev
->m2m
.m2m_dev
,
92 static void gsc_m2m_job_abort(void *priv
)
94 struct gsc_ctx
*ctx
= priv
;
97 ret
= gsc_m2m_ctx_stop_req(ctx
);
98 if (ret
== -ETIMEDOUT
)
99 gsc_m2m_job_finish(ctx
, VB2_BUF_STATE_ERROR
);
102 static int gsc_fill_addr(struct gsc_ctx
*ctx
)
104 struct gsc_frame
*s_frame
, *d_frame
;
105 struct vb2_buffer
*vb
= NULL
;
108 s_frame
= &ctx
->s_frame
;
109 d_frame
= &ctx
->d_frame
;
111 vb
= v4l2_m2m_next_src_buf(ctx
->m2m_ctx
);
112 ret
= gsc_prepare_addr(ctx
, vb
, s_frame
, &s_frame
->addr
);
116 vb
= v4l2_m2m_next_dst_buf(ctx
->m2m_ctx
);
117 return gsc_prepare_addr(ctx
, vb
, d_frame
, &d_frame
->addr
);
120 static void gsc_m2m_device_run(void *priv
)
122 struct gsc_ctx
*ctx
= priv
;
128 if (WARN(!ctx
, "null hardware context\n"))
132 spin_lock_irqsave(&gsc
->slock
, flags
);
134 set_bit(ST_M2M_PEND
, &gsc
->state
);
136 /* Reconfigure hardware if the context has changed. */
137 if (gsc
->m2m
.ctx
!= ctx
) {
138 pr_debug("gsc->m2m.ctx = 0x%p, current_ctx = 0x%p",
140 ctx
->state
|= GSC_PARAMS
;
144 is_set
= (ctx
->state
& GSC_CTX_STOP_REQ
) ? 1 : 0;
145 ctx
->state
&= ~GSC_CTX_STOP_REQ
;
147 wake_up(&gsc
->irq_queue
);
151 ret
= gsc_fill_addr(ctx
);
153 pr_err("Wrong address");
157 gsc_set_prefbuf(gsc
, &ctx
->s_frame
);
158 gsc_hw_set_input_addr(gsc
, &ctx
->s_frame
.addr
, GSC_M2M_BUF_NUM
);
159 gsc_hw_set_output_addr(gsc
, &ctx
->d_frame
.addr
, GSC_M2M_BUF_NUM
);
161 if (ctx
->state
& GSC_PARAMS
) {
162 gsc_hw_set_input_buf_masking(gsc
, GSC_M2M_BUF_NUM
, false);
163 gsc_hw_set_output_buf_masking(gsc
, GSC_M2M_BUF_NUM
, false);
164 gsc_hw_set_frm_done_irq_mask(gsc
, false);
165 gsc_hw_set_gsc_irq_enable(gsc
, true);
167 if (gsc_set_scaler_info(ctx
)) {
168 pr_err("Scaler setup error");
172 gsc_hw_set_input_path(ctx
);
173 gsc_hw_set_in_size(ctx
);
174 gsc_hw_set_in_image_format(ctx
);
176 gsc_hw_set_output_path(ctx
);
177 gsc_hw_set_out_size(ctx
);
178 gsc_hw_set_out_image_format(ctx
);
180 gsc_hw_set_prescaler(ctx
);
181 gsc_hw_set_mainscaler(ctx
);
182 gsc_hw_set_rotation(ctx
);
183 gsc_hw_set_global_alpha(ctx
);
186 /* update shadow registers */
187 gsc_hw_set_sfr_update(ctx
);
189 ctx
->state
&= ~GSC_PARAMS
;
190 gsc_hw_enable_control(gsc
, true);
192 spin_unlock_irqrestore(&gsc
->slock
, flags
);
196 ctx
->state
&= ~GSC_PARAMS
;
197 spin_unlock_irqrestore(&gsc
->slock
, flags
);
200 static int gsc_m2m_queue_setup(struct vb2_queue
*vq
,
201 const struct v4l2_format
*fmt
,
202 unsigned int *num_buffers
, unsigned int *num_planes
,
203 unsigned int sizes
[], void *allocators
[])
205 struct gsc_ctx
*ctx
= vb2_get_drv_priv(vq
);
206 struct gsc_frame
*frame
;
209 frame
= ctx_get_frame(ctx
, vq
->type
);
211 return PTR_ERR(frame
);
216 *num_planes
= frame
->fmt
->num_planes
;
217 for (i
= 0; i
< frame
->fmt
->num_planes
; i
++) {
218 sizes
[i
] = frame
->payload
[i
];
219 allocators
[i
] = ctx
->gsc_dev
->alloc_ctx
;
224 static int gsc_m2m_buf_prepare(struct vb2_buffer
*vb
)
226 struct gsc_ctx
*ctx
= vb2_get_drv_priv(vb
->vb2_queue
);
227 struct gsc_frame
*frame
;
230 frame
= ctx_get_frame(ctx
, vb
->vb2_queue
->type
);
232 return PTR_ERR(frame
);
234 if (!V4L2_TYPE_IS_OUTPUT(vb
->vb2_queue
->type
)) {
235 for (i
= 0; i
< frame
->fmt
->num_planes
; i
++)
236 vb2_set_plane_payload(vb
, i
, frame
->payload
[i
]);
242 static void gsc_m2m_buf_queue(struct vb2_buffer
*vb
)
244 struct gsc_ctx
*ctx
= vb2_get_drv_priv(vb
->vb2_queue
);
246 pr_debug("ctx: %p, ctx->state: 0x%x", ctx
, ctx
->state
);
249 v4l2_m2m_buf_queue(ctx
->m2m_ctx
, vb
);
252 static struct vb2_ops gsc_m2m_qops
= {
253 .queue_setup
= gsc_m2m_queue_setup
,
254 .buf_prepare
= gsc_m2m_buf_prepare
,
255 .buf_queue
= gsc_m2m_buf_queue
,
256 .wait_prepare
= gsc_unlock
,
257 .wait_finish
= gsc_lock
,
258 .stop_streaming
= gsc_m2m_stop_streaming
,
259 .start_streaming
= gsc_m2m_start_streaming
,
262 static int gsc_m2m_querycap(struct file
*file
, void *fh
,
263 struct v4l2_capability
*cap
)
265 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
266 struct gsc_dev
*gsc
= ctx
->gsc_dev
;
268 strlcpy(cap
->driver
, gsc
->pdev
->name
, sizeof(cap
->driver
));
269 strlcpy(cap
->card
, gsc
->pdev
->name
, sizeof(cap
->card
));
270 strlcpy(cap
->bus_info
, "platform", sizeof(cap
->bus_info
));
271 cap
->device_caps
= V4L2_CAP_STREAMING
| V4L2_CAP_VIDEO_M2M_MPLANE
|
272 V4L2_CAP_VIDEO_CAPTURE_MPLANE
| V4L2_CAP_VIDEO_OUTPUT_MPLANE
;
274 cap
->capabilities
= cap
->device_caps
| V4L2_CAP_DEVICE_CAPS
;
/* VIDIOC_ENUM_FMT: delegate format enumeration to the core helper. */
static int gsc_m2m_enum_fmt_mplane(struct file *file, void *priv,
				struct v4l2_fmtdesc *f)
{
	return gsc_enum_fmt_mplane(f);
}
/* VIDIOC_G_FMT: delegate to the core get-format helper for this ctx. */
static int gsc_m2m_g_fmt_mplane(struct file *file, void *fh,
			     struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_g_fmt_mplane(ctx, f);
}
/* VIDIOC_TRY_FMT: delegate to the core try-format helper for this ctx. */
static int gsc_m2m_try_fmt_mplane(struct file *file, void *fh,
				  struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_try_fmt_mplane(ctx, f);
}
300 static int gsc_m2m_s_fmt_mplane(struct file
*file
, void *fh
,
301 struct v4l2_format
*f
)
303 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
304 struct vb2_queue
*vq
;
305 struct gsc_frame
*frame
;
306 struct v4l2_pix_format_mplane
*pix
;
309 ret
= gsc_m2m_try_fmt_mplane(file
, fh
, f
);
313 vq
= v4l2_m2m_get_vq(ctx
->m2m_ctx
, f
->type
);
315 if (vb2_is_streaming(vq
)) {
316 pr_err("queue (%d) busy", f
->type
);
320 if (V4L2_TYPE_IS_OUTPUT(f
->type
))
321 frame
= &ctx
->s_frame
;
323 frame
= &ctx
->d_frame
;
325 pix
= &f
->fmt
.pix_mp
;
326 frame
->fmt
= find_fmt(&pix
->pixelformat
, NULL
, 0);
327 frame
->colorspace
= pix
->colorspace
;
331 for (i
= 0; i
< frame
->fmt
->num_planes
; i
++)
332 frame
->payload
[i
] = pix
->plane_fmt
[i
].sizeimage
;
334 gsc_set_frame_size(frame
, pix
->width
, pix
->height
);
336 if (f
->type
== V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
)
337 gsc_ctx_state_lock_set(GSC_PARAMS
| GSC_DST_FMT
, ctx
);
339 gsc_ctx_state_lock_set(GSC_PARAMS
| GSC_SRC_FMT
, ctx
);
341 pr_debug("f_w: %d, f_h: %d", frame
->f_width
, frame
->f_height
);
346 static int gsc_m2m_reqbufs(struct file
*file
, void *fh
,
347 struct v4l2_requestbuffers
*reqbufs
)
349 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
350 struct gsc_dev
*gsc
= ctx
->gsc_dev
;
351 struct gsc_frame
*frame
;
354 max_cnt
= (reqbufs
->type
== V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE
) ?
355 gsc
->variant
->in_buf_cnt
: gsc
->variant
->out_buf_cnt
;
356 if (reqbufs
->count
> max_cnt
) {
358 } else if (reqbufs
->count
== 0) {
359 if (reqbufs
->type
== V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE
)
360 gsc_ctx_state_lock_clear(GSC_SRC_FMT
, ctx
);
362 gsc_ctx_state_lock_clear(GSC_DST_FMT
, ctx
);
365 frame
= ctx_get_frame(ctx
, reqbufs
->type
);
367 return v4l2_m2m_reqbufs(file
, ctx
->m2m_ctx
, reqbufs
);
370 static int gsc_m2m_querybuf(struct file
*file
, void *fh
,
371 struct v4l2_buffer
*buf
)
373 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
374 return v4l2_m2m_querybuf(file
, ctx
->m2m_ctx
, buf
);
377 static int gsc_m2m_qbuf(struct file
*file
, void *fh
,
378 struct v4l2_buffer
*buf
)
380 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
381 return v4l2_m2m_qbuf(file
, ctx
->m2m_ctx
, buf
);
384 static int gsc_m2m_dqbuf(struct file
*file
, void *fh
,
385 struct v4l2_buffer
*buf
)
387 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
388 return v4l2_m2m_dqbuf(file
, ctx
->m2m_ctx
, buf
);
391 static int gsc_m2m_streamon(struct file
*file
, void *fh
,
392 enum v4l2_buf_type type
)
394 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
396 /* The source and target color format need to be set */
397 if (V4L2_TYPE_IS_OUTPUT(type
)) {
398 if (!gsc_ctx_state_is_set(GSC_SRC_FMT
, ctx
))
400 } else if (!gsc_ctx_state_is_set(GSC_DST_FMT
, ctx
)) {
404 return v4l2_m2m_streamon(file
, ctx
->m2m_ctx
, type
);
407 static int gsc_m2m_streamoff(struct file
*file
, void *fh
,
408 enum v4l2_buf_type type
)
410 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
411 return v4l2_m2m_streamoff(file
, ctx
->m2m_ctx
, type
);
414 /* Return 1 if rectangle a is enclosed in rectangle b, or 0 otherwise. */
415 static int is_rectangle_enclosed(struct v4l2_rect
*a
, struct v4l2_rect
*b
)
417 if (a
->left
< b
->left
|| a
->top
< b
->top
)
420 if (a
->left
+ a
->width
> b
->left
+ b
->width
)
423 if (a
->top
+ a
->height
> b
->top
+ b
->height
)
429 static int gsc_m2m_g_selection(struct file
*file
, void *fh
,
430 struct v4l2_selection
*s
)
432 struct gsc_frame
*frame
;
433 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
435 if ((s
->type
!= V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
) &&
436 (s
->type
!= V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE
))
439 frame
= ctx_get_frame(ctx
, s
->type
);
441 return PTR_ERR(frame
);
444 case V4L2_SEL_TGT_COMPOSE_DEFAULT
:
445 case V4L2_SEL_TGT_COMPOSE_BOUNDS
:
446 case V4L2_SEL_TGT_CROP_BOUNDS
:
447 case V4L2_SEL_TGT_CROP_DEFAULT
:
450 s
->r
.width
= frame
->f_width
;
451 s
->r
.height
= frame
->f_height
;
454 case V4L2_SEL_TGT_COMPOSE
:
455 case V4L2_SEL_TGT_CROP
:
456 s
->r
.left
= frame
->crop
.left
;
457 s
->r
.top
= frame
->crop
.top
;
458 s
->r
.width
= frame
->crop
.width
;
459 s
->r
.height
= frame
->crop
.height
;
466 static int gsc_m2m_s_selection(struct file
*file
, void *fh
,
467 struct v4l2_selection
*s
)
469 struct gsc_frame
*frame
;
470 struct gsc_ctx
*ctx
= fh_to_ctx(fh
);
472 struct gsc_variant
*variant
= ctx
->gsc_dev
->variant
;
478 if ((s
->type
!= V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
) &&
479 (s
->type
!= V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE
))
482 ret
= gsc_try_crop(ctx
, &cr
);
486 if (s
->flags
& V4L2_SEL_FLAG_LE
&&
487 !is_rectangle_enclosed(&cr
.c
, &s
->r
))
490 if (s
->flags
& V4L2_SEL_FLAG_GE
&&
491 !is_rectangle_enclosed(&s
->r
, &cr
.c
))
497 case V4L2_SEL_TGT_COMPOSE_BOUNDS
:
498 case V4L2_SEL_TGT_COMPOSE_DEFAULT
:
499 case V4L2_SEL_TGT_COMPOSE
:
500 frame
= &ctx
->s_frame
;
503 case V4L2_SEL_TGT_CROP_BOUNDS
:
504 case V4L2_SEL_TGT_CROP
:
505 case V4L2_SEL_TGT_CROP_DEFAULT
:
506 frame
= &ctx
->d_frame
;
513 /* Check to see if scaling ratio is within supported range */
514 if (gsc_ctx_state_is_set(GSC_DST_FMT
| GSC_SRC_FMT
, ctx
)) {
515 if (s
->type
== V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE
) {
516 ret
= gsc_check_scaler_ratio(variant
, cr
.c
.width
,
517 cr
.c
.height
, ctx
->d_frame
.crop
.width
,
518 ctx
->d_frame
.crop
.height
,
519 ctx
->gsc_ctrls
.rotate
->val
, ctx
->out_path
);
521 ret
= gsc_check_scaler_ratio(variant
,
522 ctx
->s_frame
.crop
.width
,
523 ctx
->s_frame
.crop
.height
, cr
.c
.width
,
524 cr
.c
.height
, ctx
->gsc_ctrls
.rotate
->val
,
529 pr_err("Out of scaler range");
536 gsc_ctx_state_lock_set(GSC_PARAMS
, ctx
);
540 static const struct v4l2_ioctl_ops gsc_m2m_ioctl_ops
= {
541 .vidioc_querycap
= gsc_m2m_querycap
,
542 .vidioc_enum_fmt_vid_cap_mplane
= gsc_m2m_enum_fmt_mplane
,
543 .vidioc_enum_fmt_vid_out_mplane
= gsc_m2m_enum_fmt_mplane
,
544 .vidioc_g_fmt_vid_cap_mplane
= gsc_m2m_g_fmt_mplane
,
545 .vidioc_g_fmt_vid_out_mplane
= gsc_m2m_g_fmt_mplane
,
546 .vidioc_try_fmt_vid_cap_mplane
= gsc_m2m_try_fmt_mplane
,
547 .vidioc_try_fmt_vid_out_mplane
= gsc_m2m_try_fmt_mplane
,
548 .vidioc_s_fmt_vid_cap_mplane
= gsc_m2m_s_fmt_mplane
,
549 .vidioc_s_fmt_vid_out_mplane
= gsc_m2m_s_fmt_mplane
,
550 .vidioc_reqbufs
= gsc_m2m_reqbufs
,
551 .vidioc_querybuf
= gsc_m2m_querybuf
,
552 .vidioc_qbuf
= gsc_m2m_qbuf
,
553 .vidioc_dqbuf
= gsc_m2m_dqbuf
,
554 .vidioc_streamon
= gsc_m2m_streamon
,
555 .vidioc_streamoff
= gsc_m2m_streamoff
,
556 .vidioc_g_selection
= gsc_m2m_g_selection
,
557 .vidioc_s_selection
= gsc_m2m_s_selection
560 static int queue_init(void *priv
, struct vb2_queue
*src_vq
,
561 struct vb2_queue
*dst_vq
)
563 struct gsc_ctx
*ctx
= priv
;
566 memset(src_vq
, 0, sizeof(*src_vq
));
567 src_vq
->type
= V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE
;
568 src_vq
->io_modes
= VB2_MMAP
| VB2_USERPTR
;
569 src_vq
->drv_priv
= ctx
;
570 src_vq
->ops
= &gsc_m2m_qops
;
571 src_vq
->mem_ops
= &vb2_dma_contig_memops
;
572 src_vq
->buf_struct_size
= sizeof(struct v4l2_m2m_buffer
);
574 ret
= vb2_queue_init(src_vq
);
578 memset(dst_vq
, 0, sizeof(*dst_vq
));
579 dst_vq
->type
= V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
;
580 dst_vq
->io_modes
= VB2_MMAP
| VB2_USERPTR
;
581 dst_vq
->drv_priv
= ctx
;
582 dst_vq
->ops
= &gsc_m2m_qops
;
583 dst_vq
->mem_ops
= &vb2_dma_contig_memops
;
584 dst_vq
->buf_struct_size
= sizeof(struct v4l2_m2m_buffer
);
586 return vb2_queue_init(dst_vq
);
589 static int gsc_m2m_open(struct file
*file
)
591 struct gsc_dev
*gsc
= video_drvdata(file
);
592 struct gsc_ctx
*ctx
= NULL
;
595 pr_debug("pid: %d, state: 0x%lx", task_pid_nr(current
), gsc
->state
);
597 if (mutex_lock_interruptible(&gsc
->lock
))
600 ctx
= kzalloc(sizeof (*ctx
), GFP_KERNEL
);
606 v4l2_fh_init(&ctx
->fh
, gsc
->m2m
.vfd
);
607 ret
= gsc_ctrls_create(ctx
);
611 /* Use separate control handler per file handle */
612 ctx
->fh
.ctrl_handler
= &ctx
->ctrl_handler
;
613 file
->private_data
= &ctx
->fh
;
614 v4l2_fh_add(&ctx
->fh
);
617 /* Default color format */
618 ctx
->s_frame
.fmt
= get_format(0);
619 ctx
->d_frame
.fmt
= get_format(0);
620 /* Setup the device context for mem2mem mode. */
621 ctx
->state
= GSC_CTX_M2M
;
623 ctx
->in_path
= GSC_DMA
;
624 ctx
->out_path
= GSC_DMA
;
626 ctx
->m2m_ctx
= v4l2_m2m_ctx_init(gsc
->m2m
.m2m_dev
, ctx
, queue_init
);
627 if (IS_ERR(ctx
->m2m_ctx
)) {
628 pr_err("Failed to initialize m2m context");
629 ret
= PTR_ERR(ctx
->m2m_ctx
);
633 if (gsc
->m2m
.refcnt
++ == 0)
634 set_bit(ST_M2M_OPEN
, &gsc
->state
);
636 pr_debug("gsc m2m driver is opened, ctx(0x%p)", ctx
);
638 mutex_unlock(&gsc
->lock
);
642 gsc_ctrls_delete(ctx
);
644 v4l2_fh_del(&ctx
->fh
);
645 v4l2_fh_exit(&ctx
->fh
);
648 mutex_unlock(&gsc
->lock
);
652 static int gsc_m2m_release(struct file
*file
)
654 struct gsc_ctx
*ctx
= fh_to_ctx(file
->private_data
);
655 struct gsc_dev
*gsc
= ctx
->gsc_dev
;
657 pr_debug("pid: %d, state: 0x%lx, refcnt= %d",
658 task_pid_nr(current
), gsc
->state
, gsc
->m2m
.refcnt
);
660 if (mutex_lock_interruptible(&gsc
->lock
))
663 v4l2_m2m_ctx_release(ctx
->m2m_ctx
);
664 gsc_ctrls_delete(ctx
);
665 v4l2_fh_del(&ctx
->fh
);
666 v4l2_fh_exit(&ctx
->fh
);
668 if (--gsc
->m2m
.refcnt
<= 0)
669 clear_bit(ST_M2M_OPEN
, &gsc
->state
);
672 mutex_unlock(&gsc
->lock
);
676 static unsigned int gsc_m2m_poll(struct file
*file
,
677 struct poll_table_struct
*wait
)
679 struct gsc_ctx
*ctx
= fh_to_ctx(file
->private_data
);
680 struct gsc_dev
*gsc
= ctx
->gsc_dev
;
683 if (mutex_lock_interruptible(&gsc
->lock
))
686 ret
= v4l2_m2m_poll(file
, ctx
->m2m_ctx
, wait
);
687 mutex_unlock(&gsc
->lock
);
692 static int gsc_m2m_mmap(struct file
*file
, struct vm_area_struct
*vma
)
694 struct gsc_ctx
*ctx
= fh_to_ctx(file
->private_data
);
695 struct gsc_dev
*gsc
= ctx
->gsc_dev
;
698 if (mutex_lock_interruptible(&gsc
->lock
))
701 ret
= v4l2_m2m_mmap(file
, ctx
->m2m_ctx
, vma
);
702 mutex_unlock(&gsc
->lock
);
707 static const struct v4l2_file_operations gsc_m2m_fops
= {
708 .owner
= THIS_MODULE
,
709 .open
= gsc_m2m_open
,
710 .release
= gsc_m2m_release
,
711 .poll
= gsc_m2m_poll
,
712 .unlocked_ioctl
= video_ioctl2
,
713 .mmap
= gsc_m2m_mmap
,
716 static struct v4l2_m2m_ops gsc_m2m_ops
= {
717 .device_run
= gsc_m2m_device_run
,
718 .job_abort
= gsc_m2m_job_abort
,
721 int gsc_register_m2m_device(struct gsc_dev
*gsc
)
723 struct platform_device
*pdev
;
731 gsc
->vdev
.fops
= &gsc_m2m_fops
;
732 gsc
->vdev
.ioctl_ops
= &gsc_m2m_ioctl_ops
;
733 gsc
->vdev
.release
= video_device_release_empty
;
734 gsc
->vdev
.lock
= &gsc
->lock
;
735 snprintf(gsc
->vdev
.name
, sizeof(gsc
->vdev
.name
), "%s.%d:m2m",
736 GSC_MODULE_NAME
, gsc
->id
);
738 video_set_drvdata(&gsc
->vdev
, gsc
);
740 gsc
->m2m
.vfd
= &gsc
->vdev
;
741 gsc
->m2m
.m2m_dev
= v4l2_m2m_init(&gsc_m2m_ops
);
742 if (IS_ERR(gsc
->m2m
.m2m_dev
)) {
743 dev_err(&pdev
->dev
, "failed to initialize v4l2-m2m device\n");
744 ret
= PTR_ERR(gsc
->m2m
.m2m_dev
);
748 ret
= video_register_device(&gsc
->vdev
, VFL_TYPE_GRABBER
, -1);
751 "%s(): failed to register video device\n", __func__
);
755 pr_debug("gsc m2m driver registered as /dev/video%d", gsc
->vdev
.num
);
759 v4l2_m2m_release(gsc
->m2m
.m2m_dev
);
761 video_device_release(gsc
->m2m
.vfd
);
766 void gsc_unregister_m2m_device(struct gsc_dev
*gsc
)
769 v4l2_m2m_release(gsc
->m2m
.m2m_dev
);