Lines matching "scaled", "output" and "hz" in vivid-kthread-cap.c (the vivid test driver's video/VBI capture kthread). Omitted lines are marked with "..." and each excerpt is labelled with the function it comes from.
// SPDX-License-Identifier: GPL-2.0-only
/*
 * vivid-kthread-cap.h - video/vbi capture thread support functions.
 * ...
 */
...
#include <linux/v4l2-dv-timings.h>
...
#include <media/videobuf2-vmalloc.h>
#include <media/v4l2-dv-timings.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-event.h>
#include <media/v4l2-rect.h>

#include "vivid-core.h"
#include "vivid-vid-common.h"
#include "vivid-vid-cap.h"
#include "vivid-vid-out.h"
#include "vivid-radio-common.h"
#include "vivid-radio-rx.h"
#include "vivid-radio-tx.h"
#include "vivid-sdr-cap.h"
#include "vivid-vbi-cap.h"
#include "vivid-vbi-out.h"
#include "vivid-osd.h"
#include "vivid-ctrls.h"
#include "vivid-kthread-cap.h"
#include "vivid-meta-cap.h"
In vivid_get_std_cap():

    return dev->std_cap[dev->input];
In copy_pix():

    int left = dev->overlay_out_left;
    int top = dev->overlay_out_top;
    ...
    if (dev->bitmap_out) {
        const u8 *p = dev->bitmap_out;
        unsigned stride = (dev->compose_out.width + 7) / 8;

        win_x -= dev->compose_out.left;
        win_y -= dev->compose_out.top;
        ...
    }
    ...
    for (i = 0; i < dev->clipcount_out; i++) {
        struct v4l2_rect *r = &dev->clips_out[i].c;

        if (fb_y >= r->top && fb_y < r->top + r->height &&
            fb_x >= r->left && fb_x < r->left + r->width)
            ...
    }
    if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_CHROMAKEY) &&
        *osd != dev->chromakey_out)
        ...
    if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_SRC_CHROMAKEY) &&
        out == dev->chromakey_out)
        ...
    if (dev->fmt_cap->alpha_mask) {
        if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_GLOBAL_ALPHA) &&
            dev->global_alpha_out)
            ...
        if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_LOCAL_ALPHA) &&
            *cap & dev->fmt_cap->alpha_mask)
            ...
        if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_LOCAL_INV_ALPHA) &&
            !(*cap & dev->fmt_cap->alpha_mask))
            ...
In scale_line():

    ...
    error -= dstw;
    ...
/*
 * ...
 * The nominal pipeline is that the video output buffer is cropped by
 * crop_out, scaled to compose_out, overlaid with the output overlay,
 * cropped on the capture side by crop_cap and scaled again to the video
 * ...
 * video output buffer that is and which part of the video capture buffer
 * so we can scale the video straight from the output buffer to the capture
 * ...
 * If we need to deal with an output overlay, then there is no choice and
 * that intermediate step still has to be taken. For the output overlay
 * ...
 */
In vivid_precalc_copy_rects():

    ...
        0, 0, dev->display_width, dev->display_height
    ...
        dev->overlay_out_left, dev->overlay_out_top,
        dev->compose_out.width, dev->compose_out.height
    ...
    v4l2_rect_intersect(&dev->loop_vid_copy, &dev->crop_cap, &dev->compose_out);

    dev->loop_vid_out = dev->loop_vid_copy;
    v4l2_rect_scale(&dev->loop_vid_out, &dev->compose_out, &dev->crop_out);
    dev->loop_vid_out.left += dev->crop_out.left;
    dev->loop_vid_out.top += dev->crop_out.top;

    dev->loop_vid_cap = dev->loop_vid_copy;
    v4l2_rect_scale(&dev->loop_vid_cap, &dev->crop_cap, &dev->compose_cap);
    ...
        dev->loop_vid_copy.width, dev->loop_vid_copy.height,
        dev->loop_vid_copy.left, dev->loop_vid_copy.top,
        dev->loop_vid_out.width, dev->loop_vid_out.height,
        dev->loop_vid_out.left, dev->loop_vid_out.top,
        dev->loop_vid_cap.width, dev->loop_vid_cap.height,
        dev->loop_vid_cap.left, dev->loop_vid_cap.top);
    ...
    r_overlay.left += dev->compose_out.left - dev->overlay_out_left;
    r_overlay.top += dev->compose_out.top - dev->overlay_out_top;

    v4l2_rect_intersect(&dev->loop_vid_overlay, &r_overlay, &dev->loop_vid_copy);
    dev->loop_fb_copy = dev->loop_vid_overlay;

    /* shift dev->loop_fb_copy back again to the fb origin */
    dev->loop_fb_copy.left -= dev->compose_out.left - dev->overlay_out_left;
    dev->loop_fb_copy.top -= dev->compose_out.top - dev->overlay_out_top;

    dev->loop_vid_overlay_cap = dev->loop_vid_overlay;
    v4l2_rect_scale(&dev->loop_vid_overlay_cap, &dev->crop_cap, &dev->compose_cap);
    ...
        dev->loop_fb_copy.width, dev->loop_fb_copy.height,
        dev->loop_fb_copy.left, dev->loop_fb_copy.top,
        dev->loop_vid_overlay.width, dev->loop_vid_overlay.height,
        dev->loop_vid_overlay.left, dev->loop_vid_overlay.top,
        dev->loop_vid_overlay_cap.width, dev->loop_vid_overlay_cap.height,
        dev->loop_vid_overlay_cap.left, dev->loop_vid_overlay_cap.top);
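The pipeline described in the comment block above boils down to the two helpers from <media/v4l2-rect.h> that appear in the excerpt. The sketch below is not part of the driver; it only exercises the same calls on made-up rectangles (a 1920x1080 output crop, a 960x540 compose window offset inside a 1280x720 capture side) so the intersect-then-scale step is easier to follow. The demo_* names and the values are hypothetical.

    /* Illustrative sketch only, not driver code: same rect helpers, made-up sizes. */
    #include <media/v4l2-rect.h>

    static void demo_loop_rects(void)
    {
        /* Hypothetical stand-ins for dev->crop_cap, dev->compose_out, dev->crop_out. */
        struct v4l2_rect crop_cap    = { .left = 0,   .top = 0,  .width = 1280, .height = 720 };
        struct v4l2_rect compose_out = { .left = 160, .top = 90, .width = 960,  .height = 540 };
        struct v4l2_rect crop_out    = { .left = 0,   .top = 0,  .width = 1920, .height = 1080 };
        struct v4l2_rect copy, out;

        /* Part of the composed output that also lies inside the capture crop. */
        v4l2_rect_intersect(&copy, &crop_cap, &compose_out);

        /* Map that region back into output-buffer coordinates, as loop_vid_out is above. */
        out = copy;
        v4l2_rect_scale(&out, &compose_out, &crop_out);
        out.left += crop_out.left;
        out.top += crop_out.top;
    }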
In plane_vaddr():

    ...
        return vb2_plane_vaddr(&buf->vb.vb2_buf, p);
    vbuf = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
    ...
        vbuf += bpl[i] * h / tpg->vdownsampling[i];
In vivid_copy_buffer():

    bool blank = dev->must_blank[vid_cap_buf->vb.vb2_buf.index];
    struct tpg_data *tpg = &dev->tpg;
    ...
    unsigned vdiv = dev->fmt_out->vdownsampling[p];
    ...
    unsigned img_width = tpg_hdiv(tpg, p, dev->compose_cap.width);
    unsigned img_height = dev->compose_cap.height;
    unsigned stride_cap = tpg->bytesperline[p];
    unsigned stride_out = dev->bytesperline_out[p];
    unsigned stride_osd = dev->display_byte_stride;
    unsigned hmax = (img_height * tpg->perc_fill) / 100;
    ...
    bool blend = dev->bitmap_out || dev->clipcount_out || dev->fbuf_out_flags;
    ...
    unsigned vid_cap_left = tpg_hdiv(tpg, p, dev->loop_vid_cap.left);
    ...
    vid_out_int_part = dev->loop_vid_out.height / dev->loop_vid_cap.height;
    vid_out_fract_part = dev->loop_vid_out.height % dev->loop_vid_cap.height;

    if (!list_empty(&dev->vid_out_active))
        vid_out_buf = list_entry(dev->vid_out_active.next,
            ...
    ...
        return -ENODATA;

    vid_cap_buf->vb.field = vid_out_buf->vb.field;
    ...
        dev->bytesperline_out, dev->fmt_out_rect.height);
    if (p < dev->fmt_out->buffers)
        voutbuf += vid_out_buf->vb.vb2_buf.planes[p].data_offset;
    voutbuf += tpg_hdiv(tpg, p, dev->loop_vid_out.left) +
        (dev->loop_vid_out.top / vdiv) * stride_out;
    vcapbuf += tpg_hdiv(tpg, p, dev->compose_cap.left) +
        (dev->compose_cap.top / vdiv) * stride_cap;

    if (dev->loop_vid_copy.width == 0 || dev->loop_vid_copy.height == 0) {
        ...
            memcpy(vcapbuf, tpg->black_line[p], img_width);
        ...
    }

    if (dev->overlay_out_enabled &&
        dev->loop_vid_overlay.width && dev->loop_vid_overlay.height) {
        vosdbuf = dev->video_vbase;
        vosdbuf += (dev->loop_fb_copy.left * twopixsize) / 2 +
            dev->loop_fb_copy.top * stride_osd;
        vid_overlay_int_part = dev->loop_vid_overlay.height /
            dev->loop_vid_overlay_cap.height;
        vid_overlay_fract_part = dev->loop_vid_overlay.height %
            dev->loop_vid_overlay_cap.height;
    }

    vid_cap_right = tpg_hdiv(tpg, p, dev->loop_vid_cap.left + dev->loop_vid_cap.width);
    ...
    quick = dev->loop_vid_out.width == dev->loop_vid_cap.width;

    dev->cur_scaled_line = dev->loop_vid_out.height;
    ...
        bool osdline = vosdbuf && y >= dev->loop_vid_overlay_cap.top &&
            y < dev->loop_vid_overlay_cap.top + dev->loop_vid_overlay_cap.height;
        ...
        if (y < dev->loop_vid_cap.top ||
            y >= dev->loop_vid_cap.top + dev->loop_vid_cap.height) {
            memcpy(vcapbuf, tpg->black_line[p], img_width);
            ...
        }
        ...
        if (dev->loop_vid_cap.left)
            memcpy(vcapbuf, tpg->black_line[p], vid_cap_left);
        ...
            memcpy(vcapbuf + vid_cap_right, tpg->black_line[p],
                img_width - vid_cap_right);
        ...
                tpg_hdiv(tpg, p, dev->loop_vid_cap.width));
        ...
        if (dev->cur_scaled_line == vid_out_y) {
            memcpy(vcapbuf + vid_cap_left, dev->scaled_line,
                tpg_hdiv(tpg, p, dev->loop_vid_cap.width));
            ...
        }
        ...
            scale_line(voutbuf + vid_out_y * stride_out, dev->scaled_line,
                tpg_hdiv(tpg, p, dev->loop_vid_out.width),
                tpg_hdiv(tpg, p, dev->loop_vid_cap.width),
                ...
        ...
                ((dev->loop_vid_overlay.left - dev->loop_vid_copy.left) *
                ...
            scale_line(voutbuf + vid_out_y * stride_out, dev->blended_line,
                dev->loop_vid_out.width, dev->loop_vid_copy.width,
                ...
            ...
                blend_line(dev, vid_overlay_y + dev->loop_vid_overlay.top,
                    dev->loop_vid_overlay.left,
                    dev->blended_line + offset, osd,
                    dev->loop_vid_overlay.width, twopixsize / 2);
            ...
                memcpy(dev->blended_line + offset,
                    osd, (dev->loop_vid_overlay.width * twopixsize) / 2);
            scale_line(dev->blended_line, dev->scaled_line,
                dev->loop_vid_copy.width, dev->loop_vid_cap.width,
                ...
        ...
        dev->cur_scaled_line = vid_out_y;
        memcpy(vcapbuf + vid_cap_left, dev->scaled_line,
            tpg_hdiv(tpg, p, dev->loop_vid_cap.width));
        ...
            if (vid_overlay_error >= dev->loop_vid_overlay_cap.height) {
                vid_overlay_error -= dev->loop_vid_overlay_cap.height;
                ...
            }
        ...
        if (vid_out_error >= dev->loop_vid_cap.height / vdiv) {
            vid_out_error -= dev->loop_vid_cap.height / vdiv;
            ...
        }
    ...
        memcpy(vcapbuf, tpg->contrast_line[p], img_width);
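The vid_out_int_part / vid_out_fract_part / vid_out_error trio above is a Bresenham-style accumulator: each capture line advances the output line by the integer part of the height ratio and carries the remainder in an error term, so no division is needed inside the per-line loop. A self-contained illustration of the same idea with hypothetical heights (not driver code):

    /* Illustrative only: map 720 capture lines onto 1080 output lines the way the
     * int_part/fract_part/error accumulator in vivid_copy_buffer() advances vid_out_y. */
    #include <stdio.h>

    int main(void)
    {
        unsigned out_h = 1080, cap_h = 720;
        unsigned int_part = out_h / cap_h;      /* 1 */
        unsigned fract_part = out_h % cap_h;    /* 360 */
        unsigned out_y = 0, error = 0, cap_y;

        for (cap_y = 0; cap_y < cap_h; cap_y++) {
            printf("capture line %u reads output line %u\n", cap_y, out_y);
            out_y += int_part;
            error += fract_part;
            if (error >= cap_h) {
                error -= cap_h;
                out_y++;
            }
        }
        return 0;
    }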
In vivid_fillbuff():

    struct tpg_data *tpg = &dev->tpg;
    unsigned factor = V4L2_FIELD_HAS_T_OR_B(dev->field_cap) ? 2 : 1;
    ...
    bool is_60hz = is_tv && (dev->std_cap[dev->input] & V4L2_STD_525_60);
    ...
    if (dev->loop_video && dev->can_loop_video &&
        ...
        !VIVID_INVALID_SIGNAL(dev->std_signal_mode[dev->input])) ||
        ...
        !VIVID_INVALID_SIGNAL(dev->dv_timings_signal_mode[dev->input]))))
        ...

    buf->vb.sequence = dev->vid_cap_seq_count;
    if (dev->field_cap == V4L2_FIELD_ALTERNATE) {
        /*
         * 60 Hz standards start with the bottom field, 50 Hz standards
         * with the top field. So if the 0-based seq_count is even,
         * then the field is TOP for 50 Hz and BOTTOM for 60 Hz
         * ...
         */
        buf->vb.field = ((dev->vid_cap_seq_count & 1) ^ is_60hz) ?
            ...
        ...
        buf->vb.sequence /= 2;
    } else {
        buf->vb.field = dev->field_cap;
    }
    tpg_s_field(tpg, buf->vb.field,
        dev->field_cap == V4L2_FIELD_ALTERNATE);
    tpg_s_perc_fill_blank(tpg, dev->must_blank[buf->vb.vb2_buf.index]);
    ...
            tpg->bytesperline, tpg->buf_height);
        ...
        /*
         * The first plane of a multiplanar format has a non-zero
         * ...
         * correctly supports non-zero data offsets.
         */
        if (p < tpg_g_buffers(tpg) && dev->fmt_cap->data_offset[p]) {
            memset(vbuf, dev->fmt_cap->data_offset[p] & 0xff,
                dev->fmt_cap->data_offset[p]);
            vbuf += dev->fmt_cap->data_offset[p];
        }
        ...
    dev->must_blank[buf->vb.vb2_buf.index] = false;
    ...
    if (dev->field_cap != V4L2_FIELD_ALTERNATE ||
        (dev->vid_cap_seq_count & 1) == 0)
        dev->ms_vid_cap =
            jiffies_to_msecs(jiffies - dev->jiffies_vid_cap);

    ms = dev->ms_vid_cap;
    if (dev->osd_mode <= 1) {
        ...
            buf->vb.sequence,
            (dev->field_cap == V4L2_FIELD_ALTERNATE) ?
                (buf->vb.field == V4L2_FIELD_TOP ?
                ...
        ...
    }
    if (dev->osd_mode == 0) {
        ...
            dev->src_rect.width, dev->src_rect.height, dev->input);
        ...
        gain = v4l2_ctrl_g_ctrl(dev->gain);
        mutex_lock(dev->ctrl_hdl_user_vid.lock);
        ...
            dev->brightness->cur.val,
            dev->contrast->cur.val,
            dev->saturation->cur.val,
            dev->hue->cur.val);
        ...
            dev->autogain->cur.val, gain, dev->alpha->cur.val);
        mutex_unlock(dev->ctrl_hdl_user_vid.lock);
        ...
        mutex_lock(dev->ctrl_hdl_user_aud.lock);
        ...
            dev->volume->cur.val, dev->mute->cur.val);
        mutex_unlock(dev->ctrl_hdl_user_aud.lock);
        ...
        mutex_lock(dev->ctrl_hdl_user_gen.lock);
        ...
            dev->int32->cur.val,
            *dev->int64->p_cur.p_s64,
            dev->bitmask->cur.val);
        ...
            dev->boolean->cur.val,
            dev->menu->qmenu[dev->menu->cur.val],
            dev->string->p_cur.p_char);
        ...
            dev->int_menu->qmenu_int[dev->int_menu->cur.val],
            dev->int_menu->cur.val);
        mutex_unlock(dev->ctrl_hdl_user_gen.lock);
        ...
        if (dev->button_pressed) {
            dev->button_pressed--;
            ...
        }
        ...
        if (dev->osd[0]) {
            ...
                " OSD \"%s\"", dev->osd);
            ...
            if (dev->osd_jiffies &&
                time_is_before_jiffies(dev->osd_jiffies + 5 * HZ)) {
                dev->osd[0] = 0;
                dev->osd_jiffies = 0;
            }
        }
    }
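For V4L2_FIELD_ALTERNATE the comment in vivid_fillbuff() fixes the field order per standard, and the ((vid_cap_seq_count & 1) ^ is_60hz) expression simply encodes that rule. Spelled out as a worked example (the odd rows follow because consecutive fields must alternate):

    seq_count even, 50 Hz: TOP field
    seq_count even, 60 Hz: BOTTOM field
    seq_count odd,  50 Hz: BOTTOM field
    seq_count odd,  60 Hz: TOP field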
In valid_pix():

    if (dev->bitmap_cap) {
        ...
        const u8 *p = dev->bitmap_cap;
        unsigned stride = (dev->compose_cap.width + 7) / 8;
        ...
    }

    for (i = 0; i < dev->clipcount_cap; i++) {
        ...
        struct v4l2_rect *r = &dev->clips_cap[i].c;

        if (fb_y >= r->top && fb_y < r->top + r->height &&
            fb_x >= r->left && fb_x < r->left + r->width)
            ...
    }
In vivid_overlay():

    struct tpg_data *tpg = &dev->tpg;
    ...
    void *vbase = dev->fb_vbase_cap;
    void *vbuf = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
    unsigned img_width = dev->compose_cap.width;
    unsigned img_height = dev->compose_cap.height;
    unsigned stride = tpg->bytesperline[0];
    ...
    bool quick = dev->bitmap_cap == NULL && dev->clipcount_cap == 0;
    ...
    if ((dev->overlay_cap_field == V4L2_FIELD_TOP ||
        dev->overlay_cap_field == V4L2_FIELD_BOTTOM) &&
        dev->overlay_cap_field != buf->vb.field)
        ...

    vbuf += dev->compose_cap.left * pixsize + dev->compose_cap.top * stride;
    x = dev->overlay_cap_left;
    ...
        out_x = -x;
        w = w - out_x;
        ...
        w = dev->fb_cap.fmt.width - x;
    ...
    if (dev->overlay_cap_top >= 0)
        vbase += dev->overlay_cap_top * dev->fb_cap.fmt.bytesperline;
    for (y = dev->overlay_cap_top;
         y < dev->overlay_cap_top + (int)img_height;
         ...
        if (y < 0 || y > dev->fb_cap.fmt.height)
            ...
        ...
            vbase += dev->fb_cap.fmt.bytesperline;
        ...
            if (!valid_pix(dev, y - dev->overlay_cap_top,
                ...
        ...
        vbase += dev->fb_cap.fmt.bytesperline;
    }
In vivid_cap_update_frame_period():

    f_period = (u64)dev->timeperframe_vid_cap.numerator * 1000000000;
    if (WARN_ON(dev->timeperframe_vid_cap.denominator == 0))
        dev->timeperframe_vid_cap.denominator = 1;
    do_div(f_period, dev->timeperframe_vid_cap.denominator);
    if (dev->field_cap == V4L2_FIELD_ALTERNATE)
        ...
    ...
    dev->cap_frame_eof_offset = f_period * 9;
    do_div(dev->cap_frame_eof_offset, 10);
    dev->cap_frame_period = f_period;
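vivid_cap_update_frame_period() turns the timeperframe fraction into a frame period in nanoseconds and derives the end-of-frame timestamp offset as 90% of it (the "* 9" followed by "/ 10" above). A worked example, assuming a 30 fps capture (timeperframe numerator 1, denominator 30):

    f_period             = 1 * 1000000000 / 30 = 33333333 ns   (about 33.3 ms)
    cap_frame_eof_offset = 33333333 * 9 / 10   = 29999999 ns   (roughly 30 ms, 90% into the frame)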
In vivid_thread_vid_cap_tick():

    ...
    while (dropped_bufs-- > 1)
        tpg_update_mv_count(&dev->tpg,
            dev->field_cap == V4L2_FIELD_NONE ||
            dev->field_cap == V4L2_FIELD_ALTERNATE);
    ...
    if (dev->perc_dropped_buffers &&
        prandom_u32_max(100) < dev->perc_dropped_buffers)
        ...

    spin_lock(&dev->slock);
    if (!list_empty(&dev->vid_cap_active)) {
        vid_cap_buf = list_entry(dev->vid_cap_active.next, struct vivid_buffer, list);
        list_del(&vid_cap_buf->list);
    }
    if (!list_empty(&dev->vbi_cap_active)) {
        if (dev->field_cap != V4L2_FIELD_ALTERNATE ||
            (dev->vbi_cap_seq_count & 1)) {
            vbi_cap_buf = list_entry(dev->vbi_cap_active.next,
                ...
            list_del(&vbi_cap_buf->list);
        }
    }
    if (!list_empty(&dev->meta_cap_active)) {
        meta_cap_buf = list_entry(dev->meta_cap_active.next,
            ...
        list_del(&meta_cap_buf->list);
    }

    spin_unlock(&dev->slock);
    ...
    f_time = dev->cap_frame_period * dev->vid_cap_seq_count +
        dev->cap_stream_start + dev->time_wrap_offset;
    ...
        v4l2_ctrl_request_setup(vid_cap_buf->vb.vb2_buf.req_obj.req,
            &dev->ctrl_hdl_vid_cap);
        ...
            vid_cap_buf->vb.vb2_buf.index);
        ...
        if (dev->overlay_cap_owner && dev->fb_cap.base &&
            dev->fb_cap.fmt.pixelformat == dev->fmt_cap->fourcc)
            ...
        v4l2_ctrl_request_complete(vid_cap_buf->vb.vb2_buf.req_obj.req,
            &dev->ctrl_hdl_vid_cap);
        vb2_buffer_done(&vid_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
            ...
            vid_cap_buf->vb.vb2_buf.index);
        ...
        vid_cap_buf->vb.vb2_buf.timestamp = f_time;
        if (!dev->tstamp_src_is_soe)
            vid_cap_buf->vb.vb2_buf.timestamp += dev->cap_frame_eof_offset;
    ...
        v4l2_ctrl_request_setup(vbi_cap_buf->vb.vb2_buf.req_obj.req,
            &dev->ctrl_hdl_vbi_cap);
        if (dev->stream_sliced_vbi_cap)
            ...
        v4l2_ctrl_request_complete(vbi_cap_buf->vb.vb2_buf.req_obj.req,
            &dev->ctrl_hdl_vbi_cap);
        vb2_buffer_done(&vbi_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
            ...
            vbi_cap_buf->vb.vb2_buf.index);
        ...
        vbi_period = dev->cap_frame_period * 5;
        ...
        vbi_cap_buf->vb.vb2_buf.timestamp = f_time + dev->cap_frame_eof_offset + vbi_period;
    ...
        v4l2_ctrl_request_setup(meta_cap_buf->vb.vb2_buf.req_obj.req,
            &dev->ctrl_hdl_meta_cap);
        ...
        v4l2_ctrl_request_complete(meta_cap_buf->vb.vb2_buf.req_obj.req,
            &dev->ctrl_hdl_meta_cap);
        vb2_buffer_done(&meta_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
            ...
            meta_cap_buf->vb.vb2_buf.index);
        meta_cap_buf->vb.vb2_buf.timestamp = f_time + dev->cap_frame_eof_offset;
    ...
    dev->dqbuf_error = false;
    ...
    tpg_update_mv_count(&dev->tpg, dev->field_cap == V4L2_FIELD_NONE ||
        dev->field_cap == V4L2_FIELD_ALTERNATE);
In vivid_thread_vid_cap():

    ...
    dev->cap_seq_offset = 0;
    dev->cap_seq_count = 0;
    dev->cap_seq_resync = false;
    dev->jiffies_vid_cap = jiffies;
    dev->cap_stream_start = ktime_get_ns();
    ...
        if (!mutex_trylock(&dev->mutex)) {
            ...
        }
        ...
        if (dev->cap_seq_resync) {
            dev->jiffies_vid_cap = cur_jiffies;
            dev->cap_seq_offset = dev->cap_seq_count + 1;
            dev->cap_seq_count = 0;
            dev->cap_stream_start += dev->cap_frame_period *
                dev->cap_seq_offset;
            ...
            dev->cap_seq_resync = false;
        }
        numerator = dev->timeperframe_vid_cap.numerator;
        denominator = dev->timeperframe_vid_cap.denominator;

        if (dev->field_cap == V4L2_FIELD_ALTERNATE)
            ...
        ...
        jiffies_since_start = cur_jiffies - dev->jiffies_vid_cap;
        ...
            (HZ * numerator) / 2;
        do_div(buffers_since_start, HZ * numerator);

        /*
         * ...
         * 'jiffies-per-day' to ease jiffies_to_msecs calculation)
         * ...
         */
        ...
            dev->jiffies_vid_cap = cur_jiffies;
            dev->cap_seq_offset = buffers_since_start;
            ...
        dropped_bufs = buffers_since_start + dev->cap_seq_offset - dev->cap_seq_count;
        dev->cap_seq_count = buffers_since_start + dev->cap_seq_offset;
        dev->vid_cap_seq_count = dev->cap_seq_count - dev->vid_cap_seq_start;
        dev->vbi_cap_seq_count = dev->cap_seq_count - dev->vbi_cap_seq_start;
        dev->meta_cap_seq_count = dev->cap_seq_count - dev->meta_cap_seq_start;
        ...
        jiffies_since_start = jiffies - dev->jiffies_vid_cap;

        mutex_unlock(&dev->mutex);
        ...
        next_jiffies_since_start = numerators_since_start * HZ +
            ...
        ...
        wait_jiffies = next_jiffies_since_start - jiffies_since_start;
        ...
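The fragments above pace the capture thread by converting elapsed jiffies into the number of buffers that should have been produced so far, then sleeping until the next buffer is due. The expressions are only partly visible here, so the sketch below is a hedged reconstruction of that arithmetic rather than the driver's code; it assumes HZ=1000 (HZ is a kernel configuration detail) and a 30 fps timeperframe of 1/30.

    /* Illustrative reconstruction of the pacing math, with assumed HZ and frame rate. */
    #include <stdio.h>

    int main(void)
    {
        unsigned long hz = 1000;                 /* assumption: depends on kernel config */
        unsigned numerator = 1, denominator = 30;
        unsigned long jiffies_since_start = 100; /* pretend 100 ms have elapsed */

        /* Buffers due so far, rounded to nearest; mirrors the "+ (HZ * numerator) / 2"
         * term followed by do_div(buffers_since_start, HZ * numerator) above. */
        unsigned long long buffers_since_start =
            ((unsigned long long)jiffies_since_start * denominator +
             (hz * numerator) / 2) / (hz * numerator);

        /* Jiffy at which the next buffer is due, counted from stream start. */
        unsigned long long numerators_since_start = (buffers_since_start + 1) * numerator;
        unsigned long long next_due =
            (numerators_since_start * hz + denominator / 2) / denominator;

        printf("buffers so far: %llu, next due at jiffy %llu, wait %llu jiffies\n",
               buffers_since_start, next_due, next_due - jiffies_since_start);
        return 0;
    }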
In vivid_grab_controls():

    v4l2_ctrl_grab(dev->ctrl_has_crop_cap, grab);
    v4l2_ctrl_grab(dev->ctrl_has_compose_cap, grab);
    v4l2_ctrl_grab(dev->ctrl_has_scaler_cap, grab);
In vivid_start_generating_vid_cap():

    if (dev->kthread_vid_cap) {
        u32 seq_count = dev->cap_seq_count + dev->seq_wrap * 128;

        if (pstreaming == &dev->vid_cap_streaming)
            dev->vid_cap_seq_start = seq_count;
        else if (pstreaming == &dev->vbi_cap_streaming)
            dev->vbi_cap_seq_start = seq_count;
        else
            dev->meta_cap_seq_start = seq_count;
        ...
    }
    ...
    tpg_init_mv_count(&dev->tpg);

    dev->vid_cap_seq_start = dev->seq_wrap * 128;
    dev->vbi_cap_seq_start = dev->seq_wrap * 128;
    dev->meta_cap_seq_start = dev->seq_wrap * 128;

    dev->kthread_vid_cap = kthread_run(vivid_thread_vid_cap, dev,
        "%s-vid-cap", dev->v4l2_dev.name);

    if (IS_ERR(dev->kthread_vid_cap)) {
        int err = PTR_ERR(dev->kthread_vid_cap);

        dev->kthread_vid_cap = NULL;
        v4l2_err(&dev->v4l2_dev, "kernel_thread() failed\n");
        ...
    }
In vivid_stop_generating_vid_cap():

    if (dev->kthread_vid_cap == NULL)
        ...
    ...
    if (pstreaming == &dev->vid_cap_streaming) {
        ...
        while (!list_empty(&dev->vid_cap_active)) {
            ...
            buf = list_entry(dev->vid_cap_active.next,
                ...
            list_del(&buf->list);
            v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
                &dev->ctrl_hdl_vid_cap);
            vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
            ...
                buf->vb.vb2_buf.index);
        }
    }
    ...
    if (pstreaming == &dev->vbi_cap_streaming) {
        while (!list_empty(&dev->vbi_cap_active)) {
            ...
            buf = list_entry(dev->vbi_cap_active.next,
                ...
            list_del(&buf->list);
            v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
                &dev->ctrl_hdl_vbi_cap);
            vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
            ...
                buf->vb.vb2_buf.index);
        }
    }
    ...
    if (pstreaming == &dev->meta_cap_streaming) {
        while (!list_empty(&dev->meta_cap_active)) {
            ...
            buf = list_entry(dev->meta_cap_active.next,
                ...
            list_del(&buf->list);
            v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
                &dev->ctrl_hdl_meta_cap);
            vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
            ...
                buf->vb.vb2_buf.index);
        }
    }
    ...
    if (dev->vid_cap_streaming || dev->vbi_cap_streaming ||
        dev->meta_cap_streaming)
        ...
    ...
    kthread_stop(dev->kthread_vid_cap);
    dev->kthread_vid_cap = NULL;