Lines Matching full:vb
54 #define log_memop(vb, op) \ argument
55 dprintk((vb)->vb2_queue, 2, "call_memop(%d, %s)%s\n", \
56 (vb)->index, #op, \
57 (vb)->vb2_queue->mem_ops->op ? "" : " (nop)")
59 #define call_memop(vb, op, args...) \ argument
61 struct vb2_queue *_q = (vb)->vb2_queue; \
64 log_memop(vb, op); \
67 (vb)->cnt_mem_ ## op++; \
71 #define call_ptr_memop(vb, op, args...) \ argument
73 struct vb2_queue *_q = (vb)->vb2_queue; \
76 log_memop(vb, op); \
79 (vb)->cnt_mem_ ## op++; \
83 #define call_void_memop(vb, op, args...) \ argument
85 struct vb2_queue *_q = (vb)->vb2_queue; \
87 log_memop(vb, op); \
90 (vb)->cnt_mem_ ## op++; \
116 #define log_vb_qop(vb, op, args...) \ argument
117 dprintk((vb)->vb2_queue, 2, "call_vb_qop(%d, %s)%s\n", \
118 (vb)->index, #op, \
119 (vb)->vb2_queue->ops->op ? "" : " (nop)")
121 #define call_vb_qop(vb, op, args...) \ argument
125 log_vb_qop(vb, op); \
126 err = (vb)->vb2_queue->ops->op ? \
127 (vb)->vb2_queue->ops->op(args) : 0; \
129 (vb)->cnt_ ## op++; \
133 #define call_void_vb_qop(vb, op, args...) \ argument
135 log_vb_qop(vb, op); \
136 if ((vb)->vb2_queue->ops->op) \
137 (vb)->vb2_queue->ops->op(args); \
138 (vb)->cnt_ ## op++; \
143 #define call_memop(vb, op, args...) \ argument
144 ((vb)->vb2_queue->mem_ops->op ? \
145 (vb)->vb2_queue->mem_ops->op(args) : 0)
147 #define call_ptr_memop(vb, op, args...) \ argument
148 ((vb)->vb2_queue->mem_ops->op ? \
149 (vb)->vb2_queue->mem_ops->op(args) : NULL)
151 #define call_void_memop(vb, op, args...) \ argument
153 if ((vb)->vb2_queue->mem_ops->op) \
154 (vb)->vb2_queue->mem_ops->op(args); \
166 #define call_vb_qop(vb, op, args...) \ argument
167 ((vb)->vb2_queue->ops->op ? (vb)->vb2_queue->ops->op(args) : 0)
169 #define call_void_vb_qop(vb, op, args...) \ argument
171 if ((vb)->vb2_queue->ops->op) \
172 (vb)->vb2_queue->ops->op(args); \
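The call_memop()/call_vb_qop() families listed above exist in two flavors: a debug build (under CONFIG_VIDEO_ADV_DEBUG) that logs every call and bumps the per-buffer cnt_mem_*/cnt_* counters, and a lean build that only guards against an unset callback. A minimal stand-alone sketch of that second pattern follows; the demo_* names are illustrative and not part of videobuf2.

        struct demo_ops {
                int (*prepare)(void *priv);     /* optional callback */
        };

        /* Call the op if provided, otherwise fall back to 0, mirroring the
         * non-debug call_memop()/call_vb_qop() definitions listed above. */
        #define demo_call_op(ops, op, args...) \
                ((ops)->op ? (ops)->op(args) : 0)

        /* Usage: err = demo_call_op(dev->ops, prepare, dev->priv); */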
192 static void __enqueue_in_driver(struct vb2_buffer *vb);
214 static int __vb2_buf_mem_alloc(struct vb2_buffer *vb) in __vb2_buf_mem_alloc() argument
216 struct vb2_queue *q = vb->vb2_queue; in __vb2_buf_mem_alloc()
225 for (plane = 0; plane < vb->num_planes; ++plane) { in __vb2_buf_mem_alloc()
227 unsigned long size = PAGE_ALIGN(vb->planes[plane].length); in __vb2_buf_mem_alloc()
230 if (size < vb->planes[plane].length) in __vb2_buf_mem_alloc()
233 mem_priv = call_ptr_memop(vb, alloc, in __vb2_buf_mem_alloc()
243 vb->planes[plane].mem_priv = mem_priv; in __vb2_buf_mem_alloc()
250 call_void_memop(vb, put, vb->planes[plane - 1].mem_priv); in __vb2_buf_mem_alloc()
251 vb->planes[plane - 1].mem_priv = NULL; in __vb2_buf_mem_alloc()
260 static void __vb2_buf_mem_free(struct vb2_buffer *vb) in __vb2_buf_mem_free() argument
264 for (plane = 0; plane < vb->num_planes; ++plane) { in __vb2_buf_mem_free()
265 call_void_memop(vb, put, vb->planes[plane].mem_priv); in __vb2_buf_mem_free()
266 vb->planes[plane].mem_priv = NULL; in __vb2_buf_mem_free()
267 dprintk(vb->vb2_queue, 3, "freed plane %d of buffer %d\n", in __vb2_buf_mem_free()
268 plane, vb->index); in __vb2_buf_mem_free()
276 static void __vb2_buf_userptr_put(struct vb2_buffer *vb) in __vb2_buf_userptr_put() argument
280 for (plane = 0; plane < vb->num_planes; ++plane) { in __vb2_buf_userptr_put()
281 if (vb->planes[plane].mem_priv) in __vb2_buf_userptr_put()
282 call_void_memop(vb, put_userptr, vb->planes[plane].mem_priv); in __vb2_buf_userptr_put()
283 vb->planes[plane].mem_priv = NULL; in __vb2_buf_userptr_put()
291 static void __vb2_plane_dmabuf_put(struct vb2_buffer *vb, struct vb2_plane *p) in __vb2_plane_dmabuf_put() argument
297 call_void_memop(vb, unmap_dmabuf, p->mem_priv); in __vb2_plane_dmabuf_put()
299 call_void_memop(vb, detach_dmabuf, p->mem_priv); in __vb2_plane_dmabuf_put()
310 static void __vb2_buf_dmabuf_put(struct vb2_buffer *vb) in __vb2_buf_dmabuf_put() argument
314 for (plane = 0; plane < vb->num_planes; ++plane) in __vb2_buf_dmabuf_put()
315 __vb2_plane_dmabuf_put(vb, &vb->planes[plane]); in __vb2_buf_dmabuf_put()
322 static void __vb2_buf_mem_prepare(struct vb2_buffer *vb) in __vb2_buf_mem_prepare() argument
326 if (vb->synced) in __vb2_buf_mem_prepare()
329 if (vb->need_cache_sync_on_prepare) { in __vb2_buf_mem_prepare()
330 for (plane = 0; plane < vb->num_planes; ++plane) in __vb2_buf_mem_prepare()
331 call_void_memop(vb, prepare, in __vb2_buf_mem_prepare()
332 vb->planes[plane].mem_priv); in __vb2_buf_mem_prepare()
334 vb->synced = 1; in __vb2_buf_mem_prepare()
341 static void __vb2_buf_mem_finish(struct vb2_buffer *vb) in __vb2_buf_mem_finish() argument
345 if (!vb->synced) in __vb2_buf_mem_finish()
348 if (vb->need_cache_sync_on_finish) { in __vb2_buf_mem_finish()
349 for (plane = 0; plane < vb->num_planes; ++plane) in __vb2_buf_mem_finish()
350 call_void_memop(vb, finish, in __vb2_buf_mem_finish()
351 vb->planes[plane].mem_priv); in __vb2_buf_mem_finish()
353 vb->synced = 0; in __vb2_buf_mem_finish()
360 static void __setup_offsets(struct vb2_buffer *vb) in __setup_offsets() argument
362 struct vb2_queue *q = vb->vb2_queue; in __setup_offsets()
366 if (vb->index) { in __setup_offsets()
367 struct vb2_buffer *prev = q->bufs[vb->index - 1]; in __setup_offsets()
373 for (plane = 0; plane < vb->num_planes; ++plane) { in __setup_offsets()
374 vb->planes[plane].m.offset = off; in __setup_offsets()
377 vb->index, plane, off); in __setup_offsets()
379 off += vb->planes[plane].length; in __setup_offsets()
396 struct vb2_buffer *vb; in __vb2_queue_alloc() local
405 vb = kzalloc(q->buf_struct_size, GFP_KERNEL); in __vb2_queue_alloc()
406 if (!vb) { in __vb2_queue_alloc()
411 vb->state = VB2_BUF_STATE_DEQUEUED; in __vb2_queue_alloc()
412 vb->vb2_queue = q; in __vb2_queue_alloc()
413 vb->num_planes = num_planes; in __vb2_queue_alloc()
414 vb->index = q->num_buffers + buffer; in __vb2_queue_alloc()
415 vb->type = q->type; in __vb2_queue_alloc()
416 vb->memory = memory; in __vb2_queue_alloc()
425 vb->need_cache_sync_on_prepare = 1; in __vb2_queue_alloc()
426 vb->need_cache_sync_on_finish = 1; in __vb2_queue_alloc()
429 vb->planes[plane].length = plane_sizes[plane]; in __vb2_queue_alloc()
430 vb->planes[plane].min_length = plane_sizes[plane]; in __vb2_queue_alloc()
432 call_void_bufop(q, init_buffer, vb); in __vb2_queue_alloc()
434 q->bufs[vb->index] = vb; in __vb2_queue_alloc()
438 ret = __vb2_buf_mem_alloc(vb); in __vb2_queue_alloc()
442 q->bufs[vb->index] = NULL; in __vb2_queue_alloc()
443 kfree(vb); in __vb2_queue_alloc()
446 __setup_offsets(vb); in __vb2_queue_alloc()
452 ret = call_vb_qop(vb, buf_init, vb); in __vb2_queue_alloc()
455 buffer, vb); in __vb2_queue_alloc()
456 __vb2_buf_mem_free(vb); in __vb2_queue_alloc()
457 q->bufs[vb->index] = NULL; in __vb2_queue_alloc()
458 kfree(vb); in __vb2_queue_alloc()
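__vb2_queue_alloc() above sizes each vb->planes[plane] from the plane_sizes[] array, which the core obtains from the driver's queue_setup operation before allocating buffers. A minimal sketch of such a callback for a single-planar device; struct demo_dev and its frame_size field are hypothetical driver state.

        static int demo_queue_setup(struct vb2_queue *q,
                                    unsigned int *num_buffers,
                                    unsigned int *num_planes,
                                    unsigned int sizes[],
                                    struct device *alloc_devs[])
        {
                struct demo_dev *dev = vb2_get_drv_priv(q);     /* hypothetical */

                *num_planes = 1;
                sizes[0] = dev->frame_size;     /* bytes needed per frame */
                return 0;
        }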
476 struct vb2_buffer *vb; in __vb2_free_mem() local
480 vb = q->bufs[buffer]; in __vb2_free_mem()
481 if (!vb) in __vb2_free_mem()
486 __vb2_buf_mem_free(vb); in __vb2_free_mem()
488 __vb2_buf_dmabuf_put(vb); in __vb2_free_mem()
490 __vb2_buf_userptr_put(vb); in __vb2_free_mem()
524 struct vb2_buffer *vb = q->bufs[buffer]; in __vb2_queue_free() local
526 if (vb && vb->planes[0].mem_priv) in __vb2_queue_free()
527 call_void_vb_qop(vb, buf_cleanup, vb); in __vb2_queue_free()
559 struct vb2_buffer *vb = q->bufs[buffer]; in __vb2_queue_free() local
560 bool unbalanced = vb->cnt_mem_alloc != vb->cnt_mem_put || in __vb2_queue_free()
561 vb->cnt_mem_prepare != vb->cnt_mem_finish || in __vb2_queue_free()
562 vb->cnt_mem_get_userptr != vb->cnt_mem_put_userptr || in __vb2_queue_free()
563 vb->cnt_mem_attach_dmabuf != vb->cnt_mem_detach_dmabuf || in __vb2_queue_free()
564 vb->cnt_mem_map_dmabuf != vb->cnt_mem_unmap_dmabuf || in __vb2_queue_free()
565 vb->cnt_buf_queue != vb->cnt_buf_done || in __vb2_queue_free()
566 vb->cnt_buf_prepare != vb->cnt_buf_finish || in __vb2_queue_free()
567 vb->cnt_buf_init != vb->cnt_buf_cleanup; in __vb2_queue_free()
573 vb->cnt_buf_init, vb->cnt_buf_cleanup, in __vb2_queue_free()
574 vb->cnt_buf_prepare, vb->cnt_buf_finish); in __vb2_queue_free()
576 vb->cnt_buf_out_validate, vb->cnt_buf_queue, in __vb2_queue_free()
577 vb->cnt_buf_done, vb->cnt_buf_request_complete); in __vb2_queue_free()
579 vb->cnt_mem_alloc, vb->cnt_mem_put, in __vb2_queue_free()
580 vb->cnt_mem_prepare, vb->cnt_mem_finish, in __vb2_queue_free()
581 vb->cnt_mem_mmap); in __vb2_queue_free()
583 vb->cnt_mem_get_userptr, vb->cnt_mem_put_userptr); in __vb2_queue_free()
585 vb->cnt_mem_attach_dmabuf, vb->cnt_mem_detach_dmabuf, in __vb2_queue_free()
586 vb->cnt_mem_map_dmabuf, vb->cnt_mem_unmap_dmabuf); in __vb2_queue_free()
588 vb->cnt_mem_get_dmabuf, in __vb2_queue_free()
589 vb->cnt_mem_num_users, in __vb2_queue_free()
590 vb->cnt_mem_vaddr, in __vb2_queue_free()
591 vb->cnt_mem_cookie); in __vb2_queue_free()
611 bool vb2_buffer_in_use(struct vb2_queue *q, struct vb2_buffer *vb) in vb2_buffer_in_use() argument
614 for (plane = 0; plane < vb->num_planes; ++plane) { in vb2_buffer_in_use()
615 void *mem_priv = vb->planes[plane].mem_priv; in vb2_buffer_in_use()
622 if (mem_priv && call_memop(vb, num_users, mem_priv) > 1) in vb2_buffer_in_use()
973 void *vb2_plane_vaddr(struct vb2_buffer *vb, unsigned int plane_no) in vb2_plane_vaddr() argument
975 if (plane_no >= vb->num_planes || !vb->planes[plane_no].mem_priv) in vb2_plane_vaddr()
978 return call_ptr_memop(vb, vaddr, vb->planes[plane_no].mem_priv); in vb2_plane_vaddr()
983 void *vb2_plane_cookie(struct vb2_buffer *vb, unsigned int plane_no) in vb2_plane_cookie() argument
985 if (plane_no >= vb->num_planes || !vb->planes[plane_no].mem_priv) in vb2_plane_cookie()
988 return call_ptr_memop(vb, cookie, vb->planes[plane_no].mem_priv); in vb2_plane_cookie()
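vb2_plane_vaddr() and vb2_plane_cookie() simply forward to the allocator's vaddr/cookie mem_ops through call_ptr_memop(), so they return NULL when the allocator does not implement the op or provides no kernel mapping. A short sketch of the defensive usage pattern; demo_fill() is a made-up helper.

        static void demo_fill(struct vb2_buffer *vb)
        {
                void *addr = vb2_plane_vaddr(vb, 0);

                if (!addr)      /* allocator has no kernel mapping for this plane */
                        return;
                memset(addr, 0, vb2_plane_size(vb, 0));
                vb2_set_plane_payload(vb, 0, vb2_plane_size(vb, 0));
        }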
992 void vb2_buffer_done(struct vb2_buffer *vb, enum vb2_buffer_state state) in vb2_buffer_done() argument
994 struct vb2_queue *q = vb->vb2_queue; in vb2_buffer_done()
997 if (WARN_ON(vb->state != VB2_BUF_STATE_ACTIVE)) in vb2_buffer_done()
1010 vb->cnt_buf_done++; in vb2_buffer_done()
1013 vb->index, vb2_state_name(state)); in vb2_buffer_done()
1016 __vb2_buf_mem_finish(vb); in vb2_buffer_done()
1020 vb->state = VB2_BUF_STATE_QUEUED; in vb2_buffer_done()
1023 list_add_tail(&vb->done_entry, &q->done_list); in vb2_buffer_done()
1024 vb->state = state; in vb2_buffer_done()
1028 if (state != VB2_BUF_STATE_QUEUED && vb->req_obj.req) { in vb2_buffer_done()
1029 media_request_object_unbind(&vb->req_obj); in vb2_buffer_done()
1030 media_request_object_put(&vb->req_obj); in vb2_buffer_done()
1035 trace_vb2_buf_done(q, vb); in vb2_buffer_done()
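vb2_buffer_done() is what a driver calls from its completion path, typically the interrupt handler, once the hardware has finished with a buffer it received via buf_queue. A minimal sketch under the usual conventions; struct demo_dev and its cur_buf, sequence and frame_size fields are hypothetical.

        static irqreturn_t demo_irq(int irq, void *priv)
        {
                struct demo_dev *dev = priv;
                struct vb2_v4l2_buffer *vbuf = dev->cur_buf;    /* buffer DMA just filled */

                vbuf->vb2_buf.timestamp = ktime_get_ns();
                vbuf->sequence = dev->sequence++;
                vb2_set_plane_payload(&vbuf->vb2_buf, 0, dev->frame_size);
                vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE);
                return IRQ_HANDLED;
        }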
1050 struct vb2_buffer *vb; in vb2_discard_done() local
1054 list_for_each_entry(vb, &q->done_list, done_entry) in vb2_discard_done()
1055 vb->state = VB2_BUF_STATE_ERROR; in vb2_discard_done()
1063 static int __prepare_mmap(struct vb2_buffer *vb) in __prepare_mmap() argument
1067 ret = call_bufop(vb->vb2_queue, fill_vb2_buffer, in __prepare_mmap()
1068 vb, vb->planes); in __prepare_mmap()
1069 return ret ? ret : call_vb_qop(vb, buf_prepare, vb); in __prepare_mmap()
1075 static int __prepare_userptr(struct vb2_buffer *vb) in __prepare_userptr() argument
1078 struct vb2_queue *q = vb->vb2_queue; in __prepare_userptr()
1082 bool reacquired = vb->planes[0].mem_priv == NULL; in __prepare_userptr()
1084 memset(planes, 0, sizeof(planes[0]) * vb->num_planes); in __prepare_userptr()
1086 ret = call_bufop(vb->vb2_queue, fill_vb2_buffer, in __prepare_userptr()
1087 vb, planes); in __prepare_userptr()
1091 for (plane = 0; plane < vb->num_planes; ++plane) { in __prepare_userptr()
1093 if (vb->planes[plane].m.userptr && in __prepare_userptr()
1094 vb->planes[plane].m.userptr == planes[plane].m.userptr in __prepare_userptr()
1095 && vb->planes[plane].length == planes[plane].length) in __prepare_userptr()
1102 if (planes[plane].length < vb->planes[plane].min_length) { in __prepare_userptr()
1105 vb->planes[plane].min_length, in __prepare_userptr()
1112 if (vb->planes[plane].mem_priv) { in __prepare_userptr()
1115 vb->copied_timestamp = 0; in __prepare_userptr()
1116 call_void_vb_qop(vb, buf_cleanup, vb); in __prepare_userptr()
1118 call_void_memop(vb, put_userptr, vb->planes[plane].mem_priv); in __prepare_userptr()
1121 vb->planes[plane].mem_priv = NULL; in __prepare_userptr()
1122 vb->planes[plane].bytesused = 0; in __prepare_userptr()
1123 vb->planes[plane].length = 0; in __prepare_userptr()
1124 vb->planes[plane].m.userptr = 0; in __prepare_userptr()
1125 vb->planes[plane].data_offset = 0; in __prepare_userptr()
1128 mem_priv = call_ptr_memop(vb, get_userptr, in __prepare_userptr()
1138 vb->planes[plane].mem_priv = mem_priv; in __prepare_userptr()
1145 for (plane = 0; plane < vb->num_planes; ++plane) { in __prepare_userptr()
1146 vb->planes[plane].bytesused = planes[plane].bytesused; in __prepare_userptr()
1147 vb->planes[plane].length = planes[plane].length; in __prepare_userptr()
1148 vb->planes[plane].m.userptr = planes[plane].m.userptr; in __prepare_userptr()
1149 vb->planes[plane].data_offset = planes[plane].data_offset; in __prepare_userptr()
1158 ret = call_vb_qop(vb, buf_init, vb); in __prepare_userptr()
1165 ret = call_vb_qop(vb, buf_prepare, vb); in __prepare_userptr()
1168 call_void_vb_qop(vb, buf_cleanup, vb); in __prepare_userptr()
1175 for (plane = 0; plane < vb->num_planes; ++plane) { in __prepare_userptr()
1176 if (vb->planes[plane].mem_priv) in __prepare_userptr()
1177 call_void_memop(vb, put_userptr, in __prepare_userptr()
1178 vb->planes[plane].mem_priv); in __prepare_userptr()
1179 vb->planes[plane].mem_priv = NULL; in __prepare_userptr()
1180 vb->planes[plane].m.userptr = 0; in __prepare_userptr()
1181 vb->planes[plane].length = 0; in __prepare_userptr()
1190 static int __prepare_dmabuf(struct vb2_buffer *vb) in __prepare_dmabuf() argument
1193 struct vb2_queue *q = vb->vb2_queue; in __prepare_dmabuf()
1197 bool reacquired = vb->planes[0].mem_priv == NULL; in __prepare_dmabuf()
1199 memset(planes, 0, sizeof(planes[0]) * vb->num_planes); in __prepare_dmabuf()
1201 ret = call_bufop(vb->vb2_queue, fill_vb2_buffer, in __prepare_dmabuf()
1202 vb, planes); in __prepare_dmabuf()
1206 for (plane = 0; plane < vb->num_planes; ++plane) { in __prepare_dmabuf()
1220 if (planes[plane].length < vb->planes[plane].min_length) { in __prepare_dmabuf()
1223 vb->planes[plane].min_length); in __prepare_dmabuf()
1230 if (dbuf == vb->planes[plane].dbuf && in __prepare_dmabuf()
1231 vb->planes[plane].length == planes[plane].length) { in __prepare_dmabuf()
1240 vb->copied_timestamp = 0; in __prepare_dmabuf()
1241 call_void_vb_qop(vb, buf_cleanup, vb); in __prepare_dmabuf()
1245 __vb2_plane_dmabuf_put(vb, &vb->planes[plane]); in __prepare_dmabuf()
1246 vb->planes[plane].bytesused = 0; in __prepare_dmabuf()
1247 vb->planes[plane].length = 0; in __prepare_dmabuf()
1248 vb->planes[plane].m.fd = 0; in __prepare_dmabuf()
1249 vb->planes[plane].data_offset = 0; in __prepare_dmabuf()
1252 mem_priv = call_ptr_memop(vb, attach_dmabuf, in __prepare_dmabuf()
1262 vb->planes[plane].dbuf = dbuf; in __prepare_dmabuf()
1263 vb->planes[plane].mem_priv = mem_priv; in __prepare_dmabuf()
1271 for (plane = 0; plane < vb->num_planes; ++plane) { in __prepare_dmabuf()
1272 if (vb->planes[plane].dbuf_mapped) in __prepare_dmabuf()
1275 ret = call_memop(vb, map_dmabuf, vb->planes[plane].mem_priv); in __prepare_dmabuf()
1281 vb->planes[plane].dbuf_mapped = 1; in __prepare_dmabuf()
1288 for (plane = 0; plane < vb->num_planes; ++plane) { in __prepare_dmabuf()
1289 vb->planes[plane].bytesused = planes[plane].bytesused; in __prepare_dmabuf()
1290 vb->planes[plane].length = planes[plane].length; in __prepare_dmabuf()
1291 vb->planes[plane].m.fd = planes[plane].m.fd; in __prepare_dmabuf()
1292 vb->planes[plane].data_offset = planes[plane].data_offset; in __prepare_dmabuf()
1300 ret = call_vb_qop(vb, buf_init, vb); in __prepare_dmabuf()
1307 ret = call_vb_qop(vb, buf_prepare, vb); in __prepare_dmabuf()
1310 call_void_vb_qop(vb, buf_cleanup, vb); in __prepare_dmabuf()
1317 __vb2_buf_dmabuf_put(vb); in __prepare_dmabuf()
1325 static void __enqueue_in_driver(struct vb2_buffer *vb) in __enqueue_in_driver() argument
1327 struct vb2_queue *q = vb->vb2_queue; in __enqueue_in_driver()
1329 vb->state = VB2_BUF_STATE_ACTIVE; in __enqueue_in_driver()
1332 trace_vb2_buf_queue(q, vb); in __enqueue_in_driver()
1334 call_void_vb_qop(vb, buf_queue, vb); in __enqueue_in_driver()
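__enqueue_in_driver() marks the buffer ACTIVE and then invokes the driver's buf_queue op, which normally just parks the buffer on a driver-owned list for the DMA engine to pick up later. A sketch of that op; struct demo_buffer and struct demo_dev (with qlock and buf_list) are hypothetical driver types. Note the wrapper's vb2_v4l2_buffer must come first so the core's kzalloc(q->buf_struct_size, ...) allocation shown earlier can be cast to it.

        struct demo_buffer {
                struct vb2_v4l2_buffer vb;      /* must be the first member */
                struct list_head list;
        };

        static void demo_buf_queue(struct vb2_buffer *vb)
        {
                struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
                struct demo_buffer *buf = container_of(vbuf, struct demo_buffer, vb);
                struct demo_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
                unsigned long flags;

                spin_lock_irqsave(&dev->qlock, flags);
                list_add_tail(&buf->list, &dev->buf_list);
                spin_unlock_irqrestore(&dev->qlock, flags);
        }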
1337 static int __buf_prepare(struct vb2_buffer *vb) in __buf_prepare() argument
1339 struct vb2_queue *q = vb->vb2_queue; in __buf_prepare()
1340 enum vb2_buffer_state orig_state = vb->state; in __buf_prepare()
1348 if (vb->prepared) in __buf_prepare()
1350 WARN_ON(vb->synced); in __buf_prepare()
1353 ret = call_vb_qop(vb, buf_out_validate, vb); in __buf_prepare()
1360 vb->state = VB2_BUF_STATE_PREPARING; in __buf_prepare()
1364 ret = __prepare_mmap(vb); in __buf_prepare()
1367 ret = __prepare_userptr(vb); in __buf_prepare()
1370 ret = __prepare_dmabuf(vb); in __buf_prepare()
1380 vb->state = orig_state; in __buf_prepare()
1384 __vb2_buf_mem_prepare(vb); in __buf_prepare()
1385 vb->prepared = 1; in __buf_prepare()
1386 vb->state = orig_state; in __buf_prepare()
1393 struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); in vb2_req_prepare() local
1396 if (WARN_ON(vb->state != VB2_BUF_STATE_IN_REQUEST)) in vb2_req_prepare()
1399 mutex_lock(vb->vb2_queue->lock); in vb2_req_prepare()
1400 ret = __buf_prepare(vb); in vb2_req_prepare()
1401 mutex_unlock(vb->vb2_queue->lock); in vb2_req_prepare()
1405 static void __vb2_dqbuf(struct vb2_buffer *vb);
1409 struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); in vb2_req_unprepare() local
1411 mutex_lock(vb->vb2_queue->lock); in vb2_req_unprepare()
1412 __vb2_dqbuf(vb); in vb2_req_unprepare()
1413 vb->state = VB2_BUF_STATE_IN_REQUEST; in vb2_req_unprepare()
1414 mutex_unlock(vb->vb2_queue->lock); in vb2_req_unprepare()
1415 WARN_ON(!vb->req_obj.req); in vb2_req_unprepare()
1423 struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); in vb2_req_queue() local
1425 mutex_lock(vb->vb2_queue->lock); in vb2_req_queue()
1426 vb2_core_qbuf(vb->vb2_queue, vb->index, NULL, NULL); in vb2_req_queue()
1427 mutex_unlock(vb->vb2_queue->lock); in vb2_req_queue()
1432 struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); in vb2_req_unbind() local
1434 if (vb->state == VB2_BUF_STATE_IN_REQUEST) in vb2_req_unbind()
1435 call_void_bufop(vb->vb2_queue, init_buffer, vb); in vb2_req_unbind()
1440 struct vb2_buffer *vb = container_of(obj, struct vb2_buffer, req_obj); in vb2_req_release() local
1442 if (vb->state == VB2_BUF_STATE_IN_REQUEST) { in vb2_req_release()
1443 vb->state = VB2_BUF_STATE_DEQUEUED; in vb2_req_release()
1444 if (vb->request) in vb2_req_release()
1445 media_request_put(vb->request); in vb2_req_release()
1446 vb->request = NULL; in vb2_req_release()
1482 struct vb2_buffer *vb; in vb2_core_prepare_buf() local
1485 vb = q->bufs[index]; in vb2_core_prepare_buf()
1486 if (vb->state != VB2_BUF_STATE_DEQUEUED) { in vb2_core_prepare_buf()
1488 vb2_state_name(vb->state)); in vb2_core_prepare_buf()
1491 if (vb->prepared) { in vb2_core_prepare_buf()
1496 ret = __buf_prepare(vb); in vb2_core_prepare_buf()
1501 call_void_bufop(q, fill_user_buffer, vb, pb); in vb2_core_prepare_buf()
1503 dprintk(q, 2, "prepare of buffer %d succeeded\n", vb->index); in vb2_core_prepare_buf()
1522 struct vb2_buffer *vb; in vb2_start_streaming() local
1529 list_for_each_entry(vb, &q->queued_list, queued_entry) in vb2_start_streaming()
1530 __enqueue_in_driver(vb); in vb2_start_streaming()
1556 vb = q->bufs[i]; in vb2_start_streaming()
1557 if (vb->state == VB2_BUF_STATE_ACTIVE) in vb2_start_streaming()
1558 vb2_buffer_done(vb, VB2_BUF_STATE_QUEUED); in vb2_start_streaming()
1565 * vb2_buffer_done(vb, VB2_BUF_STATE_QUEUED) but STATE_ERROR or in vb2_start_streaming()
1575 struct vb2_buffer *vb; in vb2_core_qbuf() local
1583 vb = q->bufs[index]; in vb2_core_qbuf()
1585 if (!req && vb->state != VB2_BUF_STATE_IN_REQUEST && in vb2_core_qbuf()
1592 (!req && vb->state != VB2_BUF_STATE_IN_REQUEST && in vb2_core_qbuf()
1602 if (vb->state != VB2_BUF_STATE_DEQUEUED) { in vb2_core_qbuf()
1604 vb->index); in vb2_core_qbuf()
1608 if (q->is_output && !vb->prepared) { in vb2_core_qbuf()
1609 ret = call_vb_qop(vb, buf_out_validate, vb); in vb2_core_qbuf()
1616 media_request_object_init(&vb->req_obj); in vb2_core_qbuf()
1623 q, true, &vb->req_obj); in vb2_core_qbuf()
1628 vb->state = VB2_BUF_STATE_IN_REQUEST; in vb2_core_qbuf()
1639 vb->request = req; in vb2_core_qbuf()
1643 call_void_bufop(q, copy_timestamp, vb, pb); in vb2_core_qbuf()
1644 call_void_bufop(q, fill_user_buffer, vb, pb); in vb2_core_qbuf()
1647 dprintk(q, 2, "qbuf of buffer %d succeeded\n", vb->index); in vb2_core_qbuf()
1651 if (vb->state != VB2_BUF_STATE_IN_REQUEST) in vb2_core_qbuf()
1654 switch (vb->state) { in vb2_core_qbuf()
1657 if (!vb->prepared) { in vb2_core_qbuf()
1658 ret = __buf_prepare(vb); in vb2_core_qbuf()
1668 vb2_state_name(vb->state)); in vb2_core_qbuf()
1676 list_add_tail(&vb->queued_entry, &q->queued_list); in vb2_core_qbuf()
1679 vb->state = VB2_BUF_STATE_QUEUED; in vb2_core_qbuf()
1682 call_void_bufop(q, copy_timestamp, vb, pb); in vb2_core_qbuf()
1684 trace_vb2_qbuf(q, vb); in vb2_core_qbuf()
1691 __enqueue_in_driver(vb); in vb2_core_qbuf()
1695 call_void_bufop(q, fill_user_buffer, vb, pb); in vb2_core_qbuf()
1710 dprintk(q, 2, "qbuf of buffer %d succeeded\n", vb->index); in vb2_core_qbuf()
1802 static int __vb2_get_done_vb(struct vb2_queue *q, struct vb2_buffer **vb, in __vb2_get_done_vb() argument
1820 *vb = list_first_entry(&q->done_list, struct vb2_buffer, done_entry); in __vb2_get_done_vb()
1827 ret = call_bufop(q, verify_planes_array, *vb, pb); in __vb2_get_done_vb()
1829 list_del(&(*vb)->done_entry); in __vb2_get_done_vb()
1851 static void __vb2_dqbuf(struct vb2_buffer *vb) in __vb2_dqbuf() argument
1853 struct vb2_queue *q = vb->vb2_queue; in __vb2_dqbuf()
1856 if (vb->state == VB2_BUF_STATE_DEQUEUED) in __vb2_dqbuf()
1859 vb->state = VB2_BUF_STATE_DEQUEUED; in __vb2_dqbuf()
1861 call_void_bufop(q, init_buffer, vb); in __vb2_dqbuf()
1867 struct vb2_buffer *vb = NULL; in vb2_core_dqbuf() local
1870 ret = __vb2_get_done_vb(q, &vb, pb, nonblocking); in vb2_core_dqbuf()
1874 switch (vb->state) { in vb2_core_dqbuf()
1883 vb2_state_name(vb->state)); in vb2_core_dqbuf()
1887 call_void_vb_qop(vb, buf_finish, vb); in vb2_core_dqbuf()
1888 vb->prepared = 0; in vb2_core_dqbuf()
1891 *pindex = vb->index; in vb2_core_dqbuf()
1895 call_void_bufop(q, fill_user_buffer, vb, pb); in vb2_core_dqbuf()
1898 list_del(&vb->queued_entry); in vb2_core_dqbuf()
1901 trace_vb2_dqbuf(q, vb); in vb2_core_dqbuf()
1904 __vb2_dqbuf(vb); in vb2_core_dqbuf()
1906 if (WARN_ON(vb->req_obj.req)) { in vb2_core_dqbuf()
1907 media_request_object_unbind(&vb->req_obj); in vb2_core_dqbuf()
1908 media_request_object_put(&vb->req_obj); in vb2_core_dqbuf()
1910 if (vb->request) in vb2_core_dqbuf()
1911 media_request_put(vb->request); in vb2_core_dqbuf()
1912 vb->request = NULL; in vb2_core_dqbuf()
1915 vb->index, vb2_state_name(vb->state)); in vb2_core_dqbuf()
1985 struct vb2_buffer *vb = q->bufs[i]; in __vb2_queue_cancel() local
1986 struct media_request *req = vb->req_obj.req; in __vb2_queue_cancel()
2003 call_void_vb_qop(vb, buf_request_complete, vb); in __vb2_queue_cancel()
2006 __vb2_buf_mem_finish(vb); in __vb2_queue_cancel()
2008 if (vb->prepared) { in __vb2_queue_cancel()
2009 call_void_vb_qop(vb, buf_finish, vb); in __vb2_queue_cancel()
2010 vb->prepared = 0; in __vb2_queue_cancel()
2012 __vb2_dqbuf(vb); in __vb2_queue_cancel()
2014 if (vb->req_obj.req) { in __vb2_queue_cancel()
2015 media_request_object_unbind(&vb->req_obj); in __vb2_queue_cancel()
2016 media_request_object_put(&vb->req_obj); in __vb2_queue_cancel()
2018 if (vb->request) in __vb2_queue_cancel()
2019 media_request_put(vb->request); in __vb2_queue_cancel()
2020 vb->request = NULL; in __vb2_queue_cancel()
2021 vb->copied_timestamp = 0; in __vb2_queue_cancel()
2109 struct vb2_buffer *vb; in __find_plane_by_offset() local
2118 vb = q->bufs[buffer]; in __find_plane_by_offset()
2120 for (plane = 0; plane < vb->num_planes; ++plane) { in __find_plane_by_offset()
2121 if (vb->planes[plane].m.offset == off) { in __find_plane_by_offset()
2135 struct vb2_buffer *vb = NULL; in vb2_core_expbuf() local
2165 vb = q->bufs[index]; in vb2_core_expbuf()
2167 if (plane >= vb->num_planes) { in vb2_core_expbuf()
2177 vb_plane = &vb->planes[plane]; in vb2_core_expbuf()
2179 dbuf = call_ptr_memop(vb, get_dmabuf, vb_plane->mem_priv, in vb2_core_expbuf()
2206 struct vb2_buffer *vb; in vb2_mmap() local
2250 vb = q->bufs[buffer]; in vb2_mmap()
2257 length = PAGE_ALIGN(vb->planes[plane].length); in vb2_mmap()
2272 ret = call_memop(vb, mmap, vb->planes[plane].mem_priv, vma); in vb2_mmap()
2292 struct vb2_buffer *vb; in vb2_get_unmapped_area() local
2309 vb = q->bufs[buffer]; in vb2_get_unmapped_area()
2311 vaddr = vb2_plane_vaddr(vb, plane); in vb2_get_unmapped_area()
2374 struct vb2_buffer *vb = NULL; in vb2_core_poll() local
2442 vb = list_first_entry(&q->done_list, struct vb2_buffer, in vb2_core_poll()
2446 if (vb && (vb->state == VB2_BUF_STATE_DONE in vb2_core_poll()
2447 || vb->state == VB2_BUF_STATE_ERROR)) { in vb2_core_poll()
2854 struct vb2_buffer *vb; in vb2_thread() local
2860 vb = q->bufs[index++]; in vb2_thread()
2869 vb = q->bufs[index]; in vb2_thread()
2875 if (vb->state != VB2_BUF_STATE_ERROR) in vb2_thread()
2876 if (threadio->fnc(vb, threadio->priv)) in vb2_thread()
2880 vb->timestamp = ktime_get_ns(); in vb2_thread()
2882 ret = vb2_core_qbuf(q, vb->index, NULL, NULL); in vb2_thread()