Lines matching refs:vbuf in drivers/gpu/drm/virtio/virtgpu_vq.c — each entry gives the source line number, the matched line and the enclosing function; declaration sites are additionally tagged local or argument:
92 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_get_vbuf() local
94 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL); in virtio_gpu_get_vbuf()
95 if (!vbuf) in virtio_gpu_get_vbuf()
100 vbuf->buf = (void *)vbuf + sizeof(*vbuf); in virtio_gpu_get_vbuf()
101 vbuf->size = size; in virtio_gpu_get_vbuf()
103 vbuf->resp_cb = resp_cb; in virtio_gpu_get_vbuf()
104 vbuf->resp_size = resp_size; in virtio_gpu_get_vbuf()
106 vbuf->resp_buf = (void *)vbuf->buf + size; in virtio_gpu_get_vbuf()
108 vbuf->resp_buf = resp_buf; in virtio_gpu_get_vbuf()
109 BUG_ON(!vbuf->resp_buf); in virtio_gpu_get_vbuf()
110 return vbuf; in virtio_gpu_get_vbuf()
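
The matches above cover virtio_gpu_get_vbuf() almost line for line. A hedged reconstruction of the allocator, with the signature, the MAX_INLINE_RESP_SIZE threshold and the ERR_PTR() error return filled in as assumptions around the matched lines:

static struct virtio_gpu_vbuffer *
virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev,
                    int size, int resp_size, void *resp_buf,
                    virtio_gpu_resp_cb resp_cb)
{
        struct virtio_gpu_vbuffer *vbuf;

        vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL);
        if (!vbuf)
                return ERR_PTR(-ENOMEM);

        /* The command payload sits directly behind the vbuffer header. */
        vbuf->buf = (void *)vbuf + sizeof(*vbuf);
        vbuf->size = size;

        vbuf->resp_cb = resp_cb;
        vbuf->resp_size = resp_size;
        if (resp_size <= MAX_INLINE_RESP_SIZE)
                vbuf->resp_buf = (void *)vbuf->buf + size; /* inline response */
        else
                vbuf->resp_buf = resp_buf;                 /* caller-supplied */
        BUG_ON(!vbuf->resp_buf);
        return vbuf;
}

So the vbuffer, its command payload and any small response buffer come out of a single vgdev->vbufs slab object, which is why free_vbuf() below only kfree()s responses larger than MAX_INLINE_RESP_SIZE.
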
114 virtio_gpu_vbuf_ctrl_hdr(struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_vbuf_ctrl_hdr() argument
120 return (struct virtio_gpu_ctrl_hdr *)vbuf->buf; in virtio_gpu_vbuf_ctrl_hdr()
127 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_alloc_cursor() local
129 vbuf = virtio_gpu_get_vbuf in virtio_gpu_alloc_cursor()
132 if (IS_ERR(vbuf)) { in virtio_gpu_alloc_cursor()
134 return ERR_CAST(vbuf); in virtio_gpu_alloc_cursor()
136 *vbuffer_p = vbuf; in virtio_gpu_alloc_cursor()
137 return (struct virtio_gpu_update_cursor *)vbuf->buf; in virtio_gpu_alloc_cursor()
146 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_alloc_cmd_resp() local
148 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
150 if (IS_ERR(vbuf)) { in virtio_gpu_alloc_cmd_resp()
152 return ERR_CAST(vbuf); in virtio_gpu_alloc_cmd_resp()
154 *vbuffer_p = vbuf; in virtio_gpu_alloc_cmd_resp()
155 return (struct virtio_gpu_command *)vbuf->buf; in virtio_gpu_alloc_cmd_resp()
178 struct virtio_gpu_vbuffer *vbuf) in free_vbuf() argument
180 if (vbuf->resp_size > MAX_INLINE_RESP_SIZE) in free_vbuf()
181 kfree(vbuf->resp_buf); in free_vbuf()
182 kvfree(vbuf->data_buf); in free_vbuf()
183 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
188 struct virtio_gpu_vbuffer *vbuf; in reclaim_vbufs() local
192 while ((vbuf = virtqueue_get_buf(vq, &len))) { in reclaim_vbufs()
193 list_add_tail(&vbuf->list, reclaim_list); in reclaim_vbufs()
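
free_vbuf() and reclaim_vbufs() are the teardown side of the same object: completed buffers come back off the virtqueue onto a reclaim list, the optional data payload is always released, and only out-of-line response buffers are freed separately. A minimal sketch with the signatures assumed from the matches:

static void free_vbuf(struct virtio_gpu_device *vgdev,
                      struct virtio_gpu_vbuffer *vbuf)
{
        if (vbuf->resp_size > MAX_INLINE_RESP_SIZE)
                kfree(vbuf->resp_buf);  /* inline responses live in the slab object */
        kvfree(vbuf->data_buf);         /* kvfree() copes with NULL and vmalloc memory */
        kmem_cache_free(vgdev->vbufs, vbuf);
}

static void reclaim_vbufs(struct virtqueue *vq, struct list_head *reclaim_list)
{
        struct virtio_gpu_vbuffer *vbuf;
        unsigned int len;

        /* Collect every buffer the device has completed; the dequeue
         * worker is assumed to run response callbacks and free_vbuf()
         * on the list afterwards. */
        while ((vbuf = virtqueue_get_buf(vq, &len)))
                list_add_tail(&vbuf->list, reclaim_list);
}
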
324 struct virtio_gpu_vbuffer *vbuf, in virtio_gpu_queue_ctrl_sgs() argument
335 if (fence && vbuf->objs) in virtio_gpu_queue_ctrl_sgs()
336 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_ctrl_sgs()
337 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
358 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
360 if (vbuf->objs) { in virtio_gpu_queue_ctrl_sgs()
361 virtio_gpu_array_add_fence(vbuf->objs, &fence->f); in virtio_gpu_queue_ctrl_sgs()
362 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_ctrl_sgs()
366 ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC); in virtio_gpu_queue_ctrl_sgs()
369 trace_virtio_gpu_cmd_queue(vq, virtio_gpu_vbuf_ctrl_hdr(vbuf)); in virtio_gpu_queue_ctrl_sgs()
380 struct virtio_gpu_vbuffer *vbuf, in virtio_gpu_queue_fenced_ctrl_buffer() argument
388 sg_init_one(&vcmd, vbuf->buf, vbuf->size); in virtio_gpu_queue_fenced_ctrl_buffer()
394 if (vbuf->data_size) { in virtio_gpu_queue_fenced_ctrl_buffer()
395 if (is_vmalloc_addr(vbuf->data_buf)) { in virtio_gpu_queue_fenced_ctrl_buffer()
397 sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size, in virtio_gpu_queue_fenced_ctrl_buffer()
400 if (fence && vbuf->objs) in virtio_gpu_queue_fenced_ctrl_buffer()
401 virtio_gpu_array_unlock_resv(vbuf->objs); in virtio_gpu_queue_fenced_ctrl_buffer()
408 sg_init_one(&vout, vbuf->data_buf, vbuf->data_size); in virtio_gpu_queue_fenced_ctrl_buffer()
416 if (vbuf->resp_size) { in virtio_gpu_queue_fenced_ctrl_buffer()
417 sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size); in virtio_gpu_queue_fenced_ctrl_buffer()
423 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
450 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_queue_ctrl_buffer() argument
452 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
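
The queueing helpers map one vbuffer onto at most three scatterlist elements: the command itself (device-readable), an optional data payload (device-readable; a full sg table via vmalloc_to_sgt() when data_buf is vmalloc memory), and an optional response buffer (device-writable). virtio_gpu_queue_ctrl_buffer() is simply the unfenced wrapper. A condensed sketch of virtio_gpu_queue_fenced_ctrl_buffer(); the int return and the error handling are simplified assumptions:

static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev,
                                               struct virtio_gpu_vbuffer *vbuf,
                                               struct virtio_gpu_fence *fence)
{
        struct scatterlist *sgs[3], vcmd, vout, vresp;
        struct sg_table *sgt = NULL;
        int elemcnt = 0, outcnt = 0, incnt = 0, sg_ents = 0, ret;

        /* 1) the command always goes out to the device */
        sg_init_one(&vcmd, vbuf->buf, vbuf->size);
        sgs[outcnt++] = &vcmd;
        elemcnt++;

        /* 2) optional data payload (backing entries, 3D command stream) */
        if (vbuf->data_size) {
                if (is_vmalloc_addr(vbuf->data_buf)) {
                        /* vmalloc memory is not contiguous: build an sg table */
                        sgt = vmalloc_to_sgt(vbuf->data_buf, vbuf->data_size,
                                             &sg_ents);
                        if (!sgt) {
                                if (fence && vbuf->objs)
                                        virtio_gpu_array_unlock_resv(vbuf->objs);
                                return -ENOMEM;
                        }
                        sgs[outcnt++] = sgt->sgl;
                        elemcnt += sg_ents;
                } else {
                        sg_init_one(&vout, vbuf->data_buf, vbuf->data_size);
                        sgs[outcnt++] = &vout;
                        elemcnt++;
                }
        }

        /* 3) optional response buffer, written by the device */
        if (vbuf->resp_size) {
                sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size);
                sgs[outcnt + incnt++] = &vresp;
                elemcnt++;
        }

        ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs,
                                        outcnt, incnt);
        if (sgt) {
                sg_free_table(sgt);
                kfree(sgt);
        }
        return ret;
}

virtio_gpu_queue_ctrl_sgs() then emits the fence (attaching it to vbuf->objs when present), adds the sgs with virtqueue_add_sgs(..., GFP_ATOMIC) and kicks the queue, as the matches for that function show.
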
456 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_queue_cursor() argument
464 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
468 sg_init_one(&ccmd, vbuf->buf, vbuf->size); in virtio_gpu_queue_cursor()
474 ret = virtqueue_add_sgs(vq, sgs, outcnt, 0, vbuf, GFP_ATOMIC); in virtio_gpu_queue_cursor()
482 virtio_gpu_vbuf_ctrl_hdr(vbuf)); in virtio_gpu_queue_cursor()
507 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_create_resource() local
509 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
511 vbuf->objs = objs; in virtio_gpu_cmd_create_resource()
519 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
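
virtio_gpu_cmd_create_resource() is the first of many emitters with the same three-step shape that repeats through the context, transfer, UUID, map and blob matches below: allocate an inline command with virtio_gpu_alloc_cmd(), fill in the wire struct (attaching the object array to the vbuffer so it stays reserved until the fence is emitted), then queue it fenced or unfenced. A rough sketch; the struct and command-type names come from the virtio-gpu uapi headers, while the parameter plumbing is an assumption:

void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev,
                                    struct virtio_gpu_object *bo,
                                    struct virtio_gpu_object_params *params,
                                    struct virtio_gpu_object_array *objs,
                                    struct virtio_gpu_fence *fence)
{
        struct virtio_gpu_resource_create_2d *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;

        /* inline command, no response expected */
        cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
        memset(cmd_p, 0, sizeof(*cmd_p));
        vbuf->objs = objs;      /* fenced objects ride along on the vbuffer */

        cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_CREATE_2D);
        cmd_p->resource_id = cpu_to_le32(bo->hw_res_handle);
        cmd_p->format = cpu_to_le32(params->format);
        cmd_p->width = cpu_to_le32(params->width);
        cmd_p->height = cpu_to_le32(params->height);

        virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
}
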
524 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_unref_cb() argument
528 bo = vbuf->resp_cb_data; in virtio_gpu_cmd_unref_cb()
529 vbuf->resp_cb_data = NULL; in virtio_gpu_cmd_unref_cb()
538 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_unref_resource() local
541 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
548 vbuf->resp_cb_data = bo; in virtio_gpu_cmd_unref_resource()
549 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
560 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_set_scanout() local
562 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
573 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
584 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_flush() local
586 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
588 vbuf->objs = objs; in virtio_gpu_cmd_resource_flush()
597 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_flush()
609 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_to_host_2d() local
617 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
619 vbuf->objs = objs; in virtio_gpu_cmd_transfer_to_host_2d()
629 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
640 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_attach_backing() local
642 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
649 vbuf->data_buf = ents; in virtio_gpu_cmd_resource_attach_backing()
650 vbuf->data_size = sizeof(*ents) * nents; in virtio_gpu_cmd_resource_attach_backing()
652 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
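
virtio_gpu_cmd_resource_attach_backing() is the first emitter that also carries a separate data payload: the ents array is handed over through vbuf->data_buf/data_size, becomes the extra "out" scatterlist in virtio_gpu_queue_fenced_ctrl_buffer(), and is kvfree()d by free_vbuf() once the command completes, so ownership of the array moves to the vbuffer. A minimal sketch under the same uapi assumptions as above:

static void
virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev,
                                       uint32_t resource_id,
                                       struct virtio_gpu_mem_entry *ents,
                                       uint32_t nents,
                                       struct virtio_gpu_fence *fence)
{
        struct virtio_gpu_resource_attach_backing *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;

        cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
        memset(cmd_p, 0, sizeof(*cmd_p));

        cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING);
        cmd_p->resource_id = cpu_to_le32(resource_id);
        cmd_p->nr_entries = cpu_to_le32(nents);

        /* hand the entry array to the vbuffer; free_vbuf() releases it */
        vbuf->data_buf = ents;
        vbuf->data_size = sizeof(*ents) * nents;

        virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
}
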
656 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_display_info_cb() argument
659 (struct virtio_gpu_resp_display_info *)vbuf->resp_buf; in virtio_gpu_cmd_get_display_info_cb()
685 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_capset_info_cb() argument
688 (struct virtio_gpu_get_capset_info *)vbuf->buf; in virtio_gpu_cmd_get_capset_info_cb()
690 (struct virtio_gpu_resp_capset_info *)vbuf->resp_buf; in virtio_gpu_cmd_get_capset_info_cb()
706 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_capset_cb() argument
709 (struct virtio_gpu_get_capset *)vbuf->buf; in virtio_gpu_cmd_capset_cb()
711 (struct virtio_gpu_resp_capset *)vbuf->resp_buf; in virtio_gpu_cmd_capset_cb()
743 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_get_edid_cb() argument
746 (struct virtio_gpu_cmd_get_edid *)vbuf->buf; in virtio_gpu_cmd_get_edid_cb()
748 (struct virtio_gpu_resp_edid *)vbuf->resp_buf; in virtio_gpu_cmd_get_edid_cb()
772 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_display_info() local
781 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
788 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
795 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_capset_info() local
804 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
811 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
820 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_capset() local
879 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
886 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
894 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_get_edids() local
908 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
913 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
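
The get_display_info/get_capset_info/get_capset/get_edid pairs show the query pattern: the emitter allocates the command with virtio_gpu_alloc_cmd_resp(), passing a response callback, a response size and a kzalloc'd response buffer, and the callback later reads both the original request (vbuf->buf) and the device's answer (vbuf->resp_buf). A hedged sketch modelled on the capset-info pair; the locking around vgdev->capsets in the real driver is elided and the exact fields stored are assumptions:

static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev,
                                              struct virtio_gpu_vbuffer *vbuf)
{
        struct virtio_gpu_get_capset_info *cmd =
                (struct virtio_gpu_get_capset_info *)vbuf->buf;
        struct virtio_gpu_resp_capset_info *resp =
                (struct virtio_gpu_resp_capset_info *)vbuf->resp_buf;
        int i = le32_to_cpu(cmd->capset_index);

        /* stash the answer and wake anyone waiting on the response */
        vgdev->capsets[i].id = le32_to_cpu(resp->capset_id);
        wake_up(&vgdev->resp_wq);
}

void virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx)
{
        struct virtio_gpu_get_capset_info *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;
        void *resp_buf;

        /* out-of-line response buffer; free_vbuf() kfree()s it after
         * the callback has run */
        resp_buf = kzalloc(sizeof(struct virtio_gpu_resp_capset_info), GFP_KERNEL);
        if (!resp_buf)
                return;

        cmd_p = virtio_gpu_alloc_cmd_resp(vgdev,
                                          &virtio_gpu_cmd_get_capset_info_cb,
                                          &vbuf, sizeof(*cmd_p),
                                          sizeof(struct virtio_gpu_resp_capset_info),
                                          resp_buf);
        memset(cmd_p, 0, sizeof(*cmd_p));

        cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_GET_CAPSET_INFO);
        cmd_p->capset_index = cpu_to_le32(idx);
        virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
}
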
923 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_create() local
925 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
933 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
940 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_destroy() local
942 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
947 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
956 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_attach_resource() local
958 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
960 vbuf->objs = objs; in virtio_gpu_cmd_context_attach_resource()
965 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
974 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_context_detach_resource() local
976 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
978 vbuf->objs = objs; in virtio_gpu_cmd_context_detach_resource()
983 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
994 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_create_3d() local
996 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
998 vbuf->objs = objs; in virtio_gpu_cmd_resource_create_3d()
1014 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1030 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_to_host_3d() local
1039 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1042 vbuf->objs = objs; in virtio_gpu_cmd_transfer_to_host_3d()
1053 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1067 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_transfer_from_host_3d() local
1069 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1072 vbuf->objs = objs; in virtio_gpu_cmd_transfer_from_host_3d()
1083 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1093 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_submit() local
1095 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1098 vbuf->data_buf = data; in virtio_gpu_cmd_submit()
1099 vbuf->data_size = data_size; in virtio_gpu_cmd_submit()
1100 vbuf->objs = objs; in virtio_gpu_cmd_submit()
1106 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
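
virtio_gpu_cmd_submit() ties the pieces together: the execbuffer command stream becomes the vbuffer's data payload (typically vmalloc memory, hence the vmalloc_to_sgt() path above), the object array and fence are attached, and the whole thing is queued fenced. A rough sketch; the parameter list is an assumption and the SUBMIT_3D field names come from the virtio-gpu uapi:

void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev,
                           void *data, uint32_t data_size,
                           uint32_t ctx_id,
                           struct virtio_gpu_object_array *objs,
                           struct virtio_gpu_fence *fence)
{
        struct virtio_gpu_cmd_submit *cmd_p;
        struct virtio_gpu_vbuffer *vbuf;

        cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
        memset(cmd_p, 0, sizeof(*cmd_p));

        /* the command stream rides along as the data payload and is
         * kvfree()d by free_vbuf() on completion */
        vbuf->data_buf = data;
        vbuf->data_size = data_size;
        vbuf->objs = objs;

        cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_SUBMIT_3D);
        cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
        cmd_p->size = cpu_to_le32(data_size);

        virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence);
}
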
1121 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cursor_ping() local
1125 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1127 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
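
virtio_gpu_cursor_ping() is the lone user of the cursor path: the payload comes from virtio_gpu_alloc_cursor() and virtio_gpu_queue_cursor() pushes it onto the separate cursor virtqueue, with no response, fence or object array involved. A minimal sketch, assuming the per-output cursor state is prepared by the plane code:

void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev,
                            struct virtio_gpu_output *output)
{
        struct virtio_gpu_update_cursor *cur_p;
        struct virtio_gpu_vbuffer *vbuf;

        output->cursor.pos.scanout_id = cpu_to_le32(output->index);
        cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf);
        /* copy the prepared cursor update into the vbuffer's inline payload */
        memcpy(cur_p, &output->cursor, sizeof(output->cursor));
        virtio_gpu_queue_cursor(vgdev, vbuf);
}
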
1131 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_resource_uuid_cb() argument
1134 gem_to_virtio_gpu_obj(vbuf->objs->objs[0]); in virtio_gpu_cmd_resource_uuid_cb()
1136 (struct virtio_gpu_resp_resource_uuid *)vbuf->resp_buf; in virtio_gpu_cmd_resource_uuid_cb()
1160 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_assign_uuid() local
1173 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1180 vbuf->objs = objs; in virtio_gpu_cmd_resource_assign_uuid()
1181 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()
1186 struct virtio_gpu_vbuffer *vbuf) in virtio_gpu_cmd_resource_map_cb() argument
1189 gem_to_virtio_gpu_obj(vbuf->objs->objs[0]); in virtio_gpu_cmd_resource_map_cb()
1191 (struct virtio_gpu_resp_map_info *)vbuf->resp_buf; in virtio_gpu_cmd_resource_map_cb()
1213 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_map() local
1221 (vgdev, virtio_gpu_cmd_resource_map_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_map()
1228 vbuf->objs = objs; in virtio_gpu_cmd_map()
1230 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_map()
1238 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_unmap() local
1240 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_unmap()
1246 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unmap()
1257 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_resource_create_blob() local
1259 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_blob()
1271 vbuf->data_buf = ents; in virtio_gpu_cmd_resource_create_blob()
1272 vbuf->data_size = sizeof(*ents) * nents; in virtio_gpu_cmd_resource_create_blob()
1274 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_create_blob()
1287 struct virtio_gpu_vbuffer *vbuf; in virtio_gpu_cmd_set_scanout_blob() local
1290 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout_blob()
1311 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout_blob()