Lines matching refs: vgdev (all hits are in the virtio-gpu DRM driver's virtqueue code, drivers/gpu/drm/virtio/virtgpu_vq.c)
59 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_ctrl_ack() local
61 schedule_work(&vgdev->ctrlq.dequeue_work); in virtio_gpu_ctrl_ack()
67 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_cursor_ack() local
69 schedule_work(&vgdev->cursorq.dequeue_work); in virtio_gpu_cursor_ack()
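
The two ack handlers above (lines 59-69) are the virtqueue callbacks; they run from the virtio interrupt path, so they only schedule the per-queue dequeue work and return. A minimal sketch of that pattern with stand-in types (the real driver stashes its drm_device in vdev->priv and reaches vgdev through it):

    #include <linux/virtio.h>
    #include <linux/workqueue.h>

    struct demo_queue {
            struct virtqueue *vq;
            struct work_struct dequeue_work;
    };

    /* virtqueue callback: interrupt context, so defer the real work */
    static void demo_vq_ack(struct virtqueue *vq)
    {
            struct demo_queue *q = vq->vdev->priv;  /* assumption: priv holds our queue */

            schedule_work(&q->dequeue_work);
    }
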
72 int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_alloc_vbufs() argument
74 vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs", in virtio_gpu_alloc_vbufs()
78 if (!vgdev->vbufs) in virtio_gpu_alloc_vbufs()
83 void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_free_vbufs() argument
85 kmem_cache_destroy(vgdev->vbufs); in virtio_gpu_free_vbufs()
86 vgdev->vbufs = NULL; in virtio_gpu_free_vbufs()
90 virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev, in virtio_gpu_get_vbuf() argument
96 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL | __GFP_NOFAIL); in virtio_gpu_get_vbuf()
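
Lines 72-96 manage the lifetime of the command buffers ("vbufs") through a dedicated slab cache; __GFP_NOFAIL reflects the fact that command submission has no error path for an allocation failure. A compilable sketch of that lifecycle, assuming a simplified vbuffer type:

    #include <linux/errno.h>
    #include <linux/slab.h>

    struct demo_vbuffer {
            unsigned char inline_buf[128];  /* stand-in for cmd + resp storage */
    };

    static struct kmem_cache *demo_vbufs;

    static int demo_alloc_vbufs(void)
    {
            demo_vbufs = KMEM_CACHE(demo_vbuffer, 0);  /* like kmem_cache_create() above */
            return demo_vbufs ? 0 : -ENOMEM;
    }

    static struct demo_vbuffer *demo_get_vbuf(void)
    {
            /* __GFP_NOFAIL: may block, but never returns NULL */
            return kmem_cache_zalloc(demo_vbufs, GFP_KERNEL | __GFP_NOFAIL);
    }

    static void demo_free_vbufs(void)
    {
            kmem_cache_destroy(demo_vbufs);
            demo_vbufs = NULL;
    }
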
124 virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cursor() argument
130 (vgdev, sizeof(struct virtio_gpu_update_cursor), in virtio_gpu_alloc_cursor()
140 static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_resp() argument
148 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
154 static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd() argument
158 return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size, in virtio_gpu_alloc_cmd()
163 static void *virtio_gpu_alloc_cmd_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_cb() argument
168 return virtio_gpu_alloc_cmd_resp(vgdev, cb, vbuffer_p, size, in virtio_gpu_alloc_cmd_cb()
173 static void free_vbuf(struct virtio_gpu_device *vgdev, in free_vbuf() argument
179 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
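
The allocator wrappers at lines 124-168 are thin layers over virtio_gpu_get_vbuf(): virtio_gpu_alloc_cmd() requests a command with a bare response header and no callback, virtio_gpu_alloc_cmd_cb() attaches a completion callback, and virtio_gpu_alloc_cursor() sizes the buffer for a cursor update. A hedged sketch of the layering with simplified stand-in types (the real vbuffer shares one slab object for command and response storage, and also tracks attached objects):

    #include <linux/slab.h>

    struct demo_vbuffer {
            void *buf;                              /* inline command storage */
            int resp_size;
            void (*resp_cb)(struct demo_vbuffer *); /* NULL for fire-and-forget */
    };

    static void *demo_alloc_cmd_resp(void (*cb)(struct demo_vbuffer *),
                                     struct demo_vbuffer **vbuffer_p,
                                     int cmd_size, int resp_size)
    {
            struct demo_vbuffer *vbuf;

            vbuf = kzalloc(sizeof(*vbuf), GFP_KERNEL | __GFP_NOFAIL);
            vbuf->buf = kzalloc(cmd_size, GFP_KERNEL | __GFP_NOFAIL);
            vbuf->resp_size = resp_size;
            vbuf->resp_cb = cb;
            *vbuffer_p = vbuf;
            return vbuf->buf;       /* caller fills the command in place */
    }

    static void *demo_alloc_cmd(struct demo_vbuffer **vbuffer_p, int size)
    {
            return demo_alloc_cmd_resp(NULL, vbuffer_p, size, 0);
    }
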
198 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_ctrl_func() local
207 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
209 virtqueue_disable_cb(vgdev->ctrlq.vq); in virtio_gpu_dequeue_ctrl_func()
210 reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list); in virtio_gpu_dequeue_ctrl_func()
212 } while (!virtqueue_enable_cb(vgdev->ctrlq.vq)); in virtio_gpu_dequeue_ctrl_func()
213 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
218 trace_virtio_gpu_cmd_response(vgdev->ctrlq.vq, resp); in virtio_gpu_dequeue_ctrl_func()
232 virtio_gpu_fence_event_process(vgdev, fence_id); in virtio_gpu_dequeue_ctrl_func()
235 entry->resp_cb(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
237 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
241 virtio_gpu_array_put_free_delayed(vgdev, entry->objs); in virtio_gpu_dequeue_ctrl_func()
243 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
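
virtio_gpu_dequeue_ctrl_func() (lines 198-243) is the work item scheduled by the ack handler: with the queue lock held it drains completed buffers into a local list, looping until virtqueue_enable_cb() confirms no completion raced in, then handles fences, response callbacks, and frees outside the lock. A sketch of that reclaim loop with stand-in types:

    #include <linux/kernel.h>
    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <linux/virtio.h>
    #include <linux/workqueue.h>

    struct demo_vbuf {
            struct list_head list;
    };

    struct demo_queue {
            struct virtqueue *vq;
            spinlock_t qlock;
            struct work_struct dequeue_work;
    };

    static void demo_dequeue_func(struct work_struct *work)
    {
            struct demo_queue *q =
                    container_of(work, struct demo_queue, dequeue_work);
            struct demo_vbuf *vbuf;
            unsigned int len;
            LIST_HEAD(reclaim_list);

            spin_lock(&q->qlock);
            do {
                    virtqueue_disable_cb(q->vq);
                    while ((vbuf = virtqueue_get_buf(q->vq, &len)))
                            list_add_tail(&vbuf->list, &reclaim_list);
            } while (!virtqueue_enable_cb(q->vq));
            spin_unlock(&q->qlock);

            /* outside the lock: run resp_cb handlers, free the vbufs, and
             * wake submitters sleeping on the ack queue (line 237) */
    }
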
249 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_cursor_func() local
256 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
258 virtqueue_disable_cb(vgdev->cursorq.vq); in virtio_gpu_dequeue_cursor_func()
259 reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list); in virtio_gpu_dequeue_cursor_func()
260 } while (!virtqueue_enable_cb(vgdev->cursorq.vq)); in virtio_gpu_dequeue_cursor_func()
261 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
265 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_cursor_func()
267 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
310 static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_sgs() argument
318 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_ctrl_sgs()
321 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_ctrl_sgs()
324 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
328 if (vgdev->has_indirect) in virtio_gpu_queue_ctrl_sgs()
332 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
335 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
336 virtio_gpu_notify(vgdev); in virtio_gpu_queue_ctrl_sgs()
337 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt); in virtio_gpu_queue_ctrl_sgs()
345 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
358 atomic_inc(&vgdev->pending_commands); in virtio_gpu_queue_ctrl_sgs()
360 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
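
virtio_gpu_queue_ctrl_sgs() (lines 310-360) is where descriptor-space flow control happens: if the ring lacks room for elemcnt descriptors, it drops the lock, kicks the device, and sleeps on ack_queue until the dequeue work reclaims enough entries. A sketch of that loop with stand-in types, omitting the fence and indirect-descriptor details:

    #include <linux/gfp.h>
    #include <linux/scatterlist.h>
    #include <linux/spinlock.h>
    #include <linux/virtio.h>
    #include <linux/wait.h>

    struct demo_ctrlq {
            struct virtqueue *vq;
            spinlock_t qlock;
            wait_queue_head_t ack_queue;
    };

    static int demo_queue_sgs(struct demo_ctrlq *q, void *vbuf,
                              struct scatterlist **sgs, int elemcnt,
                              int outcnt, int incnt)
    {
            bool notify;
            int ret;

            spin_lock(&q->qlock);
            while (q->vq->num_free < elemcnt) {
                    /* ring full: flush pending work, sleep until the
                     * dequeue side frees enough descriptors, retry */
                    notify = virtqueue_kick_prepare(q->vq);
                    spin_unlock(&q->qlock);
                    if (notify)
                            virtqueue_notify(q->vq);
                    wait_event(q->ack_queue, q->vq->num_free >= elemcnt);
                    spin_lock(&q->qlock);
            }
            ret = virtqueue_add_sgs(q->vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
            spin_unlock(&q->qlock);
            return ret;
    }
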
366 static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_fenced_ctrl_buffer() argument
410 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
420 void virtio_gpu_notify(struct virtio_gpu_device *vgdev) in virtio_gpu_notify() argument
424 if (!atomic_read(&vgdev->pending_commands)) in virtio_gpu_notify()
427 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
428 atomic_set(&vgdev->pending_commands, 0); in virtio_gpu_notify()
429 notify = virtqueue_kick_prepare(vgdev->ctrlq.vq); in virtio_gpu_notify()
430 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
433 virtqueue_notify(vgdev->ctrlq.vq); in virtio_gpu_notify()
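
virtio_gpu_notify() (lines 420-433) implements batched doorbells: submitters only atomic_inc() pending_commands after adding their descriptors (line 358), and the actual device notification is deferred to this function, so a burst of commands costs a single exit. A sketch with a stand-in queue type:

    #include <linux/atomic.h>
    #include <linux/spinlock.h>
    #include <linux/virtio.h>

    struct demo_notifyq {
            struct virtqueue *vq;
            spinlock_t qlock;
            atomic_t pending_commands;
    };

    static void demo_notify(struct demo_notifyq *q)
    {
            bool notify;

            if (!atomic_read(&q->pending_commands))
                    return;         /* nothing queued since the last kick */

            spin_lock(&q->qlock);
            atomic_set(&q->pending_commands, 0);
            notify = virtqueue_kick_prepare(q->vq);
            spin_unlock(&q->qlock);

            if (notify)
                    virtqueue_notify(q->vq);
    }
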
436 static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer() argument
439 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
442 static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_cursor() argument
445 struct virtqueue *vq = vgdev->cursorq.vq; in virtio_gpu_queue_cursor()
450 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_cursor()
451 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
459 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
463 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
464 wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt); in virtio_gpu_queue_cursor()
465 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
474 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
487 void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_create_resource() argument
496 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
506 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
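
From virtio_gpu_cmd_create_resource() (line 487) onward, the command helpers all share one shape: allocate a typed command from a fresh vbuf, fill the little-endian wire fields, and queue it, fenced where the caller passed a fence. A sketch of the fill step using the real wire format from include/uapi/linux/virtio_gpu.h (the alloc and queue helpers are the ones listed above):

    #include <asm/byteorder.h>
    #include <linux/types.h>
    #include <linux/virtio_gpu.h>

    static void demo_fill_create_2d(struct virtio_gpu_resource_create_2d *cmd_p,
                                    u32 resource_id, u32 format,
                                    u32 width, u32 height)
    {
            cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_CREATE_2D);
            cmd_p->resource_id = cpu_to_le32(resource_id);
            cmd_p->format = cpu_to_le32(format);
            cmd_p->width = cpu_to_le32(width);
            cmd_p->height = cpu_to_le32(height);
    }
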
510 static void virtio_gpu_cmd_unref_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_cb() argument
521 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_resource() argument
528 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
536 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
541 void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout() argument
549 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
560 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
563 void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_flush() argument
573 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
584 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_flush()
587 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_2d() argument
597 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_2d()
600 dma_sync_sgtable_for_device(&vgdev->vdev->dev, in virtio_gpu_cmd_transfer_to_host_2d()
603 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
615 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
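
The transfer commands (lines 587-615 here, and the 3D variant at line 1007) do cache maintenance before queueing: when the transport honors the platform's DMA constraints (virtio_has_dma_quirk() returns false), guest-side writes to the backing pages must be synced for the device first. A sketch of that check:

    #include <linux/dma-mapping.h>
    #include <linux/virtio.h>
    #include <linux/virtio_config.h>

    static void demo_sync_for_device(struct virtio_device *vdev,
                                     struct sg_table *pages)
    {
            bool use_dma_api = !virtio_has_dma_quirk(vdev);

            if (use_dma_api)
                    dma_sync_sgtable_for_device(&vdev->dev, pages,
                                                DMA_TO_DEVICE);
    }
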
619 virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_attach_backing() argument
628 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
638 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
641 static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_display_info_cb() argument
648 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
649 for (i = 0; i < vgdev->num_scanouts; i++) { in virtio_gpu_cmd_get_display_info_cb()
650 vgdev->outputs[i].info = resp->pmodes[i]; in virtio_gpu_cmd_get_display_info_cb()
662 vgdev->display_info_pending = false; in virtio_gpu_cmd_get_display_info_cb()
663 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
664 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_display_info_cb()
666 if (!drm_helper_hpd_irq_event(vgdev->ddev)) in virtio_gpu_cmd_get_display_info_cb()
667 drm_kms_helper_hotplug_event(vgdev->ddev); in virtio_gpu_cmd_get_display_info_cb()
670 static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset_info_cb() argument
679 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
680 if (vgdev->capsets) { in virtio_gpu_cmd_get_capset_info_cb()
681 vgdev->capsets[i].id = le32_to_cpu(resp->capset_id); in virtio_gpu_cmd_get_capset_info_cb()
682 vgdev->capsets[i].max_version = le32_to_cpu(resp->capset_max_version); in virtio_gpu_cmd_get_capset_info_cb()
683 vgdev->capsets[i].max_size = le32_to_cpu(resp->capset_max_size); in virtio_gpu_cmd_get_capset_info_cb()
687 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
688 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_capset_info_cb()
691 static void virtio_gpu_cmd_capset_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_capset_cb() argument
700 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
701 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
712 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
713 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_capset_cb()
728 static void virtio_gpu_cmd_get_edid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_edid_cb() argument
739 if (scanout >= vgdev->num_scanouts) in virtio_gpu_cmd_get_edid_cb()
741 output = vgdev->outputs + scanout; in virtio_gpu_cmd_get_edid_cb()
746 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
749 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
752 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_edid_cb()
755 int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_display_info() argument
767 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
772 vgdev->display_info_pending = true; in virtio_gpu_cmd_get_display_info()
774 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
778 int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx) in virtio_gpu_cmd_get_capset_info() argument
790 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
797 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
801 int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset() argument
814 if (idx >= vgdev->num_capsets) in virtio_gpu_cmd_get_capset()
817 if (version > vgdev->capsets[idx].max_version) in virtio_gpu_cmd_get_capset()
824 max_size = vgdev->capsets[idx].max_size; in virtio_gpu_cmd_get_capset()
840 cache_ent->id = vgdev->capsets[idx].id; in virtio_gpu_cmd_get_capset()
843 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
845 list_for_each_entry(search_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_get_capset()
846 if (search_ent->id == vgdev->capsets[idx].id && in virtio_gpu_cmd_get_capset()
853 list_add_tail(&cache_ent->head, &vgdev->cap_cache); in virtio_gpu_cmd_get_capset()
854 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
865 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
869 cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id); in virtio_gpu_cmd_get_capset()
872 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
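
virtio_gpu_cmd_get_capset() (lines 801-872) keeps a cache of capset responses: it allocates a candidate entry up front, then, under display_info_lock, either adopts an existing entry with the same id and version (discarding the candidate) or links the candidate into cap_cache. A sketch of that insert-or-reuse step with stand-in types (the real entry also carries the response buffer and a validity flag):

    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct demo_cap_ent {
            struct list_head head;
            u32 id;
            u32 version;
    };

    static struct demo_cap_ent *demo_cache_capset(struct list_head *cache,
                                                  spinlock_t *lock,
                                                  u32 id, u32 version)
    {
            struct demo_cap_ent *ent, *search;

            ent = kzalloc(sizeof(*ent), GFP_KERNEL);
            if (!ent)
                    return NULL;
            ent->id = id;
            ent->version = version;

            spin_lock(lock);
            list_for_each_entry(search, cache, head) {
                    if (search->id == id && search->version == version) {
                            spin_unlock(lock);
                            kfree(ent);     /* an earlier caller won the race */
                            return search;
                    }
            }
            list_add_tail(&ent->head, cache);
            spin_unlock(lock);
            return ent;
    }
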
877 int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_edids() argument
884 if (WARN_ON(!vgdev->has_edid)) in virtio_gpu_cmd_get_edids()
887 for (scanout = 0; scanout < vgdev->num_scanouts; scanout++) { in virtio_gpu_cmd_get_edids()
894 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
899 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
905 void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id, in virtio_gpu_cmd_context_create() argument
912 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
921 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
924 void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_destroy() argument
930 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
935 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
938 void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_attach_resource() argument
946 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
953 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
956 void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_detach_resource() argument
964 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
971 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
975 virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_3d() argument
984 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
1002 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1007 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_3d() argument
1019 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_3d()
1022 dma_sync_sgtable_for_device(&vgdev->vdev->dev, in virtio_gpu_cmd_transfer_to_host_3d()
1025 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1039 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1042 void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_from_host_3d() argument
1055 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1069 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1072 void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_submit() argument
1081 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1092 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
1095 void virtio_gpu_object_attach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_attach() argument
1100 virtio_gpu_cmd_resource_attach_backing(vgdev, obj->hw_res_handle, in virtio_gpu_object_attach()
1104 void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev, in virtio_gpu_cursor_ping() argument
1111 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1113 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
1116 static void virtio_gpu_cmd_resource_uuid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_uuid_cb() argument
1125 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1135 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1137 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_uuid_cb()
1141 virtio_gpu_cmd_resource_assign_uuid(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_assign_uuid() argument
1151 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1153 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1159 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1167 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()
1171 static void virtio_gpu_cmd_resource_map_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_map_cb() argument
1181 spin_lock(&vgdev->host_visible_lock); in virtio_gpu_cmd_resource_map_cb()
1190 spin_unlock(&vgdev->host_visible_lock); in virtio_gpu_cmd_resource_map_cb()
1191 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_map_cb()
1194 int virtio_gpu_cmd_map(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_map() argument
1207 (vgdev, virtio_gpu_cmd_resource_map_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_map()
1216 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_map()
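
virtio_gpu_cmd_map(), like the uuid and capset queries above, is asynchronous: the submitter queues the command with a response callback and then sleeps on resp_wq until that callback, running from the dequeue work, publishes the result and wakes it. A hedged sketch of that rendezvous with stand-in types (the driver itself guards the shared state with its host_visible and resource_export locks):

    #include <linux/compiler.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>
    #include <linux/wait.h>

    struct demo_reply {
            spinlock_t lock;
            bool done;
            u32 value;
    };

    /* dequeue-work side: called when the device's answer is reclaimed */
    static void demo_resp_cb(wait_queue_head_t *resp_wq,
                             struct demo_reply *r, u32 value)
    {
            spin_lock(&r->lock);
            r->value = value;
            r->done = true;
            spin_unlock(&r->lock);
            wake_up_all(resp_wq);
    }

    /* submitter side: block until the callback has fired */
    static u32 demo_wait_reply(wait_queue_head_t *resp_wq,
                               struct demo_reply *r)
    {
            wait_event(*resp_wq, READ_ONCE(r->done));
            return r->value;
    }
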
1220 void virtio_gpu_cmd_unmap(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unmap() argument
1226 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_unmap()
1232 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unmap()
1236 virtio_gpu_cmd_resource_create_blob(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_blob() argument
1245 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_blob()
1260 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_create_blob()
1264 void virtio_gpu_cmd_set_scanout_blob(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout_blob() argument
1276 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout_blob()
1297 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout_blob()