Lines matching refs: vgdev

57 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_ctrl_ack() local
59 schedule_work(&vgdev->ctrlq.dequeue_work); in virtio_gpu_ctrl_ack()
65 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_cursor_ack() local
67 schedule_work(&vgdev->cursorq.dequeue_work); in virtio_gpu_cursor_ack()
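Lines 57-67 are the two virtqueue interrupt callbacks. Each does the minimum possible in interrupt context, namely look up the device and schedule that queue's dequeue work, leaving buffer reclaim to a workqueue. A minimal sketch of the deferral pattern, with hypothetical names (my_dev, my_queue) standing in for the driver's actual structures:

    #include <linux/virtio.h>
    #include <linux/workqueue.h>

    struct my_queue {
            struct virtqueue *vq;
            struct work_struct dequeue_work;  /* drains vq in process context */
    };

    struct my_dev {
            struct my_queue ctrlq;
            struct my_queue cursorq;
    };

    /* virtqueue callback: interrupt context, so just punt to the worker */
    static void my_ctrl_ack(struct virtqueue *vq)
    {
            struct my_dev *d = vq->vdev->priv;  /* assumes priv holds my_dev */

            schedule_work(&d->ctrlq.dequeue_work);
    }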
70 int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_alloc_vbufs() argument
72 vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs", in virtio_gpu_alloc_vbufs()
76 if (!vgdev->vbufs) in virtio_gpu_alloc_vbufs()
81 void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_free_vbufs() argument
83 kmem_cache_destroy(vgdev->vbufs); in virtio_gpu_free_vbufs()
84 vgdev->vbufs = NULL; in virtio_gpu_free_vbufs()
88 virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev, in virtio_gpu_get_vbuf() argument
94 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL); in virtio_gpu_get_vbuf()
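Lines 70-94 cover the full lifetime of the vbuf slab cache: kmem_cache_create() once at init, kmem_cache_zalloc() for zeroed per-command buffers, and kmem_cache_free()/kmem_cache_destroy() on the way out. A self-contained sketch of that lifecycle (struct layout and cache name are illustrative, not the driver's):

    #include <linux/slab.h>

    struct my_vbuf {
            char buf[128];            /* illustrative per-command payload */
    };

    static struct kmem_cache *vbuf_cache;

    static int vbuf_cache_init(void)
    {
            vbuf_cache = kmem_cache_create("my-vbufs", sizeof(struct my_vbuf),
                                           __alignof__(struct my_vbuf), 0, NULL);
            return vbuf_cache ? 0 : -ENOMEM;
    }

    static struct my_vbuf *vbuf_get(void)
    {
            return kmem_cache_zalloc(vbuf_cache, GFP_KERNEL);  /* zeroed */
    }

    static void vbuf_put(struct my_vbuf *vbuf)
    {
            kmem_cache_free(vbuf_cache, vbuf);
    }

    static void vbuf_cache_fini(void)
    {
            kmem_cache_destroy(vbuf_cache);  /* NULL-safe */
            vbuf_cache = NULL;
    }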
124 virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cursor() argument
130 (vgdev, sizeof(struct virtio_gpu_update_cursor), in virtio_gpu_alloc_cursor()
140 static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_resp() argument
148 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
158 static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd() argument
162 return virtio_gpu_alloc_cmd_resp(vgdev, NULL, vbuffer_p, size, in virtio_gpu_alloc_cmd()
167 static void *virtio_gpu_alloc_cmd_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_cb() argument
172 return virtio_gpu_alloc_cmd_resp(vgdev, cb, vbuffer_p, size, in virtio_gpu_alloc_cmd_cb()
177 static void free_vbuf(struct virtio_gpu_device *vgdev, in free_vbuf() argument
183 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
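Lines 124-183 are thin allocation wrappers that all funnel into virtio_gpu_get_vbuf(): the cursor variant fixes the size to a cursor update, the _cb variant attaches a response callback, and the plain command variant passes NULL for both. Roughly, with simplified hypothetical signatures reusing the my_* types above (get_vbuf() stands for the kmem_cache_zalloc() path just shown):

    typedef void (*resp_cb_t)(struct my_dev *d, struct my_vbuf *vbuf);

    static struct my_vbuf *get_vbuf(struct my_dev *d, int cmd_size,
                                    int resp_size, resp_cb_t cb);

    static void *alloc_cmd_resp(struct my_dev *d, resp_cb_t cb,
                                struct my_vbuf **vbuf_p,
                                int cmd_size, int resp_size)
    {
            struct my_vbuf *vbuf = get_vbuf(d, cmd_size, resp_size, cb);

            *vbuf_p = vbuf;
            return vbuf ? vbuf->buf : NULL;  /* caller fills in the command */
    }

    static void *alloc_cmd(struct my_dev *d, struct my_vbuf **vbuf_p, int size)
    {
            /* no response payload, no completion callback */
            return alloc_cmd_resp(d, NULL, vbuf_p, size, 0);
    }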
202 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_ctrl_func() local
211 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
213 virtqueue_disable_cb(vgdev->ctrlq.vq); in virtio_gpu_dequeue_ctrl_func()
214 reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list); in virtio_gpu_dequeue_ctrl_func()
216 } while (!virtqueue_enable_cb(vgdev->ctrlq.vq)); in virtio_gpu_dequeue_ctrl_func()
217 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
222 trace_virtio_gpu_cmd_response(vgdev->ctrlq.vq, resp); in virtio_gpu_dequeue_ctrl_func()
245 entry->resp_cb(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
247 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
250 virtio_gpu_fence_event_process(vgdev, fence_id); in virtio_gpu_dequeue_ctrl_func()
254 virtio_gpu_array_put_free_delayed(vgdev, entry->objs); in virtio_gpu_dequeue_ctrl_func()
256 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
262 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_cursor_func() local
269 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
271 virtqueue_disable_cb(vgdev->cursorq.vq); in virtio_gpu_dequeue_cursor_func()
272 reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list); in virtio_gpu_dequeue_cursor_func()
273 } while (!virtqueue_enable_cb(vgdev->cursorq.vq)); in virtio_gpu_dequeue_cursor_func()
274 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
278 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_cursor_func()
280 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
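Lines 202-280: both dequeue workers share the same drain loop. Callbacks are disabled, every completed buffer is pulled off the ring, and the loop repeats until virtqueue_enable_cb() returns true; a false return means a buffer arrived while callbacks were off, so the ring must be re-scanned. Completions (response callbacks, frees, waking writers blocked on ring space) run after the lock is dropped. A sketch reusing the hypothetical types above:

    /* assumed extra fields: my_vbuf gains a list_head 'list' and resp_cb;
     * my_queue gains spinlock_t qlock, wait_queue_head_t ack_queue, and a
     * back pointer 'dev' to the owning my_dev */
    static void my_dequeue_work(struct work_struct *work)
    {
            struct my_queue *q = container_of(work, struct my_queue,
                                              dequeue_work);
            struct my_vbuf *vbuf, *tmp;
            unsigned int len;
            LIST_HEAD(reclaim_list);

            spin_lock(&q->qlock);
            do {
                    virtqueue_disable_cb(q->vq);
                    while ((vbuf = virtqueue_get_buf(q->vq, &len)) != NULL)
                            list_add_tail(&vbuf->list, &reclaim_list);
            } while (!virtqueue_enable_cb(q->vq));  /* false: re-scan ring */
            spin_unlock(&q->qlock);

            /* run completions outside the queue lock */
            list_for_each_entry_safe(vbuf, tmp, &reclaim_list, list) {
                    if (vbuf->resp_cb)
                            vbuf->resp_cb(q->dev, vbuf);
                    list_del(&vbuf->list);
                    vbuf_put(vbuf);          /* kmem_cache_free, see above */
            }
            wake_up(&q->ack_queue);  /* writers waiting for ring space */
    }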
323 static int virtio_gpu_queue_ctrl_sgs(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_sgs() argument
331 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_ctrl_sgs()
334 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_ctrl_sgs()
337 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_ctrl_sgs()
341 if (vgdev->has_indirect) in virtio_gpu_queue_ctrl_sgs()
345 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
348 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
349 virtio_gpu_notify(vgdev); in virtio_gpu_queue_ctrl_sgs()
350 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt); in virtio_gpu_queue_ctrl_sgs()
358 virtio_gpu_fence_emit(vgdev, virtio_gpu_vbuf_ctrl_hdr(vbuf), in virtio_gpu_queue_ctrl_sgs()
371 atomic_inc(&vgdev->pending_commands); in virtio_gpu_queue_ctrl_sgs()
373 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_sgs()
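Lines 323-373 queue a control command. If the device is unplugged (drm_dev_enter() fails) the buffer is freed and the call bails out; if the ring lacks elemcnt free descriptors, the lock is dropped, pending commands are kicked, and the caller sleeps on ack_queue until the dequeue worker reclaims space. Note that the fence, when present, is emitted under the same qlock that adds the buffer, so fence sequence numbers match ring order. A condensed sketch with error paths trimmed (my_notify() is sketched next; pending_commands is an assumed atomic_t in my_dev):

    static int my_queue_ctrl_sgs(struct my_dev *d, struct my_vbuf *vbuf,
                                 int elemcnt, struct scatterlist **sgs,
                                 int outcnt, int incnt)
    {
            struct virtqueue *vq = d->ctrlq.vq;
            int ret;

    again:
            spin_lock(&d->ctrlq.qlock);
            if (vq->num_free < elemcnt) {
                    /* ring full: kick what's pending, wait for reclaim */
                    spin_unlock(&d->ctrlq.qlock);
                    my_notify(d);
                    wait_event(d->ctrlq.ack_queue, vq->num_free >= elemcnt);
                    goto again;
            }

            /* a fence, if attached, would be emitted here, under the same
             * lock as add_sgs, so seqno order matches ring order */
            ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
            atomic_inc(&d->pending_commands);  /* batched kick, next sketch */
            spin_unlock(&d->ctrlq.qlock);
            return ret;
    }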
379 static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_fenced_ctrl_buffer() argument
423 ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt, in virtio_gpu_queue_fenced_ctrl_buffer()
433 void virtio_gpu_notify(struct virtio_gpu_device *vgdev) in virtio_gpu_notify() argument
437 if (!atomic_read(&vgdev->pending_commands)) in virtio_gpu_notify()
440 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
441 atomic_set(&vgdev->pending_commands, 0); in virtio_gpu_notify()
442 notify = virtqueue_kick_prepare(vgdev->ctrlq.vq); in virtio_gpu_notify()
443 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_notify()
446 virtqueue_notify(vgdev->ctrlq.vq); in virtio_gpu_notify()
449 static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer() argument
452 return virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, NULL); in virtio_gpu_queue_ctrl_buffer()
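Lines 433-452 are the batching half: submitters only increment pending_commands, and virtio_gpu_notify() turns any number of queued commands into at most one host notification. virtqueue_kick_prepare() (cheap) runs under the lock; virtqueue_notify() (a potential VM exit) runs outside it. Sketch:

    static void my_notify(struct my_dev *d)
    {
            bool notify;

            if (!atomic_read(&d->pending_commands))
                    return;          /* nothing queued since the last kick */

            spin_lock(&d->ctrlq.qlock);
            atomic_set(&d->pending_commands, 0);
            notify = virtqueue_kick_prepare(d->ctrlq.vq);  /* cheap, locked */
            spin_unlock(&d->ctrlq.qlock);

            if (notify)
                    virtqueue_notify(d->ctrlq.vq);  /* may trap: unlocked */
    }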
455 static void virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_cursor() argument
458 struct virtqueue *vq = vgdev->cursorq.vq; in virtio_gpu_queue_cursor()
463 if (!drm_dev_enter(vgdev->ddev, &idx)) { in virtio_gpu_queue_cursor()
464 free_vbuf(vgdev, vbuf); in virtio_gpu_queue_cursor()
472 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
476 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
477 wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt); in virtio_gpu_queue_cursor()
478 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
487 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
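Lines 455-487: the cursor path differs in two ways. It retries virtqueue_add_sgs() directly on -ENOSPC after sleeping for ring space, and it kicks the host immediately rather than batching, a reasonable choice for rare, latency-sensitive cursor updates. Sketch with a single out-sg (vbuf->size is an assumed payload-length field):

    static void my_queue_cursor(struct my_dev *d, struct my_vbuf *vbuf)
    {
            struct virtqueue *vq = d->cursorq.vq;
            struct scatterlist sg;
            struct scatterlist *sgs[1] = { &sg };
            bool notify;
            int ret;

            sg_init_one(&sg, vbuf->buf, vbuf->size);

            spin_lock(&d->cursorq.qlock);
    retry:
            ret = virtqueue_add_sgs(vq, sgs, 1, 0, vbuf, GFP_ATOMIC);
            if (ret == -ENOSPC) {
                    spin_unlock(&d->cursorq.qlock);
                    wait_event(d->cursorq.ack_queue, vq->num_free >= 1);
                    spin_lock(&d->cursorq.qlock);
                    goto retry;
            }
            notify = virtqueue_kick_prepare(vq);  /* kick per update */
            spin_unlock(&d->cursorq.qlock);

            if (notify)
                    virtqueue_notify(vq);
    }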
500 void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_create_resource() argument
509 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
519 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_create_resource()
523 static void virtio_gpu_cmd_unref_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_cb() argument
534 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_resource() argument
541 cmd_p = virtio_gpu_alloc_cmd_cb(vgdev, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_unref_resource()
549 ret = virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
554 void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout() argument
562 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
573 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
576 void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_flush() argument
584 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
594 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_flush()
597 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_2d() argument
607 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_2d()
611 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_2d()
614 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
626 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_2d()
630 virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_attach_backing() argument
639 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
649 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_attach_backing()
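Lines 500-649 all follow one shape: allocate a command buffer, fill the type field and command-specific fields (converted to device endianness with cpu_to_le32()), and hand it to the queue helper, attaching a fence when completion must be observable. The 2D transfer variant (lines 607-611) additionally syncs the backing sgtable for the device first when the transport uses the DMA API (virtio_has_dma_quirk() false). Sketch of the shape with a hypothetical flush command (struct, type code, and queue_ctrl_buffer() are illustrative):

    enum { MY_CMD_RESOURCE_FLUSH = 0x0104 };  /* illustrative type code */

    struct my_cmd_flush {                      /* illustrative wire format */
            __le32 type;
            __le32 resource_id;
            __le32 x, y, width, height;
    };

    static void my_cmd_resource_flush(struct my_dev *d, u32 resource_id,
                                      u32 x, u32 y, u32 w, u32 h)
    {
            struct my_cmd_flush *cmd;
            struct my_vbuf *vbuf;

            cmd = alloc_cmd(d, &vbuf, sizeof(*cmd));
            memset(cmd, 0, sizeof(*cmd));

            cmd->type = cpu_to_le32(MY_CMD_RESOURCE_FLUSH);
            cmd->resource_id = cpu_to_le32(resource_id);
            cmd->x = cpu_to_le32(x);
            cmd->y = cpu_to_le32(y);
            cmd->width = cpu_to_le32(w);
            cmd->height = cpu_to_le32(h);

            queue_ctrl_buffer(d, vbuf);  /* no fence: fire and forget */
    }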
652 static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_display_info_cb() argument
659 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
660 for (i = 0; i < vgdev->num_scanouts; i++) { in virtio_gpu_cmd_get_display_info_cb()
661 vgdev->outputs[i].info = resp->pmodes[i]; in virtio_gpu_cmd_get_display_info_cb()
673 vgdev->display_info_pending = false; in virtio_gpu_cmd_get_display_info_cb()
674 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
675 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_display_info_cb()
677 if (!drm_helper_hpd_irq_event(vgdev->ddev)) in virtio_gpu_cmd_get_display_info_cb()
678 drm_kms_helper_hotplug_event(vgdev->ddev); in virtio_gpu_cmd_get_display_info_cb()
681 static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset_info_cb() argument
690 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
691 if (vgdev->capsets) { in virtio_gpu_cmd_get_capset_info_cb()
692 vgdev->capsets[i].id = le32_to_cpu(resp->capset_id); in virtio_gpu_cmd_get_capset_info_cb()
693 vgdev->capsets[i].max_version = le32_to_cpu(resp->capset_max_version); in virtio_gpu_cmd_get_capset_info_cb()
694 vgdev->capsets[i].max_size = le32_to_cpu(resp->capset_max_size); in virtio_gpu_cmd_get_capset_info_cb()
698 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
699 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_capset_info_cb()
702 static void virtio_gpu_cmd_capset_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_capset_cb() argument
711 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
712 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
723 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
724 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_capset_cb()
739 static void virtio_gpu_cmd_get_edid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_edid_cb() argument
750 if (scanout >= vgdev->num_scanouts) in virtio_gpu_cmd_get_edid_cb()
752 output = vgdev->outputs + scanout; in virtio_gpu_cmd_get_edid_cb()
757 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
760 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
763 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_edid_cb()
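Lines 652-763 are the response side of the asynchronous queries: each callback copies the device's answer into driver state under display_info_lock, then wakes sleepers on resp_wq. The submit side marks the request pending and callers later wait on the same queue. Sketch of the round trip (resp_buf, info, info_pending, display_info_lock, and resp_wq are assumed fields; types and the MY_CMD_GET_INFO code are illustrative):

    enum { MY_CMD_GET_INFO = 0x0100 };               /* illustrative */

    struct my_resp_info { __le32 width, height; };   /* illustrative */
    struct my_cmd_hdr   { __le32 type; };

    /* completion callback, invoked by the dequeue worker */
    static void my_get_info_cb(struct my_dev *d, struct my_vbuf *vbuf)
    {
            struct my_resp_info *resp = (void *)vbuf->resp_buf;

            spin_lock(&d->display_info_lock);
            d->info = *resp;          /* publish the answer under the lock */
            d->info_pending = false;
            spin_unlock(&d->display_info_lock);

            wake_up(&d->resp_wq);     /* releases wait_event() sleepers */
    }

    static void my_get_info(struct my_dev *d)
    {
            struct my_cmd_hdr *cmd;
            struct my_vbuf *vbuf;

            cmd = alloc_cmd_resp(d, my_get_info_cb, &vbuf, sizeof(*cmd),
                                 sizeof(struct my_resp_info));
            cmd->type = cpu_to_le32(MY_CMD_GET_INFO);

            d->info_pending = true;
            queue_ctrl_buffer(d, vbuf);
            /* interested callers then do:
             *   wait_event(d->resp_wq, !d->info_pending); */
    }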
766 int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_display_info() argument
778 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
783 vgdev->display_info_pending = true; in virtio_gpu_cmd_get_display_info()
785 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
789 int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx) in virtio_gpu_cmd_get_capset_info() argument
801 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
808 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
812 int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset() argument
825 if (idx >= vgdev->num_capsets) in virtio_gpu_cmd_get_capset()
828 if (version > vgdev->capsets[idx].max_version) in virtio_gpu_cmd_get_capset()
835 max_size = vgdev->capsets[idx].max_size; in virtio_gpu_cmd_get_capset()
851 cache_ent->id = vgdev->capsets[idx].id; in virtio_gpu_cmd_get_capset()
854 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
856 list_for_each_entry(search_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_get_capset()
857 if (search_ent->id == vgdev->capsets[idx].id && in virtio_gpu_cmd_get_capset()
864 list_add_tail(&cache_ent->head, &vgdev->cap_cache); in virtio_gpu_cmd_get_capset()
865 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
876 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
880 cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id); in virtio_gpu_cmd_get_capset()
883 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
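Lines 812-883 also show a race-safe cache insert: the capset entry is allocated outside the lock, then the cache list is searched under display_info_lock; if another thread already inserted a matching entry, the new one is freed and the existing one reused, so only one query per (id, version) is ever issued. Sketch (my_cap_ent and cap_cache are assumed names):

    struct my_cap_ent {
            u32 id, version;
            struct list_head head;
    };

    static int my_get_capset(struct my_dev *d, u32 id, u32 version,
                             struct my_cap_ent **cache_p)
    {
            struct my_cap_ent *ent, *search;

            ent = kzalloc(sizeof(*ent), GFP_KERNEL);  /* alloc outside lock */
            if (!ent)
                    return -ENOMEM;
            ent->id = id;
            ent->version = version;

            spin_lock(&d->display_info_lock);
            list_for_each_entry(search, &d->cap_cache, head) {
                    if (search->id == id && search->version == version) {
                            /* lost the race: another thread cached it */
                            *cache_p = search;
                            spin_unlock(&d->display_info_lock);
                            kfree(ent);
                            return 0;
                    }
            }
            list_add_tail(&ent->head, &d->cap_cache);
            spin_unlock(&d->display_info_lock);

            *cache_p = ent;
            /* only the winner queues the actual GET_CAPSET command */
            return 0;
    }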
888 int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_edids() argument
895 if (WARN_ON(!vgdev->has_edid)) in virtio_gpu_cmd_get_edids()
898 for (scanout = 0; scanout < vgdev->num_scanouts; scanout++) { in virtio_gpu_cmd_get_edids()
905 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
910 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
916 void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id, in virtio_gpu_cmd_context_create() argument
922 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
930 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
933 void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_destroy() argument
939 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
944 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
947 void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_attach_resource() argument
955 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
962 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
965 void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_detach_resource() argument
973 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
980 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
984 virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_3d() argument
993 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
1011 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_resource_create_3d()
1016 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_3d() argument
1026 bool use_dma_api = !virtio_has_dma_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_3d()
1030 dma_sync_sgtable_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_3d()
1033 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
1045 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_to_host_3d()
1048 void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_from_host_3d() argument
1059 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
1071 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_transfer_from_host_3d()
1074 void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_submit() argument
1083 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
1094 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, fence); in virtio_gpu_cmd_submit()
1097 void virtio_gpu_object_attach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_attach() argument
1102 virtio_gpu_cmd_resource_attach_backing(vgdev, obj->hw_res_handle, in virtio_gpu_object_attach()
1106 void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev, in virtio_gpu_cursor_ping() argument
1113 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1115 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
1118 static void virtio_gpu_cmd_resource_uuid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_uuid_cb() argument
1127 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1137 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_uuid_cb()
1139 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_resource_uuid_cb()
1143 virtio_gpu_cmd_resource_assign_uuid(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_assign_uuid() argument
1153 spin_lock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1155 spin_unlock(&vgdev->resource_export_lock); in virtio_gpu_cmd_resource_assign_uuid()
1161 (vgdev, virtio_gpu_cmd_resource_uuid_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_resource_assign_uuid()
1169 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_assign_uuid()