Lines Matching refs:vgdev
46 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_ctrl_ack() local
48 schedule_work(&vgdev->ctrlq.dequeue_work); in virtio_gpu_ctrl_ack()
54 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_cursor_ack() local
56 schedule_work(&vgdev->cursorq.dequeue_work); in virtio_gpu_cursor_ack()
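Both ack callbacks above run in virtqueue interrupt context and do nothing beyond scheduling a worker; all reclaim happens later in process context. A minimal sketch of that pattern, with the per-queue bookkeeping abbreviated (the real struct holding ctrlq/cursorq lives elsewhere in the driver; the demo_* names here are illustrative only):

#include <linux/spinlock.h>
#include <linux/virtio.h>
#include <linux/wait.h>
#include <linux/workqueue.h>

/* Abbreviated per-queue bookkeeping, mirroring ctrlq/cursorq above. */
struct demo_queue {
        struct virtqueue *vq;
        spinlock_t qlock;
        wait_queue_head_t ack_queue;    /* waiters for ring space */
        struct work_struct dequeue_work;
};

/* Interrupt context: just kick the dequeue worker and return. */
static void demo_queue_ack(struct virtqueue *vq)
{
        struct demo_queue *q = vq->vdev->priv;  /* assumed set at probe */

        schedule_work(&q->dequeue_work);
}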
59 int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_alloc_vbufs() argument
61 vgdev->vbufs = kmem_cache_create("virtio-gpu-vbufs", in virtio_gpu_alloc_vbufs()
65 if (!vgdev->vbufs) in virtio_gpu_alloc_vbufs()
70 void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_free_vbufs() argument
72 kmem_cache_destroy(vgdev->vbufs); in virtio_gpu_free_vbufs()
73 vgdev->vbufs = NULL; in virtio_gpu_free_vbufs()
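virtio_gpu_alloc_vbufs()/virtio_gpu_free_vbufs() manage a dedicated slab cache for command buffers. A sketch under the assumption that each vbuf is a fixed-size object carrying a small header plus inline command/response storage (the driver sizes the cache for its worst-case command; the size below is a placeholder):

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>

#define DEMO_VBUF_SIZE  256     /* assumed worst-case cmd + resp payload */

struct demo_vbuf {
        struct list_head list;  /* reclaim-list linkage */
        char *buf;              /* points at inline storage below */
        int size;
        char inline_buf[DEMO_VBUF_SIZE];
};

static struct kmem_cache *demo_vbufs;

static int demo_alloc_vbufs(void)
{
        demo_vbufs = kmem_cache_create("demo-vbufs",
                                       sizeof(struct demo_vbuf),
                                       0, 0, NULL);
        return demo_vbufs ? 0 : -ENOMEM;
}

static void demo_free_vbufs(void)
{
        kmem_cache_destroy(demo_vbufs); /* all objects must be freed first */
        demo_vbufs = NULL;
}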
77 virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev, in virtio_gpu_get_vbuf() argument
83 vbuf = kmem_cache_zalloc(vgdev->vbufs, GFP_KERNEL); in virtio_gpu_get_vbuf()
101 static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd() argument
107 vbuf = virtio_gpu_get_vbuf(vgdev, size, in virtio_gpu_alloc_cmd()
119 virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cursor() argument
125 (vgdev, sizeof(struct virtio_gpu_update_cursor), in virtio_gpu_alloc_cursor()
135 static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_resp() argument
143 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
153 static void free_vbuf(struct virtio_gpu_device *vgdev, in free_vbuf() argument
159 kmem_cache_free(vgdev->vbufs, vbuf); in free_vbuf()
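The get/free helpers then hand out zeroed objects from that cache; virtio_gpu_get_vbuf() additionally records a response buffer and a completion callback per command (see resp_cb at 221), which this sketch omits. The ERR_PTR return style is an assumption:

#include <linux/err.h>

/* Reuses demo_vbuf and demo_vbufs from the sketch above. */
static struct demo_vbuf *demo_get_vbuf(int size)
{
        struct demo_vbuf *vbuf;

        vbuf = kmem_cache_zalloc(demo_vbufs, GFP_KERNEL);
        if (!vbuf)
                return ERR_PTR(-ENOMEM);
        vbuf->buf = vbuf->inline_buf;
        vbuf->size = size;
        return vbuf;
}

static void demo_free_vbuf(struct demo_vbuf *vbuf)
{
        kmem_cache_free(demo_vbufs, vbuf);
}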
178 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_ctrl_func() local
187 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
189 virtqueue_disable_cb(vgdev->ctrlq.vq); in virtio_gpu_dequeue_ctrl_func()
190 reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list); in virtio_gpu_dequeue_ctrl_func()
192 } while (!virtqueue_enable_cb(vgdev->ctrlq.vq)); in virtio_gpu_dequeue_ctrl_func()
193 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
198 trace_virtio_gpu_cmd_response(vgdev->ctrlq.vq, resp); in virtio_gpu_dequeue_ctrl_func()
221 entry->resp_cb(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
224 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
226 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
229 virtio_gpu_fence_event_process(vgdev, fence_id); in virtio_gpu_dequeue_ctrl_func()
234 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_cursor_func() local
241 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
243 virtqueue_disable_cb(vgdev->cursorq.vq); in virtio_gpu_dequeue_cursor_func()
244 reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list); in virtio_gpu_dequeue_cursor_func()
245 } while (!virtqueue_enable_cb(vgdev->cursorq.vq)); in virtio_gpu_dequeue_cursor_func()
246 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
250 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_cursor_func()
252 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
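Both dequeue workers (ctrl at 178, cursor at 234) use the same race-free drain loop: disable callbacks, pull every completed buffer off the ring, then re-enable. virtqueue_enable_cb() returns false if new buffers slipped in during the window, so the loop repeats until the ring is provably empty with callbacks armed. Sketched with the demo types from above:

static void demo_dequeue_work(struct work_struct *work)
{
        struct demo_queue *q = container_of(work, struct demo_queue,
                                            dequeue_work);
        struct demo_vbuf *vbuf, *tmp;
        unsigned int len;
        LIST_HEAD(reclaim_list);

        spin_lock(&q->qlock);
        do {
                virtqueue_disable_cb(q->vq);
                while ((vbuf = virtqueue_get_buf(q->vq, &len)))
                        list_add_tail(&vbuf->list, &reclaim_list);
        } while (!virtqueue_enable_cb(q->vq));
        spin_unlock(&q->qlock);

        /* Run completions and free buffers outside the lock, then wake
         * anyone stalled in wait_event() waiting for ring space. */
        list_for_each_entry_safe(vbuf, tmp, &reclaim_list, list) {
                list_del(&vbuf->list);
                demo_free_vbuf(vbuf);
        }
        wake_up(&q->ack_queue);
}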
255 static int virtio_gpu_queue_ctrl_buffer_locked(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer_locked() argument
257 __releases(&vgdev->ctrlq.qlock) in virtio_gpu_queue_ctrl_buffer_locked()
258 __acquires(&vgdev->ctrlq.qlock) in virtio_gpu_queue_ctrl_buffer_locked()
260 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_ctrl_buffer_locked()
265 if (!vgdev->vqs_ready) in virtio_gpu_queue_ctrl_buffer_locked()
287 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer_locked()
288 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= outcnt + incnt); in virtio_gpu_queue_ctrl_buffer_locked()
289 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer_locked()
303 static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer() argument
308 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer()
309 rc = virtio_gpu_queue_ctrl_buffer_locked(vgdev, vbuf); in virtio_gpu_queue_ctrl_buffer()
310 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer()
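virtio_gpu_queue_ctrl_buffer_locked() is where backpressure happens: if virtqueue_add_sgs() reports no free descriptors, the lock is dropped, the caller sleeps until the dequeue worker signals ack_queue with enough free slots, and the add is retried; that sleep is why the function carries the __releases/__acquires annotations at 257-258. A sketch of that shape plus the small locking wrapper:

#include <linux/scatterlist.h>

static int demo_queue_buffer_locked(struct demo_queue *q,
                                    struct scatterlist **sgs,
                                    unsigned int outcnt, unsigned int incnt,
                                    struct demo_vbuf *vbuf)
        __releases(&q->qlock)
        __acquires(&q->qlock)
{
        struct virtqueue *vq = q->vq;
        int ret;

retry:
        /* GFP_ATOMIC: we are under a spinlock here */
        ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
        if (ret == -ENOSPC) {
                spin_unlock(&q->qlock);
                wait_event(q->ack_queue, vq->num_free >= outcnt + incnt);
                spin_lock(&q->qlock);
                goto retry;
        }
        if (ret < 0)
                return ret;

        virtqueue_kick(vq);     /* notify the host */
        return 0;
}

static int demo_queue_buffer(struct demo_queue *q, struct scatterlist **sgs,
                             unsigned int outcnt, unsigned int incnt,
                             struct demo_vbuf *vbuf)
{
        int rc;

        spin_lock(&q->qlock);
        rc = demo_queue_buffer_locked(q, sgs, outcnt, incnt, vbuf);
        spin_unlock(&q->qlock);
        return rc;
}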
314 static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_fenced_ctrl_buffer() argument
319 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_fenced_ctrl_buffer()
323 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_fenced_ctrl_buffer()
334 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_fenced_ctrl_buffer()
335 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= 3); in virtio_gpu_queue_fenced_ctrl_buffer()
340 virtio_gpu_fence_emit(vgdev, hdr, fence); in virtio_gpu_queue_fenced_ctrl_buffer()
341 rc = virtio_gpu_queue_ctrl_buffer_locked(vgdev, vbuf); in virtio_gpu_queue_fenced_ctrl_buffer()
342 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_fenced_ctrl_buffer()
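The fenced variant must emit the fence and add the buffer under a single hold of qlock so fence sequence numbers reach the ring in submission order; it therefore reserves ring space up front (the num_free >= 3 check at 335) rather than sleeping inside the add. A sketch, where demo_fence_emit() is a hypothetical stand-in for virtio_gpu_fence_emit():

static void demo_fence_emit(struct demo_vbuf *vbuf)
{
        /* hypothetical: allocate the next seqno, stamp the cmd header */
}

static int demo_queue_fenced(struct demo_queue *q, struct scatterlist **sgs,
                             unsigned int outcnt, unsigned int incnt,
                             struct demo_vbuf *vbuf)
{
        int rc;

again:
        spin_lock(&q->qlock);
        /* 3 descriptors: assumed worst case for these commands. We must
         * not sleep between fence emit and add, so if the ring is full,
         * drop the lock, wait for room, and restart from the top. */
        if (q->vq->num_free < 3) {
                spin_unlock(&q->qlock);
                wait_event(q->ack_queue, q->vq->num_free >= 3);
                goto again;
        }

        demo_fence_emit(vbuf);
        rc = demo_queue_buffer_locked(q, sgs, outcnt, incnt, vbuf);
        spin_unlock(&q->qlock);
        return rc;
}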
346 static int virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_cursor() argument
349 struct virtqueue *vq = vgdev->cursorq.vq; in virtio_gpu_queue_cursor()
354 if (!vgdev->vqs_ready) in virtio_gpu_queue_cursor()
361 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
365 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
366 wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt); in virtio_gpu_queue_cursor()
367 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
376 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
388 void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_create_resource() argument
396 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
405 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_create_resource()
409 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_resource() argument
415 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_unref_resource()
421 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
424 static void virtio_gpu_cmd_resource_inval_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_inval_backing() argument
431 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_inval_backing()
437 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_resource_inval_backing()
440 void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout() argument
448 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
459 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
462 void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_flush() argument
470 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
480 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_flush()
483 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_2d() argument
492 bool use_dma_api = !virtio_has_iommu_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_2d()
495 dma_sync_sg_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_2d()
499 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
510 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_transfer_to_host_2d()
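transfer_to_host_2d (and its 3d counterpart at 890) must make guest CPU writes visible before the host reads the backing pages. When the sg list was mapped through the real DMA API (no IOMMU quirk, per the virtio_has_iommu_quirk() test at 492), that means an explicit sync; the dma_dev argument below corresponds to vgdev->vdev->dev.parent in the driver. Sketch:

#include <linux/dma-mapping.h>

/* Flush CPU-side writes to DMA-mapped backing pages so the device (the
 * host, for virtio-gpu) reads current data during TRANSFER_TO_HOST. */
static void demo_sync_before_transfer(struct device *dma_dev,
                                      struct scatterlist *sgl, int nents,
                                      bool use_dma_api)
{
        if (use_dma_api)
                dma_sync_sg_for_device(dma_dev, sgl, nents, DMA_TO_DEVICE);
}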
514 virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_attach_backing() argument
523 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
533 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_resource_attach_backing()
536 static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_display_info_cb() argument
543 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
544 for (i = 0; i < vgdev->num_scanouts; i++) { in virtio_gpu_cmd_get_display_info_cb()
545 vgdev->outputs[i].info = resp->pmodes[i]; in virtio_gpu_cmd_get_display_info_cb()
557 vgdev->display_info_pending = false; in virtio_gpu_cmd_get_display_info_cb()
558 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
559 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_display_info_cb()
561 if (!drm_helper_hpd_irq_event(vgdev->ddev)) in virtio_gpu_cmd_get_display_info_cb()
562 drm_kms_helper_hotplug_event(vgdev->ddev); in virtio_gpu_cmd_get_display_info_cb()
565 static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset_info_cb() argument
574 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
575 vgdev->capsets[i].id = le32_to_cpu(resp->capset_id); in virtio_gpu_cmd_get_capset_info_cb()
576 vgdev->capsets[i].max_version = le32_to_cpu(resp->capset_max_version); in virtio_gpu_cmd_get_capset_info_cb()
577 vgdev->capsets[i].max_size = le32_to_cpu(resp->capset_max_size); in virtio_gpu_cmd_get_capset_info_cb()
578 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
579 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_capset_info_cb()
582 static void virtio_gpu_cmd_capset_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_capset_cb() argument
591 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
592 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
603 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
604 wake_up_all(&vgdev->resp_wq); in virtio_gpu_cmd_capset_cb()
619 static void virtio_gpu_cmd_get_edid_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_edid_cb() argument
630 if (scanout >= vgdev->num_scanouts) in virtio_gpu_cmd_get_edid_cb()
632 output = vgdev->outputs + scanout; in virtio_gpu_cmd_get_edid_cb()
637 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
640 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_edid_cb()
643 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_edid_cb()
646 int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_display_info() argument
658 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
663 vgdev->display_info_pending = true; in virtio_gpu_cmd_get_display_info()
665 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
669 int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx) in virtio_gpu_cmd_get_capset_info() argument
681 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
688 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
692 int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset() argument
705 if (idx >= vgdev->num_capsets) in virtio_gpu_cmd_get_capset()
708 if (version > vgdev->capsets[idx].max_version) in virtio_gpu_cmd_get_capset()
715 max_size = vgdev->capsets[idx].max_size; in virtio_gpu_cmd_get_capset()
731 cache_ent->id = vgdev->capsets[idx].id; in virtio_gpu_cmd_get_capset()
734 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
736 list_for_each_entry(search_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_get_capset()
737 if (search_ent->id == vgdev->capsets[idx].id && in virtio_gpu_cmd_get_capset()
744 list_add_tail(&cache_ent->head, &vgdev->cap_cache); in virtio_gpu_cmd_get_capset()
745 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
756 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
760 cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id); in virtio_gpu_cmd_get_capset()
763 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
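virtio_gpu_cmd_get_capset() pre-allocates a cache entry, then re-checks cap_cache under display_info_lock before linking it, discarding its own copy if a concurrent caller won the race (736-744); the response callback at 582 later fills the entry and wakes resp_wq. A sketch of the lookup-or-insert step, with abbreviated types:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_cap_ent {
        struct list_head head;
        u32 id;
        u32 version;
        void *caps;     /* filled in later by the response callback */
};

/* Returns the entry that ends up in the cache: ours, or the one a
 * racing caller inserted first (in which case ours is freed). */
static struct demo_cap_ent *
demo_cap_lookup_or_insert(spinlock_t *lock, struct list_head *cache,
                          struct demo_cap_ent *new_ent)
{
        struct demo_cap_ent *ent;

        spin_lock(lock);
        list_for_each_entry(ent, cache, head) {
                if (ent->id == new_ent->id &&
                    ent->version == new_ent->version) {
                        spin_unlock(lock);
                        kfree(new_ent->caps);
                        kfree(new_ent);
                        return ent;
                }
        }
        list_add_tail(&new_ent->head, cache);
        spin_unlock(lock);
        return new_ent;
}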
768 int virtio_gpu_cmd_get_edids(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_edids() argument
775 if (WARN_ON(!vgdev->has_edid)) in virtio_gpu_cmd_get_edids()
778 for (scanout = 0; scanout < vgdev->num_scanouts; scanout++) { in virtio_gpu_cmd_get_edids()
785 (vgdev, &virtio_gpu_cmd_get_edid_cb, &vbuf, in virtio_gpu_cmd_get_edids()
790 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_edids()
796 void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id, in virtio_gpu_cmd_context_create() argument
802 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
810 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
813 void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_destroy() argument
819 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
824 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
827 void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_attach_resource() argument
834 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
840 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
844 void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_detach_resource() argument
851 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
857 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
861 virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_3d() argument
869 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
886 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_resource_create_3d()
890 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_3d() argument
899 bool use_dma_api = !virtio_has_iommu_quirk(vgdev->vdev); in virtio_gpu_cmd_transfer_to_host_3d()
902 dma_sync_sg_for_device(vgdev->vdev->dev.parent, in virtio_gpu_cmd_transfer_to_host_3d()
906 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
916 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_transfer_to_host_3d()
919 void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_from_host_3d() argument
928 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
938 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_transfer_from_host_3d()
941 void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_submit() argument
948 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
958 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_submit()
961 int virtio_gpu_object_attach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_attach() argument
965 bool use_dma_api = !virtio_has_iommu_quirk(vgdev->vdev); in virtio_gpu_object_attach()
976 ret = virtio_gpu_object_get_sg_table(vgdev, obj); in virtio_gpu_object_attach()
982 obj->mapped = dma_map_sg(vgdev->vdev->dev.parent, in virtio_gpu_object_attach()
1006 virtio_gpu_cmd_resource_attach_backing(vgdev, obj->hw_res_handle, in virtio_gpu_object_attach()
1012 void virtio_gpu_object_detach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_detach() argument
1015 bool use_dma_api = !virtio_has_iommu_quirk(vgdev->vdev); in virtio_gpu_object_detach()
1018 struct virtio_gpu_fence *fence = virtio_gpu_fence_alloc(vgdev); in virtio_gpu_object_detach()
1020 virtio_gpu_cmd_resource_inval_backing(vgdev, obj->hw_res_handle, fence); in virtio_gpu_object_detach()
1025 dma_unmap_sg(vgdev->vdev->dev.parent, in virtio_gpu_object_detach()
1030 virtio_gpu_cmd_resource_inval_backing(vgdev, obj->hw_res_handle, NULL); in virtio_gpu_object_detach()
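Detach ordering matters when the pages are DMA-mapped: the INVAL_BACKING command is queued with a fence (1018-1020) and the mapping is only torn down afterwards (1025), so the host never touches already-unmapped pages; without the DMA API a NULL fence suffices (1030). A sketch of the mapped path, assuming the fence queued with the command is waited on before the unmap:

#include <linux/dma-fence.h>
#include <linux/dma-mapping.h>

/* Assumes the INVAL_BACKING command carrying @fence was already queued. */
static void demo_detach_mapped(struct device *dma_dev,
                               struct dma_fence *fence,
                               struct scatterlist *sgl, int nents)
{
        /* Block until the host signals it no longer uses the pages... */
        dma_fence_wait(fence, true);
        /* ...only then is it safe to tear down the DMA mapping. */
        dma_unmap_sg(dma_dev, sgl, nents, DMA_TO_DEVICE);
        dma_fence_put(fence);
}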
1034 void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev, in virtio_gpu_cursor_ping() argument
1041 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
1043 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()