Lines matching full:vdev (uses of vdev in the VFIO PCI BAR/VGA read/write and ioeventfd paths)
41 static int vfio_pci_iowrite##size(struct vfio_pci_core_device *vdev, \
45 down_read(&vdev->memory_lock); \
46 if (!__vfio_pci_memory_enabled(vdev)) { \
47 up_read(&vdev->memory_lock); \
55 up_read(&vdev->memory_lock); \
68 static int vfio_pci_ioread##size(struct vfio_pci_core_device *vdev, \
72 down_read(&vdev->memory_lock); \
73 if (!__vfio_pci_memory_enabled(vdev)) { \
74 up_read(&vdev->memory_lock); \
82 up_read(&vdev->memory_lock); \
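
The first two groups of matches come from a pair of generator macros that stamp out one guarded I/O accessor per access width. A minimal sketch of the write side, assuming __vfio_pci_memory_enabled() reports whether the device currently has memory decode enabled; the macro name and exact layout here are illustrative rather than the verbatim source. The read side follows the same pattern with ioread##size() and an output parameter.

    /* Sketch only: guarded MMIO write accessor, generated per width. */
    #include <linux/io.h>
    #include <linux/rwsem.h>
    #include <linux/types.h>

    #define VFIO_PCI_IOWRITE(size) \
    static int vfio_pci_iowrite##size(struct vfio_pci_core_device *vdev, \
                                      bool test_mem, u##size val, \
                                      void __iomem *io) \
    { \
            if (test_mem) { \
                    /* Hold memory_lock for read so a concurrent disable of \
                     * memory decode cannot race this MMIO access. */ \
                    down_read(&vdev->memory_lock); \
                    if (!__vfio_pci_memory_enabled(vdev)) { \
                            up_read(&vdev->memory_lock); \
                            return -EIO; \
                    } \
            } \
            iowrite##size(val, io); \
            if (test_mem) \
                    up_read(&vdev->memory_lock); \
            return 0; \
    }

Expanding the macro for sizes 8, 16 and 32 yields the vfio_pci_iowrite8/16/32() helpers that do_io_rw() calls below; a 64-bit variant only makes sense where iowrite64() is available on the architecture.
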
97 static ssize_t do_io_rw(struct vfio_pci_core_device *vdev, bool test_mem, in do_io_rw() argument
122 ret = vfio_pci_iowrite32(vdev, test_mem, in do_io_rw()
127 ret = vfio_pci_ioread32(vdev, test_mem, in do_io_rw()
144 ret = vfio_pci_iowrite16(vdev, test_mem, in do_io_rw()
149 ret = vfio_pci_ioread16(vdev, test_mem, in do_io_rw()
166 ret = vfio_pci_iowrite8(vdev, test_mem, in do_io_rw()
171 ret = vfio_pci_ioread8(vdev, test_mem, in do_io_rw()
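
do_io_rw() then walks the user buffer, dispatching at each offset to the widest naturally aligned accessor and passing test_mem through so every access takes the memory-enable check above. A condensed sketch of that loop, assuming x_start..x_end is a window the caller wants excluded from direct access (the MSI-X table in the BAR path); only the 32-bit branch is spelled out, and the 16-bit, 8-bit and fill branches are reduced to comments:

    /* Sketch; assumes the guarded accessors above plus the usual
     * uaccess/minmax helpers. */
    static ssize_t do_io_rw(struct vfio_pci_core_device *vdev, bool test_mem,
                            void __iomem *io, char __user *buf, loff_t off,
                            size_t count, size_t x_start, size_t x_end,
                            bool iswrite)
    {
            ssize_t done = 0;
            int ret;

            while (count) {
                    size_t fillable, filled;

                    /* Bytes we may access before hitting the excluded window. */
                    if (off < x_start)
                            fillable = min(count, (size_t)(x_start - off));
                    else if (off >= x_end)
                            fillable = count;
                    else
                            fillable = 0;

                    if (fillable >= 4 && !(off % 4)) {
                            u32 val;

                            if (iswrite) {
                                    if (copy_from_user(&val, buf, 4))
                                            return -EFAULT;
                                    ret = vfio_pci_iowrite32(vdev, test_mem,
                                                             val, io + off);
                            } else {
                                    ret = vfio_pci_ioread32(vdev, test_mem,
                                                            &val, io + off);
                                    if (!ret && copy_to_user(buf, &val, 4))
                                            return -EFAULT;
                            }
                            if (ret)
                                    return ret;
                            filled = 4;
                    } else if (fillable >= 2 && !(off % 2)) {
                            /* Same shape with vfio_pci_iowrite16/ioread16;
                             * the access is elided from this sketch. */
                            filled = 2;
                    } else if (fillable) {
                            /* Same shape with vfio_pci_iowrite8/ioread8. */
                            filled = 1;
                    } else {
                            /* Inside the excluded window: skip ahead.  A full
                             * implementation fills reads with 0xff bytes and
                             * silently drops writes here. */
                            filled = min(count, (size_t)(x_end - off));
                    }

                    count -= filled;
                    done += filled;
                    off += filled;
                    buf += filled;
            }

            return done;
    }
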
203 static int vfio_pci_setup_barmap(struct vfio_pci_core_device *vdev, int bar) in vfio_pci_setup_barmap() argument
205 struct pci_dev *pdev = vdev->pdev; in vfio_pci_setup_barmap()
209 if (vdev->barmap[bar]) in vfio_pci_setup_barmap()
222 vdev->barmap[bar] = io; in vfio_pci_setup_barmap()
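
vfio_pci_setup_barmap() lazily maps a BAR the first time it is needed and caches the result in vdev->barmap[], so later reads, writes and ioeventfds reuse the same mapping. A minimal sketch, assuming pci_request_selected_regions() and pci_iomap() as the mapping primitives; the resource name string is illustrative:

    static int vfio_pci_setup_barmap(struct vfio_pci_core_device *vdev, int bar)
    {
            struct pci_dev *pdev = vdev->pdev;
            void __iomem *io;
            int ret;

            /* Already mapped by an earlier access: nothing to do. */
            if (vdev->barmap[bar])
                    return 0;

            ret = pci_request_selected_regions(pdev, 1 << bar, "vfio");
            if (ret)
                    return ret;

            /* Map the whole BAR (maxlen == 0 means "to the end"). */
            io = pci_iomap(pdev, bar, 0);
            if (!io) {
                    pci_release_selected_regions(pdev, 1 << bar);
                    return -ENOMEM;
            }

            vdev->barmap[bar] = io;
            return 0;
    }
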
227 ssize_t vfio_pci_bar_rw(struct vfio_pci_core_device *vdev, char __user *buf, in vfio_pci_bar_rw() argument
230 struct pci_dev *pdev = vdev->pdev; in vfio_pci_bar_rw()
236 struct resource *res = &vdev->pdev->resource[bar]; in vfio_pci_bar_rw()
265 int ret = vfio_pci_setup_barmap(vdev, bar); in vfio_pci_bar_rw()
271 io = vdev->barmap[bar]; in vfio_pci_bar_rw()
274 if (bar == vdev->msix_bar) { in vfio_pci_bar_rw()
275 x_start = vdev->msix_offset; in vfio_pci_bar_rw()
276 x_end = vdev->msix_offset + vdev->msix_size; in vfio_pci_bar_rw()
279 done = do_io_rw(vdev, res->flags & IORESOURCE_MEM, io, buf, pos, in vfio_pci_bar_rw()
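
vfio_pci_bar_rw() decodes the region index from the file offset, clamps the request to the BAR size, carves the MSI-X table out as an excluded window, and hands off to do_io_rw(). A trimmed sketch, assuming the VFIO_PCI_OFFSET_TO_INDEX()/VFIO_PCI_OFFSET_MASK helpers used by the VFIO PCI core, and ignoring the ROM special case:

    ssize_t vfio_pci_bar_rw(struct vfio_pci_core_device *vdev, char __user *buf,
                            size_t count, loff_t *ppos, bool iswrite)
    {
            struct pci_dev *pdev = vdev->pdev;
            int bar = VFIO_PCI_OFFSET_TO_INDEX(*ppos);
            loff_t pos = *ppos & VFIO_PCI_OFFSET_MASK;
            struct resource *res = &vdev->pdev->resource[bar];
            size_t x_start = 0, x_end = 0;
            void __iomem *io;
            ssize_t done;
            int ret;

            if (!pci_resource_start(pdev, bar) ||
                pos >= pci_resource_len(pdev, bar))
                    return -EINVAL;

            count = min(count, (size_t)(pci_resource_len(pdev, bar) - pos));

            /* Map the BAR on first use and reuse the cached mapping. */
            ret = vfio_pci_setup_barmap(vdev, bar);
            if (ret)
                    return ret;
            io = vdev->barmap[bar];

            /* Carve out the MSI-X table so do_io_rw() fills reads and drops
             * writes inside that range. */
            if (bar == vdev->msix_bar) {
                    x_start = vdev->msix_offset;
                    x_end = vdev->msix_offset + vdev->msix_size;
            }

            done = do_io_rw(vdev, res->flags & IORESOURCE_MEM, io, buf, pos,
                            count, x_start, x_end, iswrite);

            if (done >= 0)
                    *ppos += done;

            return done;
    }
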
291 ssize_t vfio_pci_vga_rw(struct vfio_pci_core_device *vdev, char __user *buf, in vfio_pci_vga_rw() argument
301 if (!vdev->has_vga) in vfio_pci_vga_rw()
336 ret = vga_get_interruptible(vdev->pdev, rsrc); in vfio_pci_vga_rw()
347 done = do_io_rw(vdev, false, iomem, buf, off, count, 0, 0, iswrite); in vfio_pci_vga_rw()
349 vga_put(vdev->pdev, rsrc); in vfio_pci_vga_rw()
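
vfio_pci_vga_rw() handles the legacy VGA ranges rather than a BAR: it maps the requested legacy window, holds the VGA arbiter while the access is in flight, and reuses do_io_rw(). A reduced sketch showing only the 0xa0000 framebuffer window; the I/O port windows at 0x3b0 and 0x3c0 would be handled the same way via ioport_map(), and the exact bounds here are illustrative:

    #include <linux/vgaarb.h>

    ssize_t vfio_pci_vga_rw(struct vfio_pci_core_device *vdev, char __user *buf,
                            size_t count, loff_t *ppos, bool iswrite)
    {
            loff_t pos = *ppos & VFIO_PCI_OFFSET_MASK;
            void __iomem *iomem;
            ssize_t done;
            int ret;

            if (!vdev->has_vga)
                    return -EINVAL;

            /* Only the legacy framebuffer window is shown in this sketch. */
            if (pos < 0xa0000 || pos >= 0xc0000)
                    return -EINVAL;

            count = min(count, (size_t)(0xc0000 - pos));
            iomem = ioremap(0xa0000, 0x20000);
            if (!iomem)
                    return -ENOMEM;

            /* Own the legacy VGA memory range for the duration of the access. */
            ret = vga_get_interruptible(vdev->pdev, VGA_RSRC_LEGACY_MEM);
            if (ret) {
                    iounmap(iomem);
                    return ret;
            }

            /* The listing passes test_mem == false on this path, so the
             * memory_lock check is skipped for legacy VGA accesses. */
            done = do_io_rw(vdev, false, iomem, buf, pos - 0xa0000, count,
                            0, 0, iswrite);

            vga_put(vdev->pdev, VGA_RSRC_LEGACY_MEM);

            if (done >= 0)
                    *ppos += done;

            iounmap(iomem);
            return done;
    }
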
364 vfio_pci_iowrite8(ioeventfd->vdev, test_mem, in vfio_pci_ioeventfd_do_write()
368 vfio_pci_iowrite16(ioeventfd->vdev, test_mem, in vfio_pci_ioeventfd_do_write()
372 vfio_pci_iowrite32(ioeventfd->vdev, test_mem, in vfio_pci_ioeventfd_do_write()
377 vfio_pci_iowrite64(ioeventfd->vdev, test_mem, in vfio_pci_ioeventfd_do_write()
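
vfio_pci_ioeventfd_do_write() replays the registered value at the registered address with the registered width, using the same guarded accessors. A sketch, assuming a struct vfio_pci_ioeventfd with vdev/addr/data/count fields as implied by the calls in the listing:

    static void vfio_pci_ioeventfd_do_write(struct vfio_pci_ioeventfd *ioeventfd,
                                            bool test_mem)
    {
            switch (ioeventfd->count) {
            case 1:
                    vfio_pci_iowrite8(ioeventfd->vdev, test_mem,
                                      ioeventfd->data, ioeventfd->addr);
                    break;
            case 2:
                    vfio_pci_iowrite16(ioeventfd->vdev, test_mem,
                                       ioeventfd->data, ioeventfd->addr);
                    break;
            case 4:
                    vfio_pci_iowrite32(ioeventfd->vdev, test_mem,
                                       ioeventfd->data, ioeventfd->addr);
                    break;
            case 8:
                    /* Only where a 64-bit MMIO write is available. */
                    vfio_pci_iowrite64(ioeventfd->vdev, test_mem,
                                       ioeventfd->data, ioeventfd->addr);
                    break;
            }
    }
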
387 struct vfio_pci_core_device *vdev = ioeventfd->vdev; in vfio_pci_ioeventfd_handler() local
390 if (!down_read_trylock(&vdev->memory_lock)) in vfio_pci_ioeventfd_handler()
392 if (!__vfio_pci_memory_enabled(vdev)) { in vfio_pci_ioeventfd_handler()
393 up_read(&vdev->memory_lock); in vfio_pci_ioeventfd_handler()
401 up_read(&vdev->memory_lock); in vfio_pci_ioeventfd_handler()
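
The eventfd handler runs in a context where it must not sleep on memory_lock, so it only tries the lock: on contention it returns non-zero, which in the virqfd framework means "punt to the threaded handler", and the thread then performs the same write with the full sleeping check. A sketch of that split, assuming the virqfd handler/thread convention used elsewhere in VFIO:

    static int vfio_pci_ioeventfd_handler(void *opaque, void *unused)
    {
            struct vfio_pci_ioeventfd *ioeventfd = opaque;
            struct vfio_pci_core_device *vdev = ioeventfd->vdev;

            if (ioeventfd->test_mem) {
                    /* Cannot sleep here; back off to the thread on contention. */
                    if (!down_read_trylock(&vdev->memory_lock))
                            return 1;
                    if (!__vfio_pci_memory_enabled(vdev)) {
                            up_read(&vdev->memory_lock);
                            return 0;
                    }
            }

            /* The memory check was already done above, so skip it in the
             * accessor by passing test_mem == false. */
            vfio_pci_ioeventfd_do_write(ioeventfd, false);

            if (ioeventfd->test_mem)
                    up_read(&vdev->memory_lock);

            return 0;
    }

    static void vfio_pci_ioeventfd_thread(void *opaque, void *unused)
    {
            struct vfio_pci_ioeventfd *ioeventfd = opaque;

            /* Sleeping context: let the accessor take memory_lock itself. */
            vfio_pci_ioeventfd_do_write(ioeventfd, ioeventfd->test_mem);
    }
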
413 long vfio_pci_ioeventfd(struct vfio_pci_core_device *vdev, loff_t offset, in vfio_pci_ioeventfd() argument
416 struct pci_dev *pdev = vdev->pdev; in vfio_pci_ioeventfd()
429 if (bar == vdev->msix_bar && in vfio_pci_ioeventfd()
430 !(pos + count <= vdev->msix_offset || in vfio_pci_ioeventfd()
431 pos >= vdev->msix_offset + vdev->msix_size)) in vfio_pci_ioeventfd()
439 ret = vfio_pci_setup_barmap(vdev, bar); in vfio_pci_ioeventfd()
443 mutex_lock(&vdev->ioeventfds_lock); in vfio_pci_ioeventfd()
445 list_for_each_entry(ioeventfd, &vdev->ioeventfds_list, next) { in vfio_pci_ioeventfd()
451 vdev->ioeventfds_nr--; in vfio_pci_ioeventfd()
466 if (vdev->ioeventfds_nr >= VFIO_PCI_IOEVENTFD_MAX) { in vfio_pci_ioeventfd()
477 ioeventfd->vdev = vdev; in vfio_pci_ioeventfd()
478 ioeventfd->addr = vdev->barmap[bar] + pos; in vfio_pci_ioeventfd()
483 ioeventfd->test_mem = vdev->pdev->resource[bar].flags & IORESOURCE_MEM; in vfio_pci_ioeventfd()
493 list_add(&ioeventfd->next, &vdev->ioeventfds_list); in vfio_pci_ioeventfd()
494 vdev->ioeventfds_nr++; in vfio_pci_ioeventfd()
497 mutex_unlock(&vdev->ioeventfds_lock); in vfio_pci_ioeventfd()
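
Finally, vfio_pci_ioeventfd() is the ioctl backend that registers (fd >= 0) or tears down (fd == -1) one of these accelerated writes. The listing shows the key checks: the target must fall inside a BAR, must not overlap the MSI-X table, the BAR must be mapped, and the per-device count is capped at VFIO_PCI_IOEVENTFD_MAX. A condensed sketch of the registration path, assuming vfio_virqfd_enable() as the mechanism that binds the eventfd to the handler/thread pair (its exact signature may differ across kernel versions):

    long vfio_pci_ioeventfd(struct vfio_pci_core_device *vdev, loff_t offset,
                            uint64_t data, int count, int fd)
    {
            struct pci_dev *pdev = vdev->pdev;
            loff_t pos = offset & VFIO_PCI_OFFSET_MASK;
            int bar = VFIO_PCI_OFFSET_TO_INDEX(offset);
            struct vfio_pci_ioeventfd *ioeventfd;
            long ret;

            if (bar > VFIO_PCI_BAR5_REGION_INDEX ||
                pos + count > pci_resource_len(pdev, bar))
                    return -EINVAL;

            /* Refuse ranges that would let userspace poke the MSI-X table. */
            if (bar == vdev->msix_bar &&
                !(pos + count <= vdev->msix_offset ||
                  pos >= vdev->msix_offset + vdev->msix_size))
                    return -EINVAL;

            ret = vfio_pci_setup_barmap(vdev, bar);
            if (ret)
                    return ret;

            mutex_lock(&vdev->ioeventfds_lock);

            /* An existing entry with the same pos/data/count is either torn
             * down (fd == -1, decrementing ioeventfds_nr) or rejected with
             * -EEXIST; that lookup loop is elided from this sketch. */

            if (vdev->ioeventfds_nr >= VFIO_PCI_IOEVENTFD_MAX) {
                    ret = -ENOSPC;
                    goto out_unlock;
            }

            ioeventfd = kzalloc(sizeof(*ioeventfd), GFP_KERNEL);
            if (!ioeventfd) {
                    ret = -ENOMEM;
                    goto out_unlock;
            }

            ioeventfd->vdev = vdev;
            ioeventfd->addr = vdev->barmap[bar] + pos;
            ioeventfd->data = data;
            ioeventfd->count = count;
            ioeventfd->test_mem = vdev->pdev->resource[bar].flags & IORESOURCE_MEM;

            ret = vfio_virqfd_enable(ioeventfd, vfio_pci_ioeventfd_handler,
                                     vfio_pci_ioeventfd_thread, NULL,
                                     &ioeventfd->virqfd, fd);
            if (ret) {
                    kfree(ioeventfd);
                    goto out_unlock;
            }

            list_add(&ioeventfd->next, &vdev->ioeventfds_list);
            vdev->ioeventfds_nr++;

    out_unlock:
            mutex_unlock(&vdev->ioeventfds_lock);
            return ret;
    }
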