Lines matching refs: zdev

22 static int zpci_refresh_global(struct zpci_dev *zdev)  in zpci_refresh_global()  argument
24 return zpci_refresh_trans((u64) zdev->fh << 32, zdev->start_dma, in zpci_refresh_global()
25 zdev->iommu_pages * PAGE_SIZE); in zpci_refresh_global()
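
Lines 22 and 24-25 between them show essentially the whole helper; a hedged reconstruction (only braces and indentation are filled in) would read:

        static int zpci_refresh_global(struct zpci_dev *zdev)
        {
                /* Refresh the I/O TLB for the entire aperture of this function. */
                return zpci_refresh_trans((u64) zdev->fh << 32, zdev->start_dma,
                                          zdev->iommu_pages * PAGE_SIZE);
        }

The function handle in the upper 32 bits selects the PCI function; the range argument spans zdev->iommu_pages pages starting at zdev->start_dma, i.e. the whole DMA aperture.
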
133 static int __dma_update_trans(struct zpci_dev *zdev, unsigned long pa, in __dma_update_trans() argument
145 spin_lock_irqsave(&zdev->dma_table_lock, irq_flags); in __dma_update_trans()
146 if (!zdev->dma_table) { in __dma_update_trans()
152 entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr); in __dma_update_trans()
168 entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr); in __dma_update_trans()
175 spin_unlock_irqrestore(&zdev->dma_table_lock, irq_flags); in __dma_update_trans()
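
The references at lines 133-175 indicate that __dma_update_trans() walks the CPU-side translation table page by page under zdev->dma_table_lock, and that the second dma_walk_cpu_trans() call at line 168 serves a rollback path. A minimal sketch of that shape, assuming a dma_update_cpu_trans() entry-update helper and a rollback guard that do not appear in this zdev-only listing:

        static int __dma_update_trans(struct zpci_dev *zdev, unsigned long pa,
                                      dma_addr_t dma_addr, size_t size, int flags)
        {
                unsigned int nr_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
                unsigned long irq_flags;
                unsigned long *entry;
                int i, rc = 0;

                spin_lock_irqsave(&zdev->dma_table_lock, irq_flags);
                if (!zdev->dma_table) {                                 /* line 146 */
                        rc = -EINVAL;
                        goto out_unlock;
                }

                for (i = 0; i < nr_pages; i++) {
                        entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr);  /* line 152 */
                        if (!entry) {
                                rc = -ENOMEM;
                                break;
                        }
                        dma_update_cpu_trans(entry, pa, flags);         /* assumed helper */
                        pa += PAGE_SIZE;
                        dma_addr += PAGE_SIZE;
                }

                if (rc && flags != ZPCI_PTE_INVALID) {
                        /* Roll back the partially installed mapping (line 168). */
                        while (i-- > 0) {
                                dma_addr -= PAGE_SIZE;
                                entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr);
                                if (!entry)
                                        break;
                                dma_update_cpu_trans(entry, 0, ZPCI_PTE_INVALID);
                        }
                }
        out_unlock:
                spin_unlock_irqrestore(&zdev->dma_table_lock, irq_flags);       /* line 175 */
                return rc;
        }
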
179 static int __dma_purge_tlb(struct zpci_dev *zdev, dma_addr_t dma_addr, in __dma_purge_tlb() argument
193 if (!zdev->tlb_refresh) in __dma_purge_tlb()
200 ret = zpci_refresh_trans((u64) zdev->fh << 32, dma_addr, in __dma_purge_tlb()
204 if (zpci_refresh_global(zdev)) in __dma_purge_tlb()
207 spin_lock_irqsave(&zdev->iommu_bitmap_lock, irqflags); in __dma_purge_tlb()
208 bitmap_andnot(zdev->iommu_bitmap, zdev->iommu_bitmap, in __dma_purge_tlb()
209 zdev->lazy_bitmap, zdev->iommu_pages); in __dma_purge_tlb()
210 bitmap_zero(zdev->lazy_bitmap, zdev->iommu_pages); in __dma_purge_tlb()
211 spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, irqflags); in __dma_purge_tlb()
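
Lines 179-211 sketch the TLB purge policy: devices that need no explicit refresh (!zdev->tlb_refresh) return early, a ranged zpci_refresh_trans() is tried first, and if that fails for lack of resources the code falls back to a global refresh and then reclaims every IOVA parked in zdev->lazy_bitmap. A hedged reading of that flow (the exact conditions and error codes are assumptions; the calls themselves appear above):

        static int __dma_purge_tlb(struct zpci_dev *zdev, dma_addr_t dma_addr,
                                   size_t size, int flags)
        {
                unsigned long irqflags;
                int ret;

                if (!zdev->tlb_refresh)                                 /* line 193 */
                        return 0;

                ret = zpci_refresh_trans((u64) zdev->fh << 32, dma_addr,
                                         PAGE_ALIGN(size));             /* line 200 */
                if (ret == -ENOMEM) {
                        /* Fall back to refreshing the whole aperture ... */
                        if (zpci_refresh_global(zdev))                  /* line 204 */
                                return -ENOMEM;

                        /* ... after which lazily freed IOVAs may be reused. */
                        spin_lock_irqsave(&zdev->iommu_bitmap_lock, irqflags);
                        bitmap_andnot(zdev->iommu_bitmap, zdev->iommu_bitmap,
                                      zdev->lazy_bitmap, zdev->iommu_pages);
                        bitmap_zero(zdev->lazy_bitmap, zdev->iommu_pages);
                        spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, irqflags);
                        ret = 0;
                }
                return ret;
        }
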
218 static int dma_update_trans(struct zpci_dev *zdev, unsigned long pa, in dma_update_trans() argument
223 rc = __dma_update_trans(zdev, pa, dma_addr, size, flags); in dma_update_trans()
227 rc = __dma_purge_tlb(zdev, dma_addr, size, flags); in dma_update_trans()
229 __dma_update_trans(zdev, pa, dma_addr, size, ZPCI_PTE_INVALID); in dma_update_trans()
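
dma_update_trans() (lines 218-229) combines the two steps: install the CPU-side translation first, then purge the TLB, and if the purge fails take the freshly installed mapping back out with ZPCI_PTE_INVALID so the table and the TLB never disagree. A hedged sketch (the guard on flags is an assumption):

        static int dma_update_trans(struct zpci_dev *zdev, unsigned long pa,
                                    dma_addr_t dma_addr, size_t size, int flags)
        {
                int rc;

                rc = __dma_update_trans(zdev, pa, dma_addr, size, flags);       /* line 223 */
                if (rc)
                        return rc;

                rc = __dma_purge_tlb(zdev, dma_addr, size, flags);              /* line 227 */
                if (rc && flags != ZPCI_PTE_INVALID)
                        /* Keep table and TLB consistent: withdraw the new mapping. */
                        __dma_update_trans(zdev, pa, dma_addr, size, ZPCI_PTE_INVALID);

                return rc;
        }
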
263 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in __dma_alloc_iommu() local
265 return iommu_area_alloc(zdev->iommu_bitmap, zdev->iommu_pages, in __dma_alloc_iommu()
266 start, size, zdev->start_dma >> PAGE_SHIFT, in __dma_alloc_iommu()
273 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in dma_alloc_address() local
276 spin_lock_irqsave(&zdev->iommu_bitmap_lock, flags); in dma_alloc_address()
277 offset = __dma_alloc_iommu(dev, zdev->next_bit, size); in dma_alloc_address()
281 if (zpci_refresh_global(zdev)) in dma_alloc_address()
284 bitmap_andnot(zdev->iommu_bitmap, zdev->iommu_bitmap, in dma_alloc_address()
285 zdev->lazy_bitmap, zdev->iommu_pages); in dma_alloc_address()
286 bitmap_zero(zdev->lazy_bitmap, zdev->iommu_pages); in dma_alloc_address()
293 zdev->next_bit = offset + size; in dma_alloc_address()
294 spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags); in dma_alloc_address()
296 return zdev->start_dma + offset * PAGE_SIZE; in dma_alloc_address()
299 spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags); in dma_alloc_address()
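
dma_alloc_address() (lines 273-299) is a next-fit IOVA allocator over zdev->iommu_bitmap: it searches from zdev->next_bit, and when that fails it performs a global refresh, reclaims the lazy bitmap, and retries from offset 0 before reusing any wrapped-around addresses. A hedged sketch of that flow (the wrap-around retry and the error return are assumptions):

        static dma_addr_t dma_alloc_address(struct device *dev, int size)
        {
                struct zpci_dev *zdev = to_zpci(to_pci_dev(dev));
                unsigned long offset, flags;

                spin_lock_irqsave(&zdev->iommu_bitmap_lock, flags);
                offset = __dma_alloc_iommu(dev, zdev->next_bit, size);  /* line 277 */
                if (offset == -1) {
                        /* Wrap around: flush the TLB before handing out reused IOVAs. */
                        if (zpci_refresh_global(zdev))                  /* line 281 */
                                goto out_error;
                        bitmap_andnot(zdev->iommu_bitmap, zdev->iommu_bitmap,
                                      zdev->lazy_bitmap, zdev->iommu_pages);
                        bitmap_zero(zdev->lazy_bitmap, zdev->iommu_pages);
                        offset = __dma_alloc_iommu(dev, 0, size);
                        if (offset == -1)
                                goto out_error;
                }
                zdev->next_bit = offset + size;                         /* line 293 */
                spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags);
                return zdev->start_dma + offset * PAGE_SIZE;            /* line 296 */

        out_error:
                spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags);
                return DMA_MAPPING_ERROR;
        }
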
305 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in dma_free_address() local
308 offset = (dma_addr - zdev->start_dma) >> PAGE_SHIFT; in dma_free_address()
310 spin_lock_irqsave(&zdev->iommu_bitmap_lock, flags); in dma_free_address()
311 if (!zdev->iommu_bitmap) in dma_free_address()
315 bitmap_clear(zdev->iommu_bitmap, offset, size); in dma_free_address()
317 bitmap_set(zdev->lazy_bitmap, offset, size); in dma_free_address()
320 spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags); in dma_free_address()
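
dma_free_address() (lines 305-320) is the inverse: convert the DMA address back into a bitmap offset and either clear the bits immediately or park them in zdev->lazy_bitmap until the next global refresh. A hedged sketch; the s390_iommu_strict toggle is assumed here, since a zdev-only listing would not show it:

        static bool s390_iommu_strict;  /* assumed flush-policy toggle */

        static void dma_free_address(struct device *dev, dma_addr_t dma_addr, int size)
        {
                struct zpci_dev *zdev = to_zpci(to_pci_dev(dev));
                unsigned long flags, offset;

                offset = (dma_addr - zdev->start_dma) >> PAGE_SHIFT;    /* line 308 */

                spin_lock_irqsave(&zdev->iommu_bitmap_lock, flags);
                if (!zdev->iommu_bitmap)        /* device already torn down (line 311) */
                        goto out;

                if (s390_iommu_strict)
                        bitmap_clear(zdev->iommu_bitmap, offset, size);         /* line 315 */
                else
                        bitmap_set(zdev->lazy_bitmap, offset, size);            /* line 317 */
        out:
                spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags);
        }
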
338 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in s390_dma_map_pages() local
359 ret = dma_update_trans(zdev, pa, dma_addr, size, flags); in s390_dma_map_pages()
363 atomic64_add(nr_pages, &zdev->mapped_pages); in s390_dma_map_pages()
378 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in s390_dma_unmap_pages() local
383 ret = dma_update_trans(zdev, 0, dma_addr, npages * PAGE_SIZE, in s390_dma_unmap_pages()
391 atomic64_add(npages, &zdev->unmapped_pages); in s390_dma_unmap_pages()
399 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in s390_dma_alloc() local
416 atomic64_add(size / PAGE_SIZE, &zdev->allocated_pages); in s390_dma_alloc()
426 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in s390_dma_free() local
429 atomic64_sub(size / PAGE_SIZE, &zdev->allocated_pages); in s390_dma_free()
440 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); in __s390_dma_map_sg() local
457 ret = __dma_update_trans(zdev, pa, dma_addr, in __s390_dma_map_sg()
464 ret = __dma_purge_tlb(zdev, dma_addr_base, size, flags); in __s390_dma_map_sg()
469 atomic64_add(nr_pages, &zdev->mapped_pages); in __s390_dma_map_sg()
474 dma_update_trans(zdev, 0, dma_addr_base, dma_addr - dma_addr_base, in __s390_dma_map_sg()
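
__s390_dma_map_sg() (lines 440-474) maps a run of scatterlist elements into one contiguous DMA window: translations are installed element by element with __dma_update_trans(), a single TLB purge then covers the whole dma_addr_base..size range, and on failure the partially built range is withdrawn again via dma_update_trans(..., ZPCI_PTE_INVALID). A hedged outline; the scatterlist walk and the flag handling are assumptions:

        static int __s390_dma_map_sg(struct device *dev, struct scatterlist *sg,
                                     size_t size, dma_addr_t *handle,
                                     enum dma_data_direction dir)
        {
                unsigned int nr_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
                struct zpci_dev *zdev = to_zpci(to_pci_dev(dev));
                dma_addr_t dma_addr_base, dma_addr;
                int flags = ZPCI_PTE_VALID;     /* assumed mapping flags */
                struct scatterlist *s;
                phys_addr_t pa;
                int ret;

                dma_addr_base = dma_alloc_address(dev, nr_pages);
                if (dma_addr_base == DMA_MAPPING_ERROR)
                        return -ENOMEM;

                dma_addr = dma_addr_base;
                for (s = sg; dma_addr < dma_addr_base + size; s = sg_next(s)) {
                        pa = page_to_phys(sg_page(s));
                        ret = __dma_update_trans(zdev, pa, dma_addr,
                                                 s->offset + s->length, flags); /* line 457 */
                        if (ret)
                                goto unmap;
                        dma_addr += s->offset + s->length;
                }
                ret = __dma_purge_tlb(zdev, dma_addr_base, size, flags);        /* line 464 */
                if (ret)
                        goto unmap;

                *handle = dma_addr_base;
                atomic64_add(nr_pages, &zdev->mapped_pages);                    /* line 469 */
                return 0;

        unmap:
                dma_update_trans(zdev, 0, dma_addr_base, dma_addr - dma_addr_base,
                                 ZPCI_PTE_INVALID);                             /* line 474 */
                dma_free_address(dev, dma_addr_base, nr_pages);
                return ret;
        }
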
545 int zpci_dma_init_device(struct zpci_dev *zdev) in zpci_dma_init_device() argument
554 WARN_ON(zdev->s390_domain); in zpci_dma_init_device()
556 spin_lock_init(&zdev->iommu_bitmap_lock); in zpci_dma_init_device()
557 spin_lock_init(&zdev->dma_table_lock); in zpci_dma_init_device()
559 zdev->dma_table = dma_alloc_cpu_table(); in zpci_dma_init_device()
560 if (!zdev->dma_table) { in zpci_dma_init_device()
574 zdev->start_dma = PAGE_ALIGN(zdev->start_dma); in zpci_dma_init_device()
575 zdev->iommu_size = min3((u64) high_memory, in zpci_dma_init_device()
576 ZPCI_TABLE_SIZE_RT - zdev->start_dma, in zpci_dma_init_device()
577 zdev->end_dma - zdev->start_dma + 1); in zpci_dma_init_device()
578 zdev->end_dma = zdev->start_dma + zdev->iommu_size - 1; in zpci_dma_init_device()
579 zdev->iommu_pages = zdev->iommu_size >> PAGE_SHIFT; in zpci_dma_init_device()
580 zdev->iommu_bitmap = vzalloc(zdev->iommu_pages / 8); in zpci_dma_init_device()
581 if (!zdev->iommu_bitmap) { in zpci_dma_init_device()
586 zdev->lazy_bitmap = vzalloc(zdev->iommu_pages / 8); in zpci_dma_init_device()
587 if (!zdev->lazy_bitmap) { in zpci_dma_init_device()
593 rc = zpci_register_ioat(zdev, 0, zdev->start_dma, zdev->end_dma, in zpci_dma_init_device()
594 (u64) zdev->dma_table); in zpci_dma_init_device()
600 vfree(zdev->iommu_bitmap); in zpci_dma_init_device()
601 zdev->iommu_bitmap = NULL; in zpci_dma_init_device()
602 vfree(zdev->lazy_bitmap); in zpci_dma_init_device()
603 zdev->lazy_bitmap = NULL; in zpci_dma_init_device()
605 dma_free_cpu_table(zdev->dma_table); in zpci_dma_init_device()
606 zdev->dma_table = NULL; in zpci_dma_init_device()
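
zpci_dma_init_device() (lines 545-606) sets up everything above: it page-aligns start_dma, clamps the aperture with min3() to main memory, the reach of the region-third table (ZPCI_TABLE_SIZE_RT) and the device-reported window, and allocates one bit per aperture page for each bitmap, hence the vzalloc(zdev->iommu_pages / 8). A hedged sketch of the setup and its unwind order (label names and the error propagation are assumptions):

        int zpci_dma_init_device(struct zpci_dev *zdev)
        {
                int rc;

                /* Line 554: the device must not be owned by an IOMMU domain here. */
                WARN_ON(zdev->s390_domain);

                spin_lock_init(&zdev->iommu_bitmap_lock);
                spin_lock_init(&zdev->dma_table_lock);

                zdev->dma_table = dma_alloc_cpu_table();
                if (!zdev->dma_table)
                        return -ENOMEM;

                zdev->start_dma = PAGE_ALIGN(zdev->start_dma);
                zdev->iommu_size = min3((u64) high_memory,
                                        ZPCI_TABLE_SIZE_RT - zdev->start_dma,
                                        zdev->end_dma - zdev->start_dma + 1);
                zdev->end_dma = zdev->start_dma + zdev->iommu_size - 1;
                zdev->iommu_pages = zdev->iommu_size >> PAGE_SHIFT;
                zdev->iommu_bitmap = vzalloc(zdev->iommu_pages / 8);    /* one bit per page */
                if (!zdev->iommu_bitmap) {
                        rc = -ENOMEM;
                        goto free_dma_table;
                }
                zdev->lazy_bitmap = vzalloc(zdev->iommu_pages / 8);
                if (!zdev->lazy_bitmap) {
                        rc = -ENOMEM;
                        goto free_bitmap;
                }

                rc = zpci_register_ioat(zdev, 0, zdev->start_dma, zdev->end_dma,
                                        (u64) zdev->dma_table);         /* lines 593-594 */
                if (rc)
                        goto free_bitmap;
                return 0;

        free_bitmap:
                vfree(zdev->iommu_bitmap);
                zdev->iommu_bitmap = NULL;
                vfree(zdev->lazy_bitmap);
                zdev->lazy_bitmap = NULL;
        free_dma_table:
                dma_free_cpu_table(zdev->dma_table);
                zdev->dma_table = NULL;
                return rc;
        }
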
611 void zpci_dma_exit_device(struct zpci_dev *zdev) in zpci_dma_exit_device() argument
618 WARN_ON(zdev->s390_domain); in zpci_dma_exit_device()
620 if (zpci_unregister_ioat(zdev, 0)) in zpci_dma_exit_device()
623 dma_cleanup_tables(zdev->dma_table); in zpci_dma_exit_device()
624 zdev->dma_table = NULL; in zpci_dma_exit_device()
625 vfree(zdev->iommu_bitmap); in zpci_dma_exit_device()
626 zdev->iommu_bitmap = NULL; in zpci_dma_exit_device()
627 vfree(zdev->lazy_bitmap); in zpci_dma_exit_device()
628 zdev->lazy_bitmap = NULL; in zpci_dma_exit_device()
630 zdev->next_bit = 0; in zpci_dma_exit_device()