Lines Matching full:ua
(Each hit below shows the kernel source line number, the matching code, and the enclosing function; a trailing "argument" or "local" notes how ua is declared at that site.)
344 unsigned long *ua) in kvmppc_tce_to_ua() argument
353 *ua = __gfn_to_hva_memslot(memslot, gfn) | in kvmppc_tce_to_ua()
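These two hits cover kvmppc_tce_to_ua(), which turns a guest TCE into a host userspace address. A minimal sketch of the full helper, reconstructed around the hit at line 353; the memslot lookup and the exact permission-bit mask are my assumptions, and details vary by kernel version:

	static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce,
			unsigned long *ua)
	{
		unsigned long gfn = tce >> PAGE_SHIFT;
		struct kvm_memory_slot *memslot;

		/* Find the memslot backing the guest frame encoded in the TCE */
		memslot = search_memslots(kvm_memslots(kvm), gfn);
		if (!memslot)
			return -EINVAL;

		/* Host virtual base of the frame, plus the in-page offset with
		 * the TCE_PCI_READ/TCE_PCI_WRITE permission bits masked out */
		*ua = __gfn_to_hva_memslot(memslot, gfn) |
			(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));

		return 0;
	}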
365 unsigned long ua = 0; in kvmppc_tce_validate() local
374 if (kvmppc_tce_to_ua(stt->kvm, tce, &ua)) in kvmppc_tce_validate()
383 mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift); in kvmppc_tce_validate()
384 if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa)) { in kvmppc_tce_validate()
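The validate hits show ua being produced once and then checked against every hardware table attached to the guest's TCE table. A condensed sketch under that reading; the DMA-direction and guest-physical-address checks of the real function are omitted here:

	static long kvmppc_tce_validate(struct kvmppc_spapr_tce_table *stt,
			unsigned long tce)
	{
		struct kvmppc_spapr_tce_iommu_table *stit;
		unsigned long ua = 0;

		if (kvmppc_tce_to_ua(stt->kvm, tce, &ua))
			return H_TOO_HARD;

		list_for_each_entry_rcu(stit, &stt->iommu_tables, next) {
			struct mm_iommu_table_group_mem_t *mem;
			unsigned long hpa = 0;
			long shift = stit->tbl->it_page_shift;

			/* Only memory preregistered by userspace may be mapped,
			 * and it must translate to a host physical address */
			mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift);
			if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa))
				return H_TOO_HARD;
		}

		return H_SUCCESS;
	}

Validating up front means the later map path can treat a failed translation as a kernel bug rather than a guest error.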
493 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_do_map() argument
505 mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift); in kvmppc_tce_iommu_do_map()
510 if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa))) in kvmppc_tce_iommu_do_map()
525 *pua = cpu_to_be64(ua); in kvmppc_tce_iommu_do_map()
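In do_map, ua is both an input (looked up and translated to a host physical address) and a value worth keeping: it is stored big-endian in the table's userspace-view entry so a later unmap can find the region to unpin. A hedged sketch; iommu_tce_xchg() has been renamed in newer kernels and the failure unwinding is simplified:

	static long kvmppc_tce_iommu_do_map(struct kvm *kvm,
			struct iommu_table *tbl, unsigned long entry,
			unsigned long ua, enum dma_data_direction dir)
	{
		unsigned long hpa = 0;
		__be64 *pua = IOMMU_TABLE_USERSPACE_ENTRY(tbl, entry);
		struct mm_iommu_table_group_mem_t *mem;

		if (!pua)
			return H_TOO_HARD;

		mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift);
		if (!mem)
			return H_TOO_HARD;

		/* Validation already ran, so a failed translation is a bug */
		if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua,
						tbl->it_page_shift, &hpa)))
			return H_TOO_HARD;

		if (mm_iommu_mapped_inc(mem))
			return H_TOO_HARD;

		if (WARN_ON_ONCE(iommu_tce_xchg(tbl, entry, &hpa, &dir))) {
			mm_iommu_mapped_dec(mem);
			return H_TOO_HARD;
		}

		/* Line 525 above: remember ua for the eventual unmap/unpin */
		*pua = cpu_to_be64(ua);

		return 0;
	}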
532 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_map() argument
543 io_entry + i, ua + pgoff, dir); in kvmppc_tce_iommu_map()
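The wrapper around do_map handles guest IOMMU pages larger than the hardware page: one guest TCE fans out into several consecutive hardware entries, with ua advanced by one hardware page per step (the hit at line 543). This tracks the upstream helper closely, but is still a sketch:

	static long kvmppc_tce_iommu_map(struct kvm *kvm,
			struct kvmppc_spapr_tce_table *stt,
			struct iommu_table *tbl, unsigned long entry,
			unsigned long ua, enum dma_data_direction dir)
	{
		unsigned long i, pgoff;
		unsigned long subpages = 1ULL << (stt->page_shift -
				tbl->it_page_shift);
		unsigned long io_entry = entry * subpages;
		long ret = H_SUCCESS;

		for (i = 0, pgoff = 0; i < subpages;
				++i, pgoff += IOMMU_PAGE_SIZE(tbl)) {
			ret = kvmppc_tce_iommu_do_map(kvm, tbl,
					io_entry + i, ua + pgoff, dir);
			if (ret != H_SUCCESS)
				break;
		}

		return ret;
	}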
557 unsigned long entry, ua = 0; in kvmppc_h_put_tce() local
579 if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce()
592 entry, ua, dir); in kvmppc_h_put_tce()
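In the H_PUT_TCE handler, ua is computed only when the TCE maps something: DMA_NONE means the guest is clearing the entry, so there is no address to translate (the hit at line 579). Condensed sketch; ioba validation, SRCU locking, and failure cleanup are trimmed:

	long kvmppc_h_put_tce(struct kvm_vcpu *vcpu, unsigned long liobn,
			unsigned long ioba, unsigned long tce)
	{
		struct kvmppc_spapr_tce_table *stt;
		struct kvmppc_spapr_tce_iommu_table *stit;
		unsigned long entry, ua = 0;
		enum dma_data_direction dir;
		long ret;

		stt = kvmppc_find_table(vcpu->kvm, liobn);
		if (!stt)
			return H_TOO_HARD;

		ret = kvmppc_tce_validate(stt, tce);
		if (ret != H_SUCCESS)
			return ret;

		dir = iommu_tce_direction(tce);
		if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua))
			return H_PARAMETER;

		entry = ioba >> stt->page_shift;

		list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
			if (dir == DMA_NONE)
				ret = kvmppc_tce_iommu_unmap(vcpu->kvm, stt,
						stit->tbl, entry);
			else
				ret = kvmppc_tce_iommu_map(vcpu->kvm, stt,
						stit->tbl, entry, ua, dir);
			if (ret != H_SUCCESS)
				return ret;
		}

		/* Update the KVM-side shadow table as well */
		kvmppc_tce_put(stt, entry, tce);

		return H_SUCCESS;
	}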
617 unsigned long entry, ua = 0; in kvmppc_h_put_tce_indirect() local
642 if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua)) { in kvmppc_h_put_tce_indirect()
646 tces = (u64 __user *) ua; in kvmppc_h_put_tce_indirect()
677 if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce_indirect()
684 stit->tbl, entry + i, ua, in kvmppc_h_put_tce_indirect()
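H_PUT_TCE_INDIRECT uses the same translation twice: once to turn the guest address of the TCE list page into a dereferenceable __user pointer (line 646), and once per list element before mapping (line 677). Condensed sketch; the npages bound, list alignment checks, SRCU locking, and error unwinding are trimmed:

	long kvmppc_h_put_tce_indirect(struct kvm_vcpu *vcpu,
			unsigned long liobn, unsigned long ioba,
			unsigned long tce_list, unsigned long npages)
	{
		struct kvmppc_spapr_tce_table *stt;
		struct kvmppc_spapr_tce_iommu_table *stit;
		unsigned long i, entry, ua = 0;
		u64 __user *tces;
		u64 tce;
		long ret;

		stt = kvmppc_find_table(vcpu->kvm, liobn);
		if (!stt)
			return H_TOO_HARD;

		/* The TCE list itself lives in guest memory */
		if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua))
			return H_TOO_HARD;
		tces = (u64 __user *) ua;

		entry = ioba >> stt->page_shift;

		for (i = 0; i < npages; ++i) {
			if (get_user(tce, tces + i))
				return H_TOO_HARD;
			tce = be64_to_cpu(tce);

			ret = kvmppc_tce_validate(stt, tce);
			if (ret != H_SUCCESS)
				return ret;

			/* Each listed TCE is translated like a single-TCE put */
			if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua))
				return H_PARAMETER;

			list_for_each_entry_lockless(stit, &stt->iommu_tables,
					next) {
				ret = kvmppc_tce_iommu_map(vcpu->kvm, stt,
						stit->tbl, entry + i, ua,
						iommu_tce_direction(tce));
				if (ret != H_SUCCESS)
					return ret;
			}

			kvmppc_tce_put(stt, entry + i, tce);
		}

		return H_SUCCESS;
	}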