Lines Matching refs:tce
354 static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce, in kvmppc_tce_to_ua() argument
357 unsigned long gfn = tce >> PAGE_SHIFT; in kvmppc_tce_to_ua()
365 (tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE)); in kvmppc_tce_to_ua()
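The matches above fall in kvmppc_tce_to_ua(): a TCE value packs the guest physical page number in its upper bits and the TCE_PCI_READ/TCE_PCI_WRITE permission bits in its low bits, so the helper shifts out the gfn (line 357) and keeps the remaining low bits (line 365) when forming the host userspace address. Below is a minimal userspace sketch of just that decoding step, assuming 4K pages and the usual tce.h bit values; it is illustrative only, not the kernel code.

    #include <stdio.h>

    #define PAGE_SHIFT      12UL
    #define PAGE_SIZE       (1UL << PAGE_SHIFT)
    #define PAGE_MASK       (~(PAGE_SIZE - 1))
    #define TCE_PCI_READ    0x1UL   /* device may read guest memory */
    #define TCE_PCI_WRITE   0x2UL   /* device may write guest memory */

    int main(void)
    {
            unsigned long tce = 0x40001003UL;   /* example entry: page 0x40001, read+write */
            unsigned long gfn = tce >> PAGE_SHIFT;
            unsigned long low = tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE);

            printf("gfn=0x%lx perms=0x%lx in-page bits=0x%lx\n",
                   gfn, tce & (TCE_PCI_READ | TCE_PCI_WRITE), low);
            return 0;
    }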
371 unsigned long tce) in kvmppc_tce_validate() argument
373 unsigned long gpa = tce & ~(TCE_PCI_READ | TCE_PCI_WRITE); in kvmppc_tce_validate()
374 enum dma_data_direction dir = iommu_tce_direction(tce); in kvmppc_tce_validate()
385 if (kvmppc_tce_to_ua(stt->kvm, tce, &ua)) in kvmppc_tce_validate()
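kvmppc_tce_validate() strips the permission bits to get the guest physical address (line 373) and derives the DMA direction from those same bits (line 374) before checking that the page is translatable (line 385). The direction mapping is sketched below as plain C; it mirrors what iommu_tce_direction() reports (enum values follow linux/dma-direction.h) rather than reproducing the kernel source.

    #include <stdio.h>

    #define TCE_PCI_READ    0x1UL   /* device may read guest memory */
    #define TCE_PCI_WRITE   0x2UL   /* device may write guest memory */

    enum dma_data_direction {
            DMA_BIDIRECTIONAL = 0,
            DMA_TO_DEVICE = 1,
            DMA_FROM_DEVICE = 2,
            DMA_NONE = 3,
    };

    static enum dma_data_direction tce_direction(unsigned long tce)
    {
            if ((tce & TCE_PCI_READ) && (tce & TCE_PCI_WRITE))
                    return DMA_BIDIRECTIONAL;
            if (tce & TCE_PCI_READ)
                    return DMA_TO_DEVICE;
            if (tce & TCE_PCI_WRITE)
                    return DMA_FROM_DEVICE;
            return DMA_NONE;        /* no permissions: effectively a cleared entry */
    }

    int main(void)
    {
            printf("R|W -> %d, R -> %d, W -> %d, none -> %d\n",
                   tce_direction(0x3), tce_direction(0x1),
                   tce_direction(0x2), tce_direction(0x0));
            return 0;
    }

The put path uses exactly this distinction: only a non-DMA_NONE entry needs a host address (see line 600 below).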
411 unsigned long idx, unsigned long tce) in kvmppc_tce_put() argument
423 if (!tce) in kvmppc_tce_put()
432 tbl[idx % TCES_PER_PAGE] = tce; in kvmppc_tce_put()
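kvmppc_tce_put() stores the validated entry. The guest's TCE table is backed by an array of host pages, each holding TCES_PER_PAGE 64-bit entries, so an index selects page idx / TCES_PER_PAGE and slot idx % TCES_PER_PAGE (line 432); the !tce check at line 423 lets a zero (clear) entry be dropped early when its backing page was never allocated. A small self-contained sketch of that indexing follows, with calloc() as a stand-in for the kernel's backing-page allocation; names and sizes are illustrative.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define PAGE_SIZE       4096UL
    #define TCES_PER_PAGE   (PAGE_SIZE / sizeof(uint64_t))

    struct tce_table {
            uint64_t *pages[16];            /* demand-allocated backing pages */
    };

    /* Hypothetical helper mirroring the put path: store one entry. */
    static void tce_put(struct tce_table *tt, unsigned long idx, uint64_t tce)
    {
            unsigned long pg = idx / TCES_PER_PAGE;

            if (!tt->pages[pg]) {
                    /* Clearing an entry in a page that was never allocated: nothing to do. */
                    if (!tce)
                            return;
                    tt->pages[pg] = calloc(TCES_PER_PAGE, sizeof(uint64_t));
                    if (!tt->pages[pg])
                            return;
            }
            tt->pages[pg][idx % TCES_PER_PAGE] = tce;
    }

    int main(void)
    {
            struct tce_table tt = { 0 };
            unsigned long idx = 513;

            tce_put(&tt, idx, 0x40001003ULL);
            printf("page %lu, slot %lu = 0x%llx\n",
                   idx / TCES_PER_PAGE, idx % TCES_PER_PAGE,
                   (unsigned long long)tt.pages[idx / TCES_PER_PAGE][idx % TCES_PER_PAGE]);
            return 0;
    }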
573 unsigned long ioba, unsigned long tce) in kvmppc_h_put_tce() argument
594 ret = kvmppc_tce_validate(stt, tce); in kvmppc_h_put_tce()
598 dir = iommu_tce_direction(tce); in kvmppc_h_put_tce()
600 if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce()
622 kvmppc_tce_put(stt, entry, tce); in kvmppc_h_put_tce()
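Taken together, the kvmppc_h_put_tce() matches outline the H_PUT_TCE flow: validate the incoming TCE (line 594), derive its direction (line 598), translate it to a host address only when it actually maps something (line 600), then write the table slot named by ioba (line 622). The sketch below is a compact, self-contained version of that flow under simplified assumptions: identity translation, a fixed-size toy table, and hypothetical helper names standing in for the kvmppc_* functions.

    #include <stdio.h>

    #define TCE_PCI_READ    0x1UL
    #define TCE_PCI_WRITE   0x2UL
    #define H_SUCCESS       0L
    #define H_PARAMETER     (-4L)           /* PAPR H_PARAMETER return code */

    static unsigned long tce_table[8];      /* toy table: one slot per IOMMU page */

    /* Toy stand-in for kvmppc_tce_to_ua(): identity translation. */
    static int tce_to_ua(unsigned long tce, unsigned long *ua)
    {
            *ua = tce & ~(TCE_PCI_READ | TCE_PCI_WRITE);
            return 0;
    }

    static long h_put_tce(unsigned long ioba, unsigned long tce,
                          unsigned int page_shift)
    {
            unsigned long ua = 0;
            unsigned long entry = ioba >> page_shift;       /* slot named by the I/O address */
            unsigned long mapping = tce & (TCE_PCI_READ | TCE_PCI_WRITE);  /* dir != DMA_NONE */

            if (entry >= 8)
                    return H_PARAMETER;     /* toy bounds check; the kernel validates ioba separately */

            if (mapping && tce_to_ua(tce, &ua))
                    return H_PARAMETER;     /* cannot resolve the guest page */

            tce_table[entry] = tce;         /* the kvmppc_tce_put() step */
            return H_SUCCESS;
    }

    int main(void)
    {
            long ret = h_put_tce(0x3000, 0x40001003UL, 12);

            printf("ret=%ld, slot 3 = 0x%lx\n", ret, tce_table[3]);
            return 0;
    }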
639 u64 tce; in kvmppc_h_put_tce_indirect() local
669 if (get_user(tce, tces + i)) { in kvmppc_h_put_tce_indirect()
673 tce = be64_to_cpu(tce); in kvmppc_h_put_tce_indirect()
675 ret = kvmppc_tce_validate(stt, tce); in kvmppc_h_put_tce_indirect()
691 if (get_user(tce, tces + i)) { in kvmppc_h_put_tce_indirect()
695 tce = be64_to_cpu(tce); in kvmppc_h_put_tce_indirect()
697 if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce_indirect()
705 iommu_tce_direction(tce)); in kvmppc_h_put_tce_indirect()
714 kvmppc_tce_put(stt, entry + i, tce); in kvmppc_h_put_tce_indirect()
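kvmppc_h_put_tce_indirect() handles a whole list of TCEs. Note the two get_user() fetches at lines 669 and 691: each entry is read from guest memory, converted from big-endian with be64_to_cpu(), validated in a first pass, then re-read, translated, and applied in a second pass, ending in the same kvmppc_tce_put() store (line 714). The sketch below mimics that two-pass, byte-swapping structure in plain userspace C (glibc endian.h assumed; the validation check is a toy alignment test, not the kernel's).

    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>

    #define TCE_PCI_READ    0x1ULL
    #define TCE_PCI_WRITE   0x2ULL

    /* Toy stand-in for kvmppc_tce_validate(): require a 4K-aligned guest address. */
    static int validate_tce(uint64_t tce)
    {
            uint64_t gpa = tce & ~(TCE_PCI_READ | TCE_PCI_WRITE);

            return (gpa & 0xfffULL) ? -1 : 0;
    }

    int main(void)
    {
            /* Guest-format (big-endian) TCE list, as the indirect hypercall sees it. */
            uint64_t tces_be[2] = { htobe64(0x40001003ULL), htobe64(0x40002002ULL) };
            uint64_t tce;
            int i;

            /* Pass 1: fetch, convert, and validate every entry before touching the table. */
            for (i = 0; i < 2; i++) {
                    tce = be64toh(tces_be[i]);      /* get_user() + be64_to_cpu() in the kernel */
                    if (validate_tce(tce))
                            return 1;
            }

            /* Pass 2: fetch and convert again, then translate and store each entry. */
            for (i = 0; i < 2; i++) {
                    tce = be64toh(tces_be[i]);
                    printf("entry %d: gpa=0x%llx perms=0x%llx\n", i,
                           (unsigned long long)(tce & ~(TCE_PCI_READ | TCE_PCI_WRITE)),
                           (unsigned long long)(tce & (TCE_PCI_READ | TCE_PCI_WRITE)));
            }
            return 0;
    }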