Lines Matching refs:stit

98 struct kvmppc_spapr_tce_iommu_table *stit = container_of(head, in kvm_spapr_tce_iommu_table_free() local
101 iommu_tce_table_put(stit->tbl); in kvm_spapr_tce_iommu_table_free()
103 kfree(stit); in kvm_spapr_tce_iommu_table_free()
108 struct kvmppc_spapr_tce_iommu_table *stit = container_of(kref, in kvm_spapr_tce_liobn_put() local
111 list_del_rcu(&stit->next); in kvm_spapr_tce_liobn_put()
113 call_rcu(&stit->rcu, kvm_spapr_tce_iommu_table_free); in kvm_spapr_tce_liobn_put()
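
The two groups of matches above form the teardown path for one stit entry. kvm_spapr_tce_liobn_put() is the kref release callback: it unlinks the entry from its list under RCU and defers the actual destruction; kvm_spapr_tce_iommu_table_free() then runs after the grace period, drops the iommu_table reference and frees the entry. Below is a minimal sketch of how these matched lines fit together; the struct layout, the container_of() continuations and the static qualifiers are inferred from the matches rather than shown by the listing.

    /*
     * Per-LIOBN link from a KVM TCE table to a host iommu_table; the
     * fields are inferred from the matched lines (RCU head, list
     * linkage, table pointer, refcount).
     */
    struct kvmppc_spapr_tce_iommu_table {
            struct rcu_head rcu;
            struct list_head next;
            struct iommu_table *tbl;
            struct kref kref;
    };

    static void kvm_spapr_tce_iommu_table_free(struct rcu_head *head)
    {
            /* Recover the entry from its embedded rcu_head. */
            struct kvmppc_spapr_tce_iommu_table *stit = container_of(head,
                            struct kvmppc_spapr_tce_iommu_table, rcu);

            iommu_tce_table_put(stit->tbl);  /* drop the iommu_table reference */
            kfree(stit);
    }

    static void kvm_spapr_tce_liobn_put(struct kref *kref)
    {
            /* Runs when kref_put() drops the last reference. */
            struct kvmppc_spapr_tce_iommu_table *stit = container_of(kref,
                            struct kvmppc_spapr_tce_iommu_table, kref);

            list_del_rcu(&stit->next);
            call_rcu(&stit->rcu, kvm_spapr_tce_iommu_table_free);
    }
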
121 struct kvmppc_spapr_tce_iommu_table *stit, *tmp; in kvm_spapr_tce_release_iommu_group() local
130 list_for_each_entry_safe(stit, tmp, &stt->iommu_tables, next) { in kvm_spapr_tce_release_iommu_group()
132 if (table_group->tables[i] != stit->tbl) in kvm_spapr_tce_release_iommu_group()
135 kref_put(&stit->kref, kvm_spapr_tce_liobn_put); in kvm_spapr_tce_release_iommu_group()
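
kvm_spapr_tce_release_iommu_group() is the detach path: for each KVM TCE table it scans the attached stit entries and drops the reference of any entry whose backing iommu_table belongs to the group being released, which eventually funnels into kvm_spapr_tce_liobn_put() above. A sketch of the loop around the matched lines; the iteration over table_group->tables[] and the surrounding walk over the VM's TCE tables are inferred context, not shown by the listing.

    /* Drop the stit references that point into the released group's tables. */
    list_for_each_entry_safe(stit, tmp, &stt->iommu_tables, next) {
            for (i = 0; i < IOMMU_TABLE_GROUP_MAX_TABLES; ++i) {
                    if (table_group->tables[i] != stit->tbl)
                            continue;

                    /* The final put unlinks and frees via kvm_spapr_tce_liobn_put(). */
                    kref_put(&stit->kref, kvm_spapr_tce_liobn_put);
            }
    }
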
150 struct kvmppc_spapr_tce_iommu_table *stit; in kvm_spapr_tce_attach_iommu_group() local
195 list_for_each_entry_rcu(stit, &stt->iommu_tables, next) { in kvm_spapr_tce_attach_iommu_group()
196 if (tbl != stit->tbl) in kvm_spapr_tce_attach_iommu_group()
199 if (!kref_get_unless_zero(&stit->kref)) { in kvm_spapr_tce_attach_iommu_group()
211 stit = kzalloc(sizeof(*stit), GFP_KERNEL); in kvm_spapr_tce_attach_iommu_group()
212 if (!stit) { in kvm_spapr_tce_attach_iommu_group()
217 stit->tbl = tbl; in kvm_spapr_tce_attach_iommu_group()
218 kref_init(&stit->kref); in kvm_spapr_tce_attach_iommu_group()
220 list_add_rcu(&stit->next, &stt->iommu_tables); in kvm_spapr_tce_attach_iommu_group()
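
kvm_spapr_tce_attach_iommu_group() first tries to reuse an existing entry: it walks stt->iommu_tables under RCU and, on a match, takes a reference with kref_get_unless_zero(), which fails only if the entry is concurrently being torn down. Only when no entry exists does it allocate a new stit, initialise its refcount and publish it with list_add_rcu(). A sketch assembled from the matched lines; the locking calls, the cleanup of the tbl reference on error and the exact return codes are assumptions.

    rcu_read_lock();
    list_for_each_entry_rcu(stit, &stt->iommu_tables, next) {
            if (tbl != stit->tbl)
                    continue;

            if (!kref_get_unless_zero(&stit->kref)) {
                    /* Entry is on its way out; report failure (error code assumed). */
                    rcu_read_unlock();
                    return -ENOTTY;
            }

            /* Table already tracked; the extra reference is all we need. */
            rcu_read_unlock();
            return 0;
    }
    rcu_read_unlock();

    stit = kzalloc(sizeof(*stit), GFP_KERNEL);
    if (!stit)
            return -ENOMEM;

    stit->tbl = tbl;
    kref_init(&stit->kref);
    list_add_rcu(&stit->next, &stt->iommu_tables);
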
264 struct kvmppc_spapr_tce_iommu_table *stit, *tmp; in kvm_spapr_tce_release() local
271 list_for_each_entry_safe(stit, tmp, &stt->iommu_tables, next) { in kvm_spapr_tce_release()
272 WARN_ON(!kref_read(&stit->kref)); in kvm_spapr_tce_release()
274 if (kref_put(&stit->kref, kvm_spapr_tce_liobn_put)) in kvm_spapr_tce_release()
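
In the file-release path, kvm_spapr_tce_release() drains whatever stit references are still outstanding so the hardware tables are dropped even if userspace never detached the IOMMU group; the WARN_ON() just flags an entry whose refcount has already hit zero. A sketch of the drain loop built from the matched lines; the repeated-put shape and the rest of the release logic are inferred.

    list_for_each_entry_safe(stit, tmp, &stt->iommu_tables, next) {
            WARN_ON(!kref_read(&stit->kref));    /* entry should still be referenced */

            /* Keep putting until the final reference runs kvm_spapr_tce_liobn_put(). */
            while (1) {
                    if (kref_put(&stit->kref, kvm_spapr_tce_liobn_put))
                            break;
            }
    }
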
497 struct kvmppc_spapr_tce_iommu_table *stit; in kvmppc_h_put_tce() local
528 list_for_each_entry_lockless(stit, &stt->iommu_tables, next) { in kvmppc_h_put_tce()
531 stit->tbl, entry); in kvmppc_h_put_tce()
533 ret = kvmppc_tce_iommu_map(vcpu->kvm, stt, stit->tbl, in kvmppc_h_put_tce()
543 kvmppc_clear_tce(stit->tbl, entry); in kvmppc_h_put_tce()
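
kvmppc_h_put_tce() services the H_PUT_TCE hcall for a single entry. After the request has been validated it applies the update to every hardware table attached to the LIOBN: a DMA_NONE direction means unmap, anything else maps the translated userspace address, and a failure clears the hardware entry so it cannot be left stale. A sketch of that loop reconstructed from the matched lines; the names dir, ua and entry and the exact error handling are inferred from context.

    list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
            if (dir == DMA_NONE)
                    ret = kvmppc_tce_iommu_unmap(vcpu->kvm, stt,
                                    stit->tbl, entry);
            else
                    ret = kvmppc_tce_iommu_map(vcpu->kvm, stt, stit->tbl,
                                    entry, ua, dir);

            if (ret != H_SUCCESS) {
                    /* Keep the hardware view consistent, then bail out (assumed). */
                    kvmppc_clear_tce(stit->tbl, entry);
                    break;
            }
    }
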
564 struct kvmppc_spapr_tce_iommu_table *stit; in kvmppc_h_put_tce_indirect() local
608 list_for_each_entry_lockless(stit, &stt->iommu_tables, next) { in kvmppc_h_put_tce_indirect()
610 stit->tbl, entry + i, ua, in kvmppc_h_put_tce_indirect()
620 kvmppc_clear_tce(stit->tbl, entry); in kvmppc_h_put_tce_indirect()
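
kvmppc_h_put_tce_indirect() handles a guest-supplied list of TCEs in one hcall: for each of the npages entries it repeats the same per-table mapping as the single-TCE path, indexed at entry + i, clearing the hardware entry on failure. A sketch of the nested loops based on the matched lines; the outer loop, the per-iteration TCE fetch and translation, and the direction argument are inferred context.

    for (i = 0; i < npages; ++i) {
            /* tce and ua for this iteration are fetched and validated earlier. */
            list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
                    ret = kvmppc_tce_iommu_map(vcpu->kvm, stt,
                                    stit->tbl, entry + i, ua,
                                    iommu_tce_direction(tce));

                    if (ret != H_SUCCESS) {
                            /* Clear the hardware entry and abort (assumed error path). */
                            kvmppc_clear_tce(stit->tbl, entry);
                            break;
                    }
            }
    }
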
639 struct kvmppc_spapr_tce_iommu_table *stit; in kvmppc_h_stuff_tce() local
653 list_for_each_entry_lockless(stit, &stt->iommu_tables, next) { in kvmppc_h_stuff_tce()
658 stit->tbl, entry + i); in kvmppc_h_stuff_tce()
667 kvmppc_clear_tce(stit->tbl, entry); in kvmppc_h_stuff_tce()
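
kvmppc_h_stuff_tce() implements H_STUFF_TCE, which writes one TCE value into npages consecutive entries; for the attached hardware tables this becomes an unmap per entry, again with kvmppc_clear_tce() as the fallback when an unmap fails. A sketch based on the matched lines; the entry calculation, the loop nesting and the error handling are assumptions.

    list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
            unsigned long entry = ioba >> stt->page_shift;   /* assumed index calculation */

            for (i = 0; i < npages; ++i) {
                    ret = kvmppc_tce_iommu_unmap(vcpu->kvm, stt,
                                    stit->tbl, entry + i);

                    if (ret != H_SUCCESS) {
                            /* Leave the hardware entry cleared rather than stale. */
                            kvmppc_clear_tce(stit->tbl, entry);
                            break;
                    }
            }
    }
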