Lines matching refs: mmu — uses of the struct etnaviv_iommu *mmu argument or local variable in the etnaviv GPU MMU management code
128 static void etnaviv_iommu_remove_mapping(struct etnaviv_iommu *mmu, in etnaviv_iommu_remove_mapping() argument
133 etnaviv_iommu_unmap(mmu, mapping->vram_node.start, in etnaviv_iommu_remove_mapping()
138 static int etnaviv_iommu_find_iova(struct etnaviv_iommu *mmu, in etnaviv_iommu_find_iova() argument
145 lockdep_assert_held(&mmu->lock); in etnaviv_iommu_find_iova()
153 ret = drm_mm_insert_node_in_range(&mmu->mm, node, in etnaviv_iommu_find_iova()
159 drm_mm_scan_init(&scan, &mmu->mm, size, 0, 0, mode); in etnaviv_iommu_find_iova()
163 list_for_each_entry(free, &mmu->mappings, mmu_node) { in etnaviv_iommu_find_iova()
205 etnaviv_iommu_remove_mapping(mmu, m); in etnaviv_iommu_find_iova()
206 m->mmu = NULL; in etnaviv_iommu_find_iova()
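The etnaviv_iommu_find_iova() fragments above (file lines 138-206) show the driver's two-stage IOVA allocator: with mmu->lock held it first asks drm_mm for a free hole and, only when the address space is exhausted, runs an eviction scan over the idle entries on mmu->mappings. Below is a minimal sketch of that pattern, assuming the post-4.12 drm_mm scan API; struct demo_mmu, struct demo_mapping and demo_remove_mapping() are illustrative stand-ins, not the real etnaviv types:

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/list.h>
    #include <linux/mutex.h>
    #include <linux/types.h>
    #include <drm/drm_mm.h>

    /* Illustrative stand-ins for the driver's bookkeeping structures. */
    struct demo_mapping {
            struct list_head        obj_node;       /* on demo_mmu.mappings */
            struct list_head        scan_node;      /* temporary eviction list */
            struct drm_mm_node      vram_node;      /* the IOVA range itself */
            int                     use;            /* >0 while the GPU uses it */
    };

    struct demo_mmu {
            struct mutex            lock;
            struct drm_mm           mm;             /* manages the GPU virtual space */
            struct list_head        mappings;
            bool                    need_flush;     /* GPU must flush its MMU TLB */
    };

    static void demo_remove_mapping(struct demo_mmu *mmu, struct demo_mapping *m)
    {
            /* The real driver unmaps the pages first (etnaviv_iommu_unmap(),
             * file line 133) and clears m->mmu so the mapping cannot be
             * reused (file line 206); here only the drm_mm node is released. */
            drm_mm_remove_node(&m->vram_node);
    }

    static int demo_find_iova(struct demo_mmu *mmu, struct drm_mm_node *node,
                              size_t size)
    {
            struct demo_mapping *free, *m, *n;
            struct drm_mm_scan scan;
            LIST_HEAD(list);
            bool found = false;
            int ret;

            lockdep_assert_held(&mmu->lock);

            /* Stage 1: take a free hole directly if one is large enough. */
            ret = drm_mm_insert_node_in_range(&mmu->mm, node, size, 0, 0,
                                              0, U64_MAX, DRM_MM_INSERT_LOW);
            if (ret != -ENOSPC)
                    return ret;

            /* Stage 2: scan idle mappings until evicting them opens a hole. */
            drm_mm_scan_init(&scan, &mmu->mm, size, 0, 0, DRM_MM_INSERT_LOW);
            list_for_each_entry(free, &mmu->mappings, obj_node) {
                    if (!drm_mm_node_allocated(&free->vram_node))
                            continue;       /* never got an IOVA */
                    if (free->use)
                            continue;       /* busy, must keep */
                    list_add(&free->scan_node, &list);
                    if (drm_mm_scan_add_block(&scan, &free->vram_node)) {
                            found = true;
                            break;
                    }
            }

            /* Every scanned block must be taken back out of the scan (in LIFO
             * order) before drm_mm may be modified again; blocks that are not
             * part of the prospective hole are kept. */
            list_for_each_entry_safe(m, n, &list, scan_node) {
                    if (!drm_mm_scan_remove_block(&scan, &m->vram_node))
                            list_del_init(&m->scan_node);
            }

            if (!found)
                    return -ENOSPC;

            /* Now it is safe to evict the mappings the scan selected. */
            list_for_each_entry_safe(m, n, &list, scan_node) {
                    demo_remove_mapping(mmu, m);
                    list_del_init(&m->scan_node);
            }

            /* The evicted range is free, so a retry should succeed (the real
             * driver retries in a loop, switching to DRM_MM_INSERT_EVICT). */
            return drm_mm_insert_node_in_range(&mmu->mm, node, size, 0, 0,
                                               0, U64_MAX, DRM_MM_INSERT_LOW);
    }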
222 int etnaviv_iommu_map_gem(struct etnaviv_iommu *mmu, in etnaviv_iommu_map_gem() argument
232 mutex_lock(&mmu->lock); in etnaviv_iommu_map_gem()
235 if (mmu->version == ETNAVIV_IOMMU_V1 && in etnaviv_iommu_map_gem()
242 list_add_tail(&mapping->mmu_node, &mmu->mappings); in etnaviv_iommu_map_gem()
250 ret = etnaviv_iommu_find_iova(mmu, node, etnaviv_obj->base.size); in etnaviv_iommu_map_gem()
255 ret = etnaviv_iommu_map(mmu, node->start, sgt, etnaviv_obj->base.size, in etnaviv_iommu_map_gem()
263 list_add_tail(&mapping->mmu_node, &mmu->mappings); in etnaviv_iommu_map_gem()
264 mmu->need_flush = true; in etnaviv_iommu_map_gem()
266 mutex_unlock(&mmu->lock); in etnaviv_iommu_map_gem()
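Around file lines 222-266, etnaviv_iommu_map_gem() ties that allocator to a GEM object: everything happens under mmu->lock, MMUv1 can address contiguous buffers directly (file line 235), otherwise an IOVA is allocated and the scatterlist is mapped into the page tables before the mapping is queued on mmu->mappings and need_flush is raised. A compact sketch of that flow, reusing the stand-in types above; demo_map_sg() is a hypothetical placeholder for the real etnaviv_iommu_map():

    #include <linux/scatterlist.h>

    /* Hypothetical page-table mapper, standing in for etnaviv_iommu_map(). */
    static int demo_map_sg(struct demo_mmu *mmu, u64 iova,
                           struct sg_table *sgt, size_t size)
    {
            /* The real driver walks the sg_table and writes the GPU page
             * tables here (file line 255). */
            return 0;
    }

    static int demo_map_gem(struct demo_mmu *mmu, struct sg_table *sgt,
                            size_t size, struct demo_mapping *mapping)
    {
            struct drm_mm_node *node = &mapping->vram_node;
            int ret;

            /* The real driver short-circuits before this point for MMUv1
             * with physically contiguous buffers (file lines 235-242). */
            mutex_lock(&mmu->lock);

            ret = demo_find_iova(mmu, node, size);
            if (ret < 0)
                    goto unlock;

            ret = demo_map_sg(mmu, node->start, sgt, size);
            if (ret < 0) {
                    drm_mm_remove_node(node);
                    goto unlock;
            }

            /* Make the mapping visible to the eviction scan and force an
             * MMU TLB flush before the next submit (file lines 263-264). */
            list_add_tail(&mapping->obj_node, &mmu->mappings);
            mmu->need_flush = true;

    unlock:
            mutex_unlock(&mmu->lock);
            return ret;
    }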
271 void etnaviv_iommu_unmap_gem(struct etnaviv_iommu *mmu, in etnaviv_iommu_unmap_gem() argument
276 mutex_lock(&mmu->lock); in etnaviv_iommu_unmap_gem()
279 if (mapping->vram_node.mm == &mmu->mm) in etnaviv_iommu_unmap_gem()
280 etnaviv_iommu_remove_mapping(mmu, mapping); in etnaviv_iommu_unmap_gem()
283 mmu->need_flush = true; in etnaviv_iommu_unmap_gem()
284 mutex_unlock(&mmu->lock); in etnaviv_iommu_unmap_gem()
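The unmap side (file lines 271-284) is the mirror image: under mmu->lock the IOVA range is torn down only if it was actually allocated from this MMU's drm_mm, the mapping is unlinked, and need_flush is set so stale TLB entries cannot be reused. Sketched with the same stand-in types:

    static void demo_unmap_gem(struct demo_mmu *mmu, struct demo_mapping *mapping)
    {
            mutex_lock(&mmu->lock);

            /* MMUv1 mappings of contiguous buffers never took a drm_mm node,
             * so only release the range if it lives in this mmu->mm
             * (file line 279). */
            if (mapping->vram_node.mm == &mmu->mm)
                    demo_remove_mapping(mmu, mapping);

            list_del(&mapping->obj_node);
            mmu->need_flush = true;

            mutex_unlock(&mmu->lock);
    }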
287 void etnaviv_iommu_destroy(struct etnaviv_iommu *mmu) in etnaviv_iommu_destroy() argument
289 drm_mm_takedown(&mmu->mm); in etnaviv_iommu_destroy()
290 mmu->domain->ops->free(mmu->domain); in etnaviv_iommu_destroy()
291 kfree(mmu); in etnaviv_iommu_destroy()
297 struct etnaviv_iommu *mmu; in etnaviv_iommu_new() local
299 mmu = kzalloc(sizeof(*mmu), GFP_KERNEL); in etnaviv_iommu_new()
300 if (!mmu) in etnaviv_iommu_new()
304 mmu->domain = etnaviv_iommuv1_domain_alloc(gpu); in etnaviv_iommu_new()
307 mmu->domain = etnaviv_iommuv2_domain_alloc(gpu); in etnaviv_iommu_new()
311 if (!mmu->domain) { in etnaviv_iommu_new()
313 kfree(mmu); in etnaviv_iommu_new()
317 mmu->gpu = gpu; in etnaviv_iommu_new()
318 mmu->version = version; in etnaviv_iommu_new()
319 mutex_init(&mmu->lock); in etnaviv_iommu_new()
320 INIT_LIST_HEAD(&mmu->mappings); in etnaviv_iommu_new()
322 drm_mm_init(&mmu->mm, mmu->domain->base, mmu->domain->size); in etnaviv_iommu_new()
324 return mmu; in etnaviv_iommu_new()
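etnaviv_iommu_new() and etnaviv_iommu_destroy() (file lines 287-324) bracket the lifetime of the whole address space: construction picks a v1 or v2 page-table domain, initialises the lock and mapping list, and hands the domain's aperture to drm_mm_init(); teardown releases the drm_mm and frees the domain through its ops. A reduced sketch of that lifecycle, again with the stand-in types from above and the domain handling simplified away:

    #include <linux/err.h>
    #include <linux/slab.h>

    static struct demo_mmu *demo_mmu_new(u64 aperture_base, u64 aperture_size)
    {
            struct demo_mmu *mmu;

            mmu = kzalloc(sizeof(*mmu), GFP_KERNEL);
            if (!mmu)
                    return ERR_PTR(-ENOMEM);

            mutex_init(&mmu->lock);
            INIT_LIST_HEAD(&mmu->mappings);

            /* The drm_mm manages exactly the window the page tables cover;
             * the real driver takes these bounds from the v1/v2 domain it
             * just allocated (file lines 304, 307 and 322). */
            drm_mm_init(&mmu->mm, aperture_base, aperture_size);

            return mmu;
    }

    static void demo_mmu_destroy(struct demo_mmu *mmu)
    {
            drm_mm_takedown(&mmu->mm);
            /* The real driver also frees its page-table domain here through
             * mmu->domain->ops->free(mmu->domain) (file line 290). */
            kfree(mmu);
    }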
329 if (gpu->mmu->version == ETNAVIV_IOMMU_V1) in etnaviv_iommu_restore()
339 struct etnaviv_iommu *mmu = gpu->mmu; in etnaviv_iommu_get_suballoc_va() local
341 if (mmu->version == ETNAVIV_IOMMU_V1) { in etnaviv_iommu_get_suballoc_va()
347 mutex_lock(&mmu->lock); in etnaviv_iommu_get_suballoc_va()
348 ret = etnaviv_iommu_find_iova(mmu, vram_node, size); in etnaviv_iommu_get_suballoc_va()
350 mutex_unlock(&mmu->lock); in etnaviv_iommu_get_suballoc_va()
353 ret = etnaviv_domain_map(mmu->domain, vram_node->start, paddr, in etnaviv_iommu_get_suballoc_va()
357 mutex_unlock(&mmu->lock); in etnaviv_iommu_get_suballoc_va()
360 gpu->mmu->need_flush = true; in etnaviv_iommu_get_suballoc_va()
361 mutex_unlock(&mmu->lock); in etnaviv_iommu_get_suballoc_va()
372 struct etnaviv_iommu *mmu = gpu->mmu; in etnaviv_iommu_put_suballoc_va() local
374 if (mmu->version == ETNAVIV_IOMMU_V2) { in etnaviv_iommu_put_suballoc_va()
375 mutex_lock(&mmu->lock); in etnaviv_iommu_put_suballoc_va()
376 etnaviv_domain_unmap(mmu->domain, iova, size); in etnaviv_iommu_put_suballoc_va()
378 mutex_unlock(&mmu->lock); in etnaviv_iommu_put_suballoc_va()
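The two suballocator helpers at the end of the listing (file lines 339-378) handle the command-buffer pool: on MMUv1 the pool is reachable through the GPU's linear window, so no per-buffer page-table entry is needed, while on MMUv2 a node is carved out of the same drm_mm under mmu->lock, mapped with etnaviv_domain_map(), and need_flush is raised; the put path unmaps and releases the node again only in the v2 case. A sketch of that split, with demo_domain_map()/demo_domain_unmap() as hypothetical placeholders for the real domain ops:

    /* Hypothetical placeholders for etnaviv_domain_map()/_unmap(). */
    static int demo_domain_map(struct demo_mmu *mmu, u64 iova,
                               dma_addr_t paddr, size_t size)
    {
            return 0;       /* real driver writes the page tables here */
    }

    static void demo_domain_unmap(struct demo_mmu *mmu, u64 iova, size_t size)
    {
    }

    static int demo_get_suballoc_va(struct demo_mmu *mmu, bool is_mmu_v1,
                                    dma_addr_t paddr, size_t size,
                                    struct drm_mm_node *node, u32 *iova)
    {
            int ret;

            if (is_mmu_v1) {
                    /* v1: addressed through the linear window; the real
                     * driver returns paddr relative to that window's base. */
                    *iova = (u32)paddr;
                    return 0;
            }

            mutex_lock(&mmu->lock);

            ret = demo_find_iova(mmu, node, size);
            if (ret < 0)
                    goto unlock;

            ret = demo_domain_map(mmu, node->start, paddr, size);
            if (ret < 0) {
                    drm_mm_remove_node(node);
                    goto unlock;
            }

            mmu->need_flush = true;
            *iova = (u32)node->start;

    unlock:
            mutex_unlock(&mmu->lock);
            return ret;
    }

    static void demo_put_suballoc_va(struct demo_mmu *mmu, bool is_mmu_v1,
                                     struct drm_mm_node *node, size_t size)
    {
            if (is_mmu_v1)
                    return;         /* nothing was mapped for v1 */

            mutex_lock(&mmu->lock);
            demo_domain_unmap(mmu, node->start, size);
            drm_mm_remove_node(node);
            mutex_unlock(&mmu->lock);
    }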