Lines matching refs:tegra — each entry gives the source line number, the matching source line, and the enclosing function; the trailing local or argument notes how tegra is bound in that function.

60 struct tegra_drm *tegra = drm->dev_private; in tegra_atomic_commit_tail() local
62 if (tegra->hub) { in tegra_atomic_commit_tail()
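
The two tegra_atomic_commit_tail() entries above show the driver branching on tegra->hub: on Tegra186 and later, display output goes through a shared hub that has to be programmed in the middle of the atomic commit. A minimal sketch of that branch, assuming the roughly v4.17–v5.x shape of the function (newer kernels add dma-fence signalling annotations around the hub path):

static void tegra_atomic_commit_tail(struct drm_atomic_state *old_state)
{
	struct drm_device *drm = old_state->dev;
	struct tegra_drm *tegra = drm->dev_private;

	if (tegra->hub) {
		/* open-coded commit so the hub can be updated between phases */
		drm_atomic_helper_commit_modeset_disables(drm, old_state);
		tegra_display_hub_atomic_commit(drm, old_state);
		drm_atomic_helper_commit_planes(drm, old_state, 0);
		drm_atomic_helper_commit_modeset_enables(drm, old_state);
		drm_atomic_helper_commit_hw_done(old_state);
		drm_atomic_helper_wait_for_vblanks(drm, old_state);
		drm_atomic_helper_cleanup_planes(drm, old_state);
	} else {
		/* no hub: the generic runtime-PM-aware commit helper is enough */
		drm_atomic_helper_commit_tail_rpm(old_state);
	}
}
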
83 struct tegra_drm *tegra; in tegra_drm_load() local
86 tegra = kzalloc(sizeof(*tegra), GFP_KERNEL); in tegra_drm_load()
87 if (!tegra) in tegra_drm_load()
95 tegra->domain = iommu_domain_alloc(&platform_bus_type); in tegra_drm_load()
96 if (!tegra->domain) { in tegra_drm_load()
105 geometry = &tegra->domain->geometry; in tegra_drm_load()
111 order = __ffs(tegra->domain->pgsize_bitmap); in tegra_drm_load()
112 init_iova_domain(&tegra->carveout.domain, 1UL << order, in tegra_drm_load()
115 tegra->carveout.shift = iova_shift(&tegra->carveout.domain); in tegra_drm_load()
116 tegra->carveout.limit = carveout_end >> tegra->carveout.shift; in tegra_drm_load()
118 drm_mm_init(&tegra->mm, gem_start, gem_end - gem_start + 1); in tegra_drm_load()
119 mutex_init(&tegra->mm_lock); in tegra_drm_load()
127 mutex_init(&tegra->clients_lock); in tegra_drm_load()
128 INIT_LIST_HEAD(&tegra->clients); in tegra_drm_load()
130 drm->dev_private = tegra; in tegra_drm_load()
131 tegra->drm = drm; in tegra_drm_load()
158 if (tegra->hub) { in tegra_drm_load()
159 err = tegra_display_hub_prepare(tegra->hub); in tegra_drm_load()
187 if (tegra->hub) in tegra_drm_load()
188 tegra_display_hub_cleanup(tegra->hub); in tegra_drm_load()
197 if (tegra->domain) { in tegra_drm_load()
198 mutex_destroy(&tegra->mm_lock); in tegra_drm_load()
199 drm_mm_takedown(&tegra->mm); in tegra_drm_load()
200 put_iova_domain(&tegra->carveout.domain); in tegra_drm_load()
204 if (tegra->domain) in tegra_drm_load()
205 iommu_domain_free(tegra->domain); in tegra_drm_load()
207 kfree(tegra); in tegra_drm_load()
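
Taken together, the tegra_drm_load() entries trace the driver's setup: allocate the tegra_drm private structure, optionally allocate an IOMMU domain on the platform bus, split the domain's aperture into a GEM region managed by drm_mm and a carveout region managed by an iova_domain, and initialise the client list. The entries at lines 187–207 are the matching error-path teardown, mirrored again in tegra_drm_unload() (lines 214–234). A condensed, excerpt-style sketch of the IOMMU/carveout setup, assuming the roughly v4.17–v5.x code (the CARVEOUT_SZ split and the init_iova_domain()/iommu_domain_alloc() signatures are version-specific; cleanup labels are omitted here):

	struct iommu_domain_geometry *geometry;
	u64 gem_start, gem_end, carveout_start, carveout_end;
	struct tegra_drm *tegra;
	unsigned long order;
	int err;

	tegra = kzalloc(sizeof(*tegra), GFP_KERNEL);
	if (!tegra)
		return -ENOMEM;

	if (iommu_present(&platform_bus_type)) {
		tegra->domain = iommu_domain_alloc(&platform_bus_type);
		if (!tegra->domain) {
			err = -ENOMEM;
			goto free;	/* cleanup labels not shown in this sketch */
		}
	}

	if (tegra->domain) {
		/* split the aperture: GEM at the bottom, carveout at the top */
		geometry = &tegra->domain->geometry;
		gem_start = geometry->aperture_start;
		gem_end = geometry->aperture_end - CARVEOUT_SZ;
		carveout_start = gem_end + 1;
		carveout_end = geometry->aperture_end;

		/* carveout granularity follows the smallest IOMMU page size */
		order = __ffs(tegra->domain->pgsize_bitmap);
		init_iova_domain(&tegra->carveout.domain, 1UL << order,
				 carveout_start >> order);

		tegra->carveout.shift = iova_shift(&tegra->carveout.domain);
		tegra->carveout.limit = carveout_end >> tegra->carveout.shift;

		drm_mm_init(&tegra->mm, gem_start, gem_end - gem_start + 1);
		mutex_init(&tegra->mm_lock);
	}

	mutex_init(&tegra->clients_lock);
	INIT_LIST_HEAD(&tegra->clients);

	drm->dev_private = tegra;
	tegra->drm = drm;
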
214 struct tegra_drm *tegra = drm->dev_private; in tegra_drm_unload() local
226 if (tegra->domain) { in tegra_drm_unload()
227 mutex_destroy(&tegra->mm_lock); in tegra_drm_unload()
228 drm_mm_takedown(&tegra->mm); in tegra_drm_unload()
229 put_iova_domain(&tegra->carveout.domain); in tegra_drm_unload()
231 iommu_domain_free(tegra->domain); in tegra_drm_unload()
234 kfree(tegra); in tegra_drm_unload()
597 struct tegra_drm *tegra = drm->dev_private; in tegra_open_channel() local
609 list_for_each_entry(client, &tegra->clients, list) in tegra_open_channel()
975 struct tegra_drm *tegra = drm->dev_private; in tegra_debugfs_iova() local
978 if (tegra->domain) { in tegra_debugfs_iova()
979 mutex_lock(&tegra->mm_lock); in tegra_debugfs_iova()
980 drm_mm_print(&tegra->mm, &p); in tegra_debugfs_iova()
981 mutex_unlock(&tegra->mm_lock); in tegra_debugfs_iova()
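
The tegra_debugfs_iova() entries show how the GEM address space is exposed through debugfs: the drm_mm allocator is dumped with drm_mm_print() while holding mm_lock, but only when an IOMMU domain exists (otherwise there is no managed IOVA space to show). A sketch, assuming the older drm_info_node-based debugfs plumbing that this listing appears to come from:

static int tegra_debugfs_iova(struct seq_file *s, void *data)
{
	struct drm_info_node *node = (struct drm_info_node *)s->private;
	struct drm_device *drm = node->minor->dev;
	struct tegra_drm *tegra = drm->dev_private;
	struct drm_printer p = drm_seq_file_printer(s);

	if (tegra->domain) {
		/* serialise against GEM allocations touching the drm_mm */
		mutex_lock(&tegra->mm_lock);
		drm_mm_print(&tegra->mm, &p);
		mutex_unlock(&tegra->mm_lock);
	}

	return 0;
}
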
1035 int tegra_drm_register_client(struct tegra_drm *tegra, in tegra_drm_register_client() argument
1038 mutex_lock(&tegra->clients_lock); in tegra_drm_register_client()
1039 list_add_tail(&client->list, &tegra->clients); in tegra_drm_register_client()
1040 mutex_unlock(&tegra->clients_lock); in tegra_drm_register_client()
1045 int tegra_drm_unregister_client(struct tegra_drm *tegra, in tegra_drm_unregister_client() argument
1048 mutex_lock(&tegra->clients_lock); in tegra_drm_unregister_client()
1050 mutex_unlock(&tegra->clients_lock); in tegra_drm_unregister_client()
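
tegra_drm_register_client() and tegra_drm_unregister_client() are thin helpers that add a host1x client to, or remove it from, the tegra->clients list under clients_lock; tegra_open_channel() (line 609 above) walks that same list to find the client a userspace channel request refers to. The statement between the lock and unlock in the unregister path (line 1049) is not part of this listing, so the list removal below is an assumption:

int tegra_drm_register_client(struct tegra_drm *tegra,
			      struct tegra_drm_client *client)
{
	mutex_lock(&tegra->clients_lock);
	list_add_tail(&client->list, &tegra->clients);
	mutex_unlock(&tegra->clients_lock);

	return 0;
}

int tegra_drm_unregister_client(struct tegra_drm *tegra,
				struct tegra_drm_client *client)
{
	mutex_lock(&tegra->clients_lock);
	list_del_init(&client->list);	/* assumed: line 1049 is not shown */
	mutex_unlock(&tegra->clients_lock);

	return 0;
}
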
1059 struct tegra_drm *tegra = drm->dev_private; in host1x_client_iommu_attach() local
1063 if (tegra->domain) { in host1x_client_iommu_attach()
1070 if (!shared || (shared && (group != tegra->group))) { in host1x_client_iommu_attach()
1071 err = iommu_attach_group(tegra->domain, group); in host1x_client_iommu_attach()
1077 if (shared && !tegra->group) in host1x_client_iommu_attach()
1078 tegra->group = group; in host1x_client_iommu_attach()
1089 struct tegra_drm *tegra = drm->dev_private; in host1x_client_iommu_detach() local
1092 if (group == tegra->group) { in host1x_client_iommu_detach()
1093 iommu_detach_group(tegra->domain, group); in host1x_client_iommu_detach()
1094 tegra->group = NULL; in host1x_client_iommu_detach()
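
The host1x_client_iommu_attach()/detach() entries implement per-client IOMMU attachment: when the driver owns a domain, each client's iommu_group is attached to it, but a group shared between several clients is attached only once and remembered in tegra->group, and detach only tears down that remembered shared group. Note that the condition listed at line 1070 simplifies to !shared || group != tegra->group. A hedged sketch of the logic (the exact function signatures, the iommu_group_get() lookup and the client->group bookkeeping are assumptions based on the era of this listing):

static int host1x_client_iommu_attach(struct host1x_client *client, bool shared)
{
	struct drm_device *drm = dev_get_drvdata(client->parent);
	struct tegra_drm *tegra = drm->dev_private;
	struct iommu_group *group = NULL;
	int err;

	if (tegra->domain) {
		group = iommu_group_get(client->dev);
		if (!group)
			return -ENODEV;

		/* attach unless this shared group was already attached */
		if (!shared || group != tegra->group) {
			err = iommu_attach_group(tegra->domain, group);
			if (err < 0) {
				iommu_group_put(group);
				return err;
			}

			if (shared && !tegra->group)
				tegra->group = group;
		}
	}

	client->group = group;

	return 0;
}

static void host1x_client_iommu_detach(struct host1x_client *client,
				       struct iommu_group *group)
{
	struct drm_device *drm = dev_get_drvdata(client->parent);
	struct tegra_drm *tegra = drm->dev_private;

	if (group) {
		/* only the remembered shared group is detached from the domain */
		if (group == tegra->group) {
			iommu_detach_group(tegra->domain, group);
			tegra->group = NULL;
		}

		iommu_group_put(group);
	}
}
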
1101 void *tegra_drm_alloc(struct tegra_drm *tegra, size_t size, dma_addr_t *dma) in tegra_drm_alloc() argument
1108 if (tegra->domain) in tegra_drm_alloc()
1109 size = iova_align(&tegra->carveout.domain, size); in tegra_drm_alloc()
1114 if (!tegra->domain) { in tegra_drm_alloc()
1128 if (!tegra->domain) { in tegra_drm_alloc()
1137 alloc = alloc_iova(&tegra->carveout.domain, in tegra_drm_alloc()
1138 size >> tegra->carveout.shift, in tegra_drm_alloc()
1139 tegra->carveout.limit, true); in tegra_drm_alloc()
1145 *dma = iova_dma_addr(&tegra->carveout.domain, alloc); in tegra_drm_alloc()
1146 err = iommu_map(tegra->domain, *dma, virt_to_phys(virt), in tegra_drm_alloc()
1154 __free_iova(&tegra->carveout.domain, alloc); in tegra_drm_alloc()
1161 void tegra_drm_free(struct tegra_drm *tegra, size_t size, void *virt, in tegra_drm_free() argument
1164 if (tegra->domain) in tegra_drm_free()
1165 size = iova_align(&tegra->carveout.domain, size); in tegra_drm_free()
1169 if (tegra->domain) { in tegra_drm_free()
1170 iommu_unmap(tegra->domain, dma, size); in tegra_drm_free()
1171 free_iova(&tegra->carveout.domain, in tegra_drm_free()
1172 iova_pfn(&tegra->carveout.domain, dma)); in tegra_drm_free()
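
tegra_drm_alloc() and tegra_drm_free() are the carveout allocator these references outline: pages come from the page allocator, and when an IOMMU domain exists the size is rounded to the carveout granularity, an IOVA range is reserved with alloc_iova(), and the pages are mapped into the domain with iommu_map(); without a domain the physical address is handed back directly. The free path (lines 1164–1172) is the exact mirror: iommu_unmap(), free_iova(), then freeing the pages. A sketch of the allocation side, assuming the roughly v4.17–v5.x code (newer kernels pass an extra gfp argument to iommu_map()):

void *tegra_drm_alloc(struct tegra_drm *tegra, size_t size, dma_addr_t *dma)
{
	struct iova *alloc;
	void *virt;
	gfp_t gfp;
	int err;

	if (tegra->domain)
		size = iova_align(&tegra->carveout.domain, size);
	else
		size = PAGE_ALIGN(size);

	gfp = GFP_KERNEL | __GFP_ZERO;
	if (!tegra->domain) {
		/* without an IOMMU, devices address physical memory directly,
		 * so keep the allocation within the 32-bit DMA range */
		gfp |= GFP_DMA;
	}

	virt = (void *)__get_free_pages(gfp, get_order(size));
	if (!virt)
		return ERR_PTR(-ENOMEM);

	if (!tegra->domain) {
		*dma = virt_to_phys(virt);
		return virt;
	}

	/* reserve an IOVA range in the carveout and map the pages into it */
	alloc = alloc_iova(&tegra->carveout.domain,
			   size >> tegra->carveout.shift,
			   tegra->carveout.limit, true);
	if (!alloc) {
		err = -EBUSY;
		goto free_pages;
	}

	*dma = iova_dma_addr(&tegra->carveout.domain, alloc);
	err = iommu_map(tegra->domain, *dma, virt_to_phys(virt),
			size, IOMMU_READ | IOMMU_WRITE);
	if (err < 0)
		goto free_iova;

	return virt;

free_iova:
	__free_iova(&tegra->carveout.domain, alloc);
free_pages:
	free_pages((unsigned long)virt, get_order(size));

	return ERR_PTR(err);
}
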
1215 struct tegra_drm *tegra = drm->dev_private; in host1x_drm_suspend() local
1220 tegra->state = drm_atomic_helper_suspend(drm); in host1x_drm_suspend()
1221 if (IS_ERR(tegra->state)) { in host1x_drm_suspend()
1224 return PTR_ERR(tegra->state); in host1x_drm_suspend()
1233 struct tegra_drm *tegra = drm->dev_private; in host1x_drm_resume() local
1235 drm_atomic_helper_resume(drm, tegra->state); in host1x_drm_resume()
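
Finally, the suspend/resume entries rely on the standard atomic helpers: host1x_drm_suspend() saves the current display state with drm_atomic_helper_suspend() into tegra->state, and host1x_drm_resume() replays it with drm_atomic_helper_resume(). Reduced to that core (the real functions of this era also handle fbdev and output polling around these calls, and later kernels replace the pair with drm_mode_config_helper_suspend()/resume()):

static int host1x_drm_suspend(struct device *dev)
{
	struct drm_device *drm = dev_get_drvdata(dev);
	struct tegra_drm *tegra = drm->dev_private;

	/* save the atomic state so resume can restore the same configuration */
	tegra->state = drm_atomic_helper_suspend(drm);
	if (IS_ERR(tegra->state))
		return PTR_ERR(tegra->state);

	return 0;
}

static int host1x_drm_resume(struct device *dev)
{
	struct drm_device *drm = dev_get_drvdata(dev);
	struct tegra_drm *tegra = drm->dev_private;

	drm_atomic_helper_resume(drm, tegra->state);

	return 0;
}
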