Lines Matching +full:prefetch +full:- +full:dma

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2016-2018 Etnaviv Project
 */

#include <linux/dma-mapping.h>
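For orientation (summarizing what the fragments below show; the function names point at the etnaviv MMUv2 code): the GPU address space is described by a two-level page table, a single 4 KiB master TLB (MTLB) page whose entries each point at a lazily allocated 4 KiB second-level TLB (STLB) page of 4 KiB mappings, and each context additionally owns one slot in a global Page Table Array (PTA) that the secure-mode context switch loads from.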
In etnaviv_iommuv2_free():

	drm_mm_takedown(&context->mm);

	for (i = 0; i < MMUv2_MAX_STLB_ENTRIES; i++)
		if (v2_context->stlb_cpu[i])
			dma_free_wc(context->global->dev, SZ_4K,
				    v2_context->stlb_cpu[i],
				    v2_context->stlb_dma[i]);

	dma_free_wc(context->global->dev, SZ_4K, v2_context->mtlb_cpu,
		    v2_context->mtlb_dma);

	clear_bit(v2_context->id, context->global->v2.pta_alloc);
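Teardown mirrors allocation: every STLB page that was instantiated is released with dma_free_wc(), then the MTLB page itself, and finally clear_bit() returns the context's PTA slot to the global allocation bitmap.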
In etnaviv_iommuv2_ensure_stlb():

	if (v2_context->stlb_cpu[stlb])
		return 0;

	v2_context->stlb_cpu[stlb] =
		dma_alloc_wc(v2_context->base.global->dev, SZ_4K,
			     &v2_context->stlb_dma[stlb],
			     GFP_KERNEL);
	if (!v2_context->stlb_cpu[stlb])
		return -ENOMEM;

	memset32(v2_context->stlb_cpu[stlb], MMUv2_PTE_EXCEPTION,
		 SZ_4K / sizeof(u32));

	v2_context->mtlb_cpu[stlb] =
		v2_context->stlb_dma[stlb] | MMUv2_PTE_PRESENT;
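Note the pairing: dma_alloc_wc() returns a write-combined CPU mapping and fills in the DMA address of the same page, so stlb_cpu[] is what the kernel writes PTEs through while stlb_dma[] is what gets published to the hardware via the MTLB entry. Prefilling a new STLB with MMUv2_PTE_EXCEPTION makes any access through a not-yet-mapped entry raise an MMU exception instead of reading stale memory.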
In etnaviv_iommuv2_map():

	if (size != SZ_4K)
		return -EINVAL;
	/* ... compute mtlb_entry/stlb_entry, make sure the STLB exists ... */
	v2_context->stlb_cpu[mtlb_entry][stlb_entry] = entry;
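The mtlb_entry/stlb_entry indices elided above come from splitting the 32-bit GPU virtual address: bits 31:22 select the MTLB entry and bits 21:12 the STLB entry. A minimal sketch, assuming the MMUv2_*_MASK/SHIFT values the driver defines; mmuv2_split_iova() is a hypothetical helper for illustration, not a driver function:

#define MMUv2_MTLB_MASK		0xffc00000
#define MMUv2_MTLB_SHIFT	22
#define MMUv2_STLB_MASK		0x003ff000
#define MMUv2_STLB_SHIFT	12

/* hypothetical helper: split a GPU virtual address into table indices */
static void mmuv2_split_iova(unsigned long iova, int *mtlb, int *stlb)
{
	*mtlb = (iova & MMUv2_MTLB_MASK) >> MMUv2_MTLB_SHIFT;	/* bits 31:22 */
	*stlb = (iova & MMUv2_STLB_MASK) >> MMUv2_STLB_SHIFT;	/* bits 21:12 */
}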
In etnaviv_iommuv2_unmap():

	if (size != SZ_4K)
		return -EINVAL;
	/* ... */
	etnaviv_domain->stlb_cpu[mtlb_entry][stlb_entry] = MMUv2_PTE_EXCEPTION;
In etnaviv_iommuv2_dump_size():

	for (i = 0; i < MMUv2_MAX_STLB_ENTRIES; i++)
		if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT)
			dump_size += SZ_4K;
In etnaviv_iommuv2_dump():

	memcpy(buf, v2_context->mtlb_cpu, SZ_4K);
	buf += SZ_4K;
	for (i = 0; i < MMUv2_MAX_STLB_ENTRIES; i++)
		if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT) {
			memcpy(buf, v2_context->stlb_cpu[i], SZ_4K);
			buf += SZ_4K;
		}
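The dump layout is therefore the MTLB page followed by every present STLB page back-to-back, exactly matching the size etnaviv_iommuv2_dump_size() computes above.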
In etnaviv_iommuv2_restore_nonsec():

	u16 prefetch;
	/* ... */
	if (gpu->mmu_context)
		etnaviv_iommu_context_put(gpu->mmu_context);
	gpu->mmu_context = etnaviv_iommu_context_get(context);

	prefetch = etnaviv_buffer_config_mmuv2(gpu,
				(u32)v2_context->mtlb_dma,
				(u32)context->global->bad_page_dma);
	etnaviv_gpu_start_fe(gpu, (u32)etnaviv_cmdbuf_get_pa(&gpu->buffer),
			     prefetch);
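prefetch here is not an address: etnaviv_buffer_config_mmuv2() appends an MMU-setup command sequence to the kernel ring buffer (gpu->buffer) and returns the length of that sequence, which etnaviv_gpu_start_fe() then hands to the front end together with the buffer's physical address as the amount to fetch and execute.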
In etnaviv_iommuv2_restore_sec():

	u16 prefetch;
	/* ... */
	if (gpu->mmu_context)
		etnaviv_iommu_context_put(gpu->mmu_context);
	gpu->mmu_context = etnaviv_iommu_context_get(context);

	gpu_write(gpu, VIVS_MMUv2_PTA_ADDRESS_LOW,
		  lower_32_bits(context->global->v2.pta_dma));
	gpu_write(gpu, VIVS_MMUv2_PTA_ADDRESS_HIGH,
		  upper_32_bits(context->global->v2.pta_dma));
	/* ... */
	gpu_write(gpu, VIVS_MMUv2_NONSEC_SAFE_ADDR_LOW,
		  lower_32_bits(context->global->bad_page_dma));
	gpu_write(gpu, VIVS_MMUv2_SEC_SAFE_ADDR_LOW,
		  lower_32_bits(context->global->bad_page_dma));
	gpu_write(gpu, VIVS_MMUv2_SAFE_ADDRESS_CONFIG,
		  VIVS_MMUv2_SAFE_ADDRESS_CONFIG_NON_SEC_SAFE_ADDR_HIGH(
			upper_32_bits(context->global->bad_page_dma)) |
		  VIVS_MMUv2_SAFE_ADDRESS_CONFIG_SEC_SAFE_ADDR_HIGH(
			upper_32_bits(context->global->bad_page_dma)));

	context->global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma |
			VIVS_MMUv2_CONFIGURATION_MODE_MODE4_K;

	/* trigger a PTA load through the FE */
	prefetch = etnaviv_buffer_config_pta(gpu, v2_context->id);
	etnaviv_gpu_start_fe(gpu, (u32)etnaviv_cmdbuf_get_pa(&gpu->buffer),
			     prefetch);
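The secure-mode switch is indirect: instead of writing an MTLB address into an MMU register, the host points the hardware at the global PTA, programs the safe (bad-page) fallback addresses, updates this context's PTA slot to its MTLB address plus the 4 KiB-page-mode flag, and finally lets the front end execute the PTA-load command built by etnaviv_buffer_config_pta().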
In etnaviv_iommuv2_get_mtlb_addr():

	return v2_context->mtlb_dma;
In etnaviv_iommuv2_get_pta_id():

	return v2_context->id;
In etnaviv_iommuv2_restore():

	switch (gpu->sec_mode) {
	case ETNA_SEC_NONE:
		etnaviv_iommuv2_restore_nonsec(gpu, context);
		break;
	case ETNA_SEC_KERNEL:
		etnaviv_iommuv2_restore_sec(gpu, context);
		break;
	/* ... */
	}
In etnaviv_iommuv2_context_alloc():

	mutex_lock(&global->lock);
	v2_context->id = find_first_zero_bit(global->v2.pta_alloc,
					     ETNAVIV_PTA_ENTRIES);
	if (v2_context->id < ETNAVIV_PTA_ENTRIES) {
		set_bit(v2_context->id, global->v2.pta_alloc);
	} else {
		mutex_unlock(&global->lock);
		goto out_free;
	}
	mutex_unlock(&global->lock);

	v2_context->mtlb_cpu = dma_alloc_wc(global->dev, SZ_4K,
					    &v2_context->mtlb_dma, GFP_KERNEL);
	if (!v2_context->mtlb_cpu)
		goto out_free_id;

	memset32(v2_context->mtlb_cpu, MMUv2_PTE_EXCEPTION,
		 MMUv2_MAX_STLB_ENTRIES);

	global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma;

	context = &v2_context->base;
	context->global = global;
	kref_init(&context->refcount);
	mutex_init(&context->lock);
	INIT_LIST_HEAD(&context->mappings);
	drm_mm_init(&context->mm, SZ_4K, (u64)SZ_1G * 4 - SZ_4K);

	return context;

out_free_id:
	clear_bit(v2_context->id, global->v2.pta_alloc);
out_free:
	/* ... */
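The PTA slot allocation above is a plain bitmap-under-mutex idiom. A standalone sketch of the same pattern with hypothetical names (alloc_slot()/free_slot(); only find_first_zero_bit(), set_bit(), and clear_bit() are the kernel APIs actually used above):

#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/mutex.h>

/* hypothetical: claim the lowest free slot in a bitmap of nbits slots */
static int alloc_slot(unsigned long *bitmap, struct mutex *lock, int nbits)
{
	int id;

	mutex_lock(lock);
	id = find_first_zero_bit(bitmap, nbits);
	if (id < nbits)
		set_bit(id, bitmap);
	mutex_unlock(lock);

	return id < nbits ? id : -ENOSPC;
}

/* hypothetical: the unwind path just returns the slot */
static void free_slot(unsigned long *bitmap, int id)
{
	clear_bit(id, bitmap);
}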