Lines Matching full:v3d
22 v3d_init_core(struct v3d_dev *v3d, int core) in v3d_init_core() argument
30 if (v3d->ver < 40) in v3d_init_core()
42 v3d_init_hw_state(struct v3d_dev *v3d) in v3d_init_hw_state() argument
44 v3d_init_core(v3d, 0); in v3d_init_hw_state()
48 v3d_idle_axi(struct v3d_dev *v3d, int core) in v3d_idle_axi() argument
61 v3d_idle_gca(struct v3d_dev *v3d) in v3d_idle_gca() argument
63 if (v3d->ver >= 41) in v3d_idle_gca()
76 v3d_reset_by_bridge(struct v3d_dev *v3d) in v3d_reset_by_bridge() argument
99 v3d_reset_v3d(struct v3d_dev *v3d) in v3d_reset_v3d() argument
101 if (v3d->reset) in v3d_reset_v3d()
102 reset_control_reset(v3d->reset); in v3d_reset_v3d()
104 v3d_reset_by_bridge(v3d); in v3d_reset_v3d()
106 v3d_init_hw_state(v3d); in v3d_reset_v3d()
110 v3d_reset(struct v3d_dev *v3d) in v3d_reset() argument
112 struct drm_device *dev = &v3d->drm; in v3d_reset()
121 v3d_idle_axi(v3d, 0); in v3d_reset()
123 v3d_idle_gca(v3d); in v3d_reset()
124 v3d_reset_v3d(v3d); in v3d_reset()
126 v3d_mmu_set_page_table(v3d); in v3d_reset()
127 v3d_irq_reset(v3d); in v3d_reset()
133 v3d_flush_l3(struct v3d_dev *v3d) in v3d_flush_l3() argument
135 if (v3d->ver < 41) { in v3d_flush_l3()
141 if (v3d->ver < 33) { in v3d_flush_l3()
149 * uniforms and instructions on V3D 3.2.
152 v3d_invalidate_l2c(struct v3d_dev *v3d, int core) in v3d_invalidate_l2c() argument
154 if (v3d->ver > 32) in v3d_invalidate_l2c()
164 v3d_flush_l2t(struct v3d_dev *v3d, int core) in v3d_flush_l2t() argument
173 mutex_lock(&v3d->cache_clean_lock); in v3d_flush_l2t()
177 mutex_unlock(&v3d->cache_clean_lock); in v3d_flush_l2t()
189 v3d_clean_caches(struct v3d_dev *v3d) in v3d_clean_caches() argument
191 struct drm_device *dev = &v3d->drm; in v3d_clean_caches()
202 mutex_lock(&v3d->cache_clean_lock); in v3d_clean_caches()
212 mutex_unlock(&v3d->cache_clean_lock); in v3d_clean_caches()
219 v3d_invalidate_slices(struct v3d_dev *v3d, int core) in v3d_invalidate_slices() argument
229 v3d_invalidate_caches(struct v3d_dev *v3d) in v3d_invalidate_caches() argument
236 v3d_flush_l3(v3d); in v3d_invalidate_caches()
237 v3d_invalidate_l2c(v3d, 0); in v3d_invalidate_caches()
238 v3d_flush_l2t(v3d, 0); in v3d_invalidate_caches()
239 v3d_invalidate_slices(v3d, 0); in v3d_invalidate_caches()
247 * to v3d, so we don't attach dma-buf fences to them.
277 * @job: V3D job being set up
373 pm_runtime_mark_last_busy(job->v3d->drm.dev); in v3d_job_free()
374 pm_runtime_put_autosuspend(job->v3d->drm.dev); in v3d_job_free()
432 v3d_job_init(struct v3d_dev *v3d, struct drm_file *file_priv, in v3d_job_init() argument
439 job->v3d = v3d; in v3d_job_init()
442 ret = pm_runtime_get_sync(v3d->drm.dev); in v3d_job_init()
461 pm_runtime_put_autosuspend(v3d->drm.dev); in v3d_job_init()
513 * v3d_submit_cl_ioctl() - Submits a job (frame) to the V3D.
528 struct v3d_dev *v3d = to_v3d_dev(dev); in v3d_submit_cl_ioctl() local
538 trace_v3d_submit_cl_ioctl(&v3d->drm, args->rcl_start, args->rcl_end); in v3d_submit_cl_ioctl()
554 ret = v3d_job_init(v3d, file_priv, &render->base, in v3d_submit_cl_ioctl()
568 ret = v3d_job_init(v3d, file_priv, &bin->base, in v3d_submit_cl_ioctl()
591 ret = v3d_job_init(v3d, file_priv, clean_job, v3d_job_free, 0); in v3d_submit_cl_ioctl()
612 mutex_lock(&v3d->sched_lock); in v3d_submit_cl_ioctl()
639 mutex_unlock(&v3d->sched_lock); in v3d_submit_cl_ioctl()
656 mutex_unlock(&v3d->sched_lock); in v3d_submit_cl_ioctl()
670 * v3d_submit_tfu_ioctl() - Submits a TFU (texture formatting) job to the V3D.
682 struct v3d_dev *v3d = to_v3d_dev(dev); in v3d_submit_tfu_ioctl() local
689 trace_v3d_submit_tfu_ioctl(&v3d->drm, args->iia); in v3d_submit_tfu_ioctl()
695 ret = v3d_job_init(v3d, file_priv, &job->base, in v3d_submit_tfu_ioctl()
739 mutex_lock(&v3d->sched_lock); in v3d_submit_tfu_ioctl()
743 mutex_unlock(&v3d->sched_lock); in v3d_submit_tfu_ioctl()
755 mutex_unlock(&v3d->sched_lock); in v3d_submit_tfu_ioctl()
765 * v3d_submit_csd_ioctl() - Submits a CSD (compute shader dispatch) job to the V3D.
777 struct v3d_dev *v3d = to_v3d_dev(dev); in v3d_submit_csd_ioctl() local
785 trace_v3d_submit_csd_ioctl(&v3d->drm, args->cfg[5], args->cfg[6]); in v3d_submit_csd_ioctl()
787 if (!v3d_has_csd(v3d)) { in v3d_submit_csd_ioctl()
796 ret = v3d_job_init(v3d, file_priv, &job->base, in v3d_submit_csd_ioctl()
810 ret = v3d_job_init(v3d, file_priv, clean_job, v3d_job_free, 0); in v3d_submit_csd_ioctl()
828 mutex_lock(&v3d->sched_lock); in v3d_submit_csd_ioctl()
841 mutex_unlock(&v3d->sched_lock); in v3d_submit_csd_ioctl()
855 mutex_unlock(&v3d->sched_lock); in v3d_submit_csd_ioctl()
868 struct v3d_dev *v3d = to_v3d_dev(dev); in v3d_gem_init() local
873 v3d->queue[i].fence_context = dma_fence_context_alloc(1); in v3d_gem_init()
875 spin_lock_init(&v3d->mm_lock); in v3d_gem_init()
876 spin_lock_init(&v3d->job_lock); in v3d_gem_init()
877 mutex_init(&v3d->bo_lock); in v3d_gem_init()
878 mutex_init(&v3d->reset_lock); in v3d_gem_init()
879 mutex_init(&v3d->sched_lock); in v3d_gem_init()
880 mutex_init(&v3d->cache_clean_lock); in v3d_gem_init()
886 drm_mm_init(&v3d->mm, 1, pt_size / sizeof(u32) - 1); in v3d_gem_init()
888 v3d->pt = dma_alloc_wc(v3d->drm.dev, pt_size, in v3d_gem_init()
889 &v3d->pt_paddr, in v3d_gem_init()
891 if (!v3d->pt) { in v3d_gem_init()
892 drm_mm_takedown(&v3d->mm); in v3d_gem_init()
893 dev_err(v3d->drm.dev, in v3d_gem_init()
899 v3d_init_hw_state(v3d); in v3d_gem_init()
900 v3d_mmu_set_page_table(v3d); in v3d_gem_init()
902 ret = v3d_sched_init(v3d); in v3d_gem_init()
904 drm_mm_takedown(&v3d->mm); in v3d_gem_init()
905 dma_free_coherent(v3d->drm.dev, 4096 * 1024, (void *)v3d->pt, in v3d_gem_init()
906 v3d->pt_paddr); in v3d_gem_init()
915 struct v3d_dev *v3d = to_v3d_dev(dev); in v3d_gem_destroy() local
917 v3d_sched_fini(v3d); in v3d_gem_destroy()
920 * unregistering V3D. in v3d_gem_destroy()
922 WARN_ON(v3d->bin_job); in v3d_gem_destroy()
923 WARN_ON(v3d->render_job); in v3d_gem_destroy()
925 drm_mm_takedown(&v3d->mm); in v3d_gem_destroy()
927 dma_free_coherent(v3d->drm.dev, 4096 * 1024, (void *)v3d->pt, in v3d_gem_destroy()
928 v3d->pt_paddr); in v3d_gem_destroy()