Lines Matching full:job
169 * need to wait for completion before dispatching the job -- in v3d_flush_l2t()
173 * synchronously clean after a job. in v3d_flush_l2t()
186 * signaling job completion. So, we synchronously wait before
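The flush comments above describe an ordering rule in the cache-maintenance helpers: an L2T flush can be dispatched without waiting for completion, but it must not be triggered while the L2_CLEAN queue is synchronously cleaning after a job. A minimal sketch of that rule, assuming the driver serializes the two with a mutex; trigger_l2t_flush() and the lock name are hypothetical stand-ins, not the driver's actual identifiers:

/* Sketch only: trigger_l2t_flush() stands in for the real V3D register
 * writes; the mutex expresses the "don't race the synchronous clean"
 * requirement from the comments above.
 */
static void flush_l2t_sketch(struct v3d_dev *v3d, int core)
{
	mutex_lock(&v3d->cache_clean_lock);	/* illustrative lock name */
	trigger_l2t_flush(v3d, core);		/* hypothetical helper */
	mutex_unlock(&v3d->cache_clean_lock);
}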
252 v3d_lock_bo_reservations(struct v3d_job *job, in v3d_lock_bo_reservations() argument
257 ret = drm_gem_lock_reservations(job->bo, job->bo_count, acquire_ctx); in v3d_lock_bo_reservations()
261 for (i = 0; i < job->bo_count; i++) { in v3d_lock_bo_reservations()
262 ret = drm_gem_fence_array_add_implicit(&job->deps, in v3d_lock_bo_reservations()
263 job->bo[i], true); in v3d_lock_bo_reservations()
265 drm_gem_unlock_reservations(job->bo, job->bo_count, in v3d_lock_bo_reservations()
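The fragments above come from the reservation-locking helper: every BO in the job is ww-locked under one acquire context, and each BO's implicit fences are recorded as job dependencies so the scheduler waits for prior users. A condensed sketch of that pattern, assuming the job->deps xarray seen in the matches:

static int lock_bo_reservations_sketch(struct v3d_job *job,
				       struct ww_acquire_ctx *acquire_ctx)
{
	int i, ret;

	/* Take every BO's reservation lock under one ww acquire context. */
	ret = drm_gem_lock_reservations(job->bo, job->bo_count, acquire_ctx);
	if (ret)
		return ret;

	/* Record each BO's implicit (write) fences as job dependencies. */
	for (i = 0; i < job->bo_count; i++) {
		ret = drm_gem_fence_array_add_implicit(&job->deps,
						       job->bo[i], true);
		if (ret) {
			drm_gem_unlock_reservations(job->bo, job->bo_count,
						    acquire_ctx);
			return ret;
		}
	}

	return 0;
}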
275 * v3d_lookup_bos() - Sets up job->bo[] with the GEM objects
276 * referenced by the job.
279 * @job: V3D job being set up
284 * the submitted job's BO list. This does the validation of the job's
285 * BO list and reference counting for the lifetime of the job.
293 struct v3d_job *job, in v3d_lookup_bos() argument
301 job->bo_count = bo_count; in v3d_lookup_bos()
303 if (!job->bo_count) { in v3d_lookup_bos()
311 job->bo = kvmalloc_array(job->bo_count, in v3d_lookup_bos()
314 if (!job->bo) { in v3d_lookup_bos()
319 handles = kvmalloc_array(job->bo_count, sizeof(u32), GFP_KERNEL); in v3d_lookup_bos()
328 job->bo_count * sizeof(u32))) { in v3d_lookup_bos()
335 for (i = 0; i < job->bo_count; i++) { in v3d_lookup_bos()
346 job->bo[i] = bo; in v3d_lookup_bos()
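Per its kerneldoc above, v3d_lookup_bos() copies the handle array out of the submit args and resolves each handle to a GEM object that stays referenced for the job's lifetime. A sketch of the core loop, using the generic drm_gem_object_lookup() helper; the driver may open-code the handle-table walk instead, but the effect is the same:

/* Sketch: bo_handles is the user pointer from the submit args (assumed). */
u32 *handles = kvmalloc_array(job->bo_count, sizeof(u32), GFP_KERNEL);
int i, ret = 0;

if (!handles)
	return -ENOMEM;

if (copy_from_user(handles, u64_to_user_ptr(bo_handles),
		   job->bo_count * sizeof(u32))) {
	ret = -EFAULT;
	goto out;
}

for (i = 0; i < job->bo_count; i++) {
	struct drm_gem_object *bo = drm_gem_object_lookup(file_priv,
							  handles[i]);
	if (!bo) {
		ret = -ENOENT;	/* partial job->bo[] is unwound in v3d_job_free() */
		goto out;
	}
	job->bo[i] = bo;	/* keep the reference the lookup returned */
}

out:
	kvfree(handles);
	return ret;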
358 struct v3d_job *job = container_of(ref, struct v3d_job, refcount); in v3d_job_free() local
363 for (i = 0; i < job->bo_count; i++) { in v3d_job_free()
364 if (job->bo[i]) in v3d_job_free()
365 drm_gem_object_put(job->bo[i]); in v3d_job_free()
367 kvfree(job->bo); in v3d_job_free()
369 xa_for_each(&job->deps, index, fence) { in v3d_job_free()
372 xa_destroy(&job->deps); in v3d_job_free()
374 dma_fence_put(job->irq_fence); in v3d_job_free()
375 dma_fence_put(job->done_fence); in v3d_job_free()
377 pm_runtime_mark_last_busy(job->v3d->drm.dev); in v3d_job_free()
378 pm_runtime_put_autosuspend(job->v3d->drm.dev); in v3d_job_free()
380 if (job->perfmon) in v3d_job_free()
381 v3d_perfmon_put(job->perfmon); in v3d_job_free()
383 kfree(job); in v3d_job_free()
389 struct v3d_render_job *job = container_of(ref, struct v3d_render_job, in v3d_render_job_free() local
393 list_for_each_entry_safe(bo, save, &job->unref_list, unref_head) { in v3d_render_job_free()
400 void v3d_job_put(struct v3d_job *job) in v3d_job_put() argument
402 kref_put(&job->refcount, job->free); in v3d_job_put()
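v3d_job_put() is the single release point: the last kref_put() invokes whatever destructor v3d_job_init() stored in job->free, so render jobs get v3d_render_job_free() (which also drops the BOs queued on unref_list) while the other queues use plain v3d_job_free(). A sketch of the render-job variant implied by the fragments, assuming v3d_bo exposes its GEM object as base.base:

static void render_job_free_sketch(struct kref *ref)
{
	struct v3d_render_job *job = container_of(ref, struct v3d_render_job,
						  base.refcount);
	struct v3d_bo *bo, *save;

	/* Drop the extra BOs the bin/render path queued for deferred unref. */
	list_for_each_entry_safe(bo, save, &job->unref_list, unref_head)
		drm_gem_object_put(&bo->base.base);

	/* Fall through to the common teardown shown above. */
	v3d_job_free(ref);
}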
440 struct v3d_job *job, void (*free)(struct kref *ref), in v3d_job_init() argument
446 job->v3d = v3d; in v3d_job_init()
447 job->free = free; in v3d_job_init()
453 xa_init_flags(&job->deps, XA_FLAGS_ALLOC); in v3d_job_init()
459 ret = drm_gem_fence_array_add(&job->deps, in_fence); in v3d_job_init()
463 kref_init(&job->refcount); in v3d_job_init()
467 xa_destroy(&job->deps); in v3d_job_init()
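v3d_job_init() sets up the state every queue shares: the device back-pointer, the free callback used by v3d_job_put(), the deps xarray, and the submit's optional input fence. A condensed sketch (the runtime-PM get that pairs with the put in v3d_job_free() is omitted; resolving in_sync via drm_syncobj_find_fence() is an assumption based on the surrounding driver code):

static int job_init_sketch(struct v3d_dev *v3d, struct drm_file *file_priv,
			   struct v3d_job *job, void (*free)(struct kref *ref),
			   u32 in_sync)
{
	struct dma_fence *in_fence = NULL;
	int ret;

	job->v3d = v3d;
	job->free = free;	/* destructor later invoked by v3d_job_put() */

	xa_init_flags(&job->deps, XA_FLAGS_ALLOC);

	/* Turn the optional in_sync syncobj into a scheduler dependency. */
	ret = drm_syncobj_find_fence(file_priv, in_sync, 0, 0, &in_fence);
	if (ret == -EINVAL)
		goto fail;

	ret = drm_gem_fence_array_add(&job->deps, in_fence);
	if (ret)
		goto fail;

	kref_init(&job->refcount);
	return 0;

fail:
	xa_destroy(&job->deps);
	return ret;
}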
474 struct v3d_job *job, enum v3d_queue queue) in v3d_push_job() argument
478 ret = drm_sched_job_init(&job->base, &v3d_priv->sched_entity[queue], in v3d_push_job()
483 job->done_fence = dma_fence_get(&job->base.s_fence->finished); in v3d_push_job()
485 /* put by scheduler job completion */ in v3d_push_job()
486 kref_get(&job->refcount); in v3d_push_job()
488 drm_sched_entity_push_job(&job->base, &v3d_priv->sched_entity[queue]); in v3d_push_job()
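v3d_push_job() hands the job to the DRM GPU scheduler for its queue: the scheduler job is initialized against the client's per-queue entity, the scheduler's "finished" fence is cached as done_fence, and an extra reference is taken that the scheduler's completion path drops. A sketch of that sequence, matching the two-argument drm_sched_entity_push_job() of kernels from this era:

static int push_job_sketch(struct v3d_file_priv *v3d_priv,
			   struct v3d_job *job, enum v3d_queue queue)
{
	int ret;

	ret = drm_sched_job_init(&job->base, &v3d_priv->sched_entity[queue],
				 v3d_priv);
	if (ret)
		return ret;

	/* The scheduler's "finished" fence signals once the job completes. */
	job->done_fence = dma_fence_get(&job->base.s_fence->finished);

	/* Extra reference; put by the scheduler when the job is done. */
	kref_get(&job->refcount);

	drm_sched_entity_push_job(&job->base, &v3d_priv->sched_entity[queue]);

	return 0;
}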
495 struct v3d_job *job, in v3d_attach_fences_and_unlock_reservation() argument
503 for (i = 0; i < job->bo_count; i++) { in v3d_attach_fences_and_unlock_reservation()
505 dma_resv_add_excl_fence(job->bo[i]->resv, in v3d_attach_fences_and_unlock_reservation()
506 job->done_fence); in v3d_attach_fences_and_unlock_reservation()
509 drm_gem_unlock_reservations(job->bo, job->bo_count, acquire_ctx); in v3d_attach_fences_and_unlock_reservation()
511 /* Update the return sync object for the job */ in v3d_attach_fences_and_unlock_reservation()
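After the push, the job's done_fence is installed as the exclusive fence on every BO's reservation object so later users implicitly wait on this job; the ww locks are then dropped and the out_sync syncobj, if the caller supplied one, is pointed at the same fence. A sketch of that tail end, with the syncobj step hedged as an assumption based on the comment fragment above:

static void attach_fences_sketch(struct drm_file *file_priv,
				 struct v3d_job *job,
				 struct ww_acquire_ctx *acquire_ctx,
				 u32 out_sync,
				 struct dma_fence *done_fence)
{
	struct drm_syncobj *sync_out;
	int i;

	/* Publish the job's completion through implicit sync on each BO. */
	for (i = 0; i < job->bo_count; i++)
		dma_resv_add_excl_fence(job->bo[i]->resv, job->done_fence);

	drm_gem_unlock_reservations(job->bo, job->bo_count, acquire_ctx);

	/* Update the return sync object for the job (0 assumed to mean none). */
	sync_out = drm_syncobj_find(file_priv, out_sync);
	if (sync_out) {
		drm_syncobj_replace_fence(sync_out, done_fence);
		drm_syncobj_put(sync_out);
	}
}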
520 * v3d_submit_cl_ioctl() - Submits a job (frame) to the V3D.
694 * v3d_submit_tfu_ioctl() - Submits a TFU (texture formatting) job to the V3D.
709 struct v3d_tfu_job *job; in v3d_submit_tfu_ioctl() local
715 job = kcalloc(1, sizeof(*job), GFP_KERNEL); in v3d_submit_tfu_ioctl()
716 if (!job) in v3d_submit_tfu_ioctl()
719 ret = v3d_job_init(v3d, file_priv, &job->base, in v3d_submit_tfu_ioctl()
722 kfree(job); in v3d_submit_tfu_ioctl()
726 job->base.bo = kcalloc(ARRAY_SIZE(args->bo_handles), in v3d_submit_tfu_ioctl()
727 sizeof(*job->base.bo), GFP_KERNEL); in v3d_submit_tfu_ioctl()
728 if (!job->base.bo) { in v3d_submit_tfu_ioctl()
729 v3d_job_put(&job->base); in v3d_submit_tfu_ioctl()
733 job->args = *args; in v3d_submit_tfu_ioctl()
736 for (job->base.bo_count = 0; in v3d_submit_tfu_ioctl()
737 job->base.bo_count < ARRAY_SIZE(args->bo_handles); in v3d_submit_tfu_ioctl()
738 job->base.bo_count++) { in v3d_submit_tfu_ioctl()
741 if (!args->bo_handles[job->base.bo_count]) in v3d_submit_tfu_ioctl()
745 args->bo_handles[job->base.bo_count]); in v3d_submit_tfu_ioctl()
748 job->base.bo_count, in v3d_submit_tfu_ioctl()
749 args->bo_handles[job->base.bo_count]); in v3d_submit_tfu_ioctl()
755 job->base.bo[job->base.bo_count] = bo; in v3d_submit_tfu_ioctl()
759 ret = v3d_lock_bo_reservations(&job->base, &acquire_ctx); in v3d_submit_tfu_ioctl()
764 ret = v3d_push_job(v3d_priv, &job->base, V3D_TFU); in v3d_submit_tfu_ioctl()
770 &job->base, &acquire_ctx, in v3d_submit_tfu_ioctl()
772 job->base.done_fence); in v3d_submit_tfu_ioctl()
774 v3d_job_put(&job->base); in v3d_submit_tfu_ioctl()
780 drm_gem_unlock_reservations(job->base.bo, job->base.bo_count, in v3d_submit_tfu_ioctl()
783 v3d_job_put(&job->base); in v3d_submit_tfu_ioctl()
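The TFU fragments trace the submission skeleton shared by the CL and CSD paths: allocate and init the job, resolve BO handles, lock reservations, push to the scheduler, attach fences and unlock, then drop the ioctl's reference. An abbreviated outline (argument validation and the driver's scheduling mutex are omitted; the exact v3d_job_init() arguments are an assumption):

job = kcalloc(1, sizeof(*job), GFP_KERNEL);
if (!job)
	return -ENOMEM;

ret = v3d_job_init(v3d, file_priv, &job->base, v3d_job_free, args->in_sync);
if (ret) {
	kfree(job);		/* kref not initialized yet: plain kfree */
	return ret;
}

/* ... resolve args->bo_handles[] into job->base.bo[], as in the loop above ... */

ret = v3d_lock_bo_reservations(&job->base, &acquire_ctx);
if (ret)
	goto fail;

ret = v3d_push_job(v3d_priv, &job->base, V3D_TFU);
if (ret)
	goto fail_unreserve;

v3d_attach_fences_and_unlock_reservation(file_priv, &job->base, &acquire_ctx,
					  args->out_sync,
					  job->base.done_fence);

v3d_job_put(&job->base);	/* the scheduler still holds its own reference */
return 0;

fail_unreserve:
	drm_gem_unlock_reservations(job->base.bo, job->base.bo_count,
				    &acquire_ctx);
fail:
	v3d_job_put(&job->base);
	return ret;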
789  * v3d_submit_csd_ioctl() - Submits a CSD (compute shader dispatch) job to the V3D. in v3d_submit_csd_ioctl()
804 struct v3d_csd_job *job; in v3d_submit_csd_ioctl() local
816 job = kcalloc(1, sizeof(*job), GFP_KERNEL); in v3d_submit_csd_ioctl()
817 if (!job) in v3d_submit_csd_ioctl()
820 ret = v3d_job_init(v3d, file_priv, &job->base, in v3d_submit_csd_ioctl()
823 kfree(job); in v3d_submit_csd_ioctl()
829 v3d_job_put(&job->base); in v3d_submit_csd_ioctl()
830 kfree(job); in v3d_submit_csd_ioctl()
836 v3d_job_put(&job->base); in v3d_submit_csd_ioctl()
841 job->args = *args; in v3d_submit_csd_ioctl()
853 job->base.perfmon = v3d_perfmon_find(v3d_priv, in v3d_submit_csd_ioctl()
855 if (!job->base.perfmon) { in v3d_submit_csd_ioctl()
862 ret = v3d_push_job(v3d_priv, &job->base, V3D_CSD); in v3d_submit_csd_ioctl()
867 dma_fence_get(job->base.done_fence)); in v3d_submit_csd_ioctl()
882 v3d_job_put(&job->base); in v3d_submit_csd_ioctl()
892 v3d_job_put(&job->base); in v3d_submit_csd_ioctl()
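The CSD path additionally resolves the submit's performance-monitor ID against the file's perfmon table before pushing the job; a missing ID fails the submit, and the reference taken here is released in v3d_job_free() via the v3d_perfmon_put() visible in the free fragments near the top. A short sketch, assuming the args field is named perfmon_id as in the uapi of this era:

/* Attach an optional perfmon to the job before it is pushed. */
if (args->perfmon_id) {
	job->base.perfmon = v3d_perfmon_find(v3d_priv, args->perfmon_id);
	if (!job->base.perfmon) {
		ret = -ENOENT;
		goto fail;	/* unwound through v3d_job_put() */
	}
}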