Lines matching refs:s_fence
242 drm_sched_fence_finished(job->s_fence); in drm_sched_entity_kill_jobs_cb()
243 WARN_ON(job->s_fence->parent); in drm_sched_entity_kill_jobs_cb()
244 dma_fence_put(&job->s_fence->finished); in drm_sched_entity_kill_jobs_cb()
327 struct drm_sched_fence *s_fence = job->s_fence; in drm_sched_entity_fini() local
328 drm_sched_fence_scheduled(s_fence); in drm_sched_entity_fini()
329 dma_fence_set_error(&s_fence->finished, -ESRCH); in drm_sched_entity_fini()
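The two groups above are the entity-teardown path: drm_sched_entity_fini() force-completes jobs that are still queued when their entity goes away, and drm_sched_entity_kill_jobs_cb() finishes the cleanup once the entity's last scheduled fence has signaled. A minimal sketch of the combined effect on one job's scheduler fence follows; the real code splits this between the function and the callback, and the helper name here is hypothetical. The headers in this block are assumed by the later sketches as well.

    #include <drm/gpu_scheduler.h>
    #include <linux/dma-fence.h>

    /* Sketch: force-complete a job whose entity is being destroyed. */
    static void kill_queued_job(struct drm_sched_job *job)
    {
            struct drm_sched_fence *s_fence = job->s_fence;

            /* Pretend the job was scheduled, but record that its owner
             * is gone (-ESRCH) before signaling completion. */
            drm_sched_fence_scheduled(s_fence);
            dma_fence_set_error(&s_fence->finished, -ESRCH);
            drm_sched_fence_finished(s_fence);

            /* A job killed this way never received a hardware fence. */
            WARN_ON(s_fence->parent);

            /* Drop the job's reference on its finished fence. */
            dma_fence_put(&s_fence->finished);
    }

The -ESRCH error lets waiters distinguish "entity destroyed" from jobs cancelled for other reasons (compare the -ECANCELED cases further down).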
422 struct drm_sched_fence *s_fence; in drm_sched_dependency_optimized() local
428 s_fence = to_drm_sched_fence(fence); in drm_sched_dependency_optimized()
429 if (s_fence && s_fence->sched == sched) in drm_sched_dependency_optimized()
440 struct drm_sched_fence *s_fence; in drm_sched_entity_add_dependency_cb() local
453 s_fence = to_drm_sched_fence(fence); in drm_sched_entity_add_dependency_cb()
454 if (s_fence && s_fence->sched == sched) { in drm_sched_entity_add_dependency_cb()
460 fence = dma_fence_get(&s_fence->scheduled); in drm_sched_entity_add_dependency_cb()
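drm_sched_dependency_optimized() and drm_sched_entity_add_dependency_cb() apply the same optimization: when a dependency fence is itself a drm_sched_fence produced by the same scheduler, the new job only has to wait for that dependency to be scheduled rather than finished, because execution order on the hardware ring covers the rest; only foreign fences need the full wait. A hedged sketch of the check (the helper is hypothetical, the calls are the ones shown above):

    /* Sketch: relax a dependency that comes from the same scheduler. */
    static struct dma_fence *
    relax_dependency(struct drm_gpu_scheduler *sched, struct dma_fence *fence)
    {
            struct drm_sched_fence *s_fence = to_drm_sched_fence(fence);

            if (s_fence && s_fence->sched == sched) {
                    /* Same ring: waiting for "scheduled" is enough,
                     * ring ordering guarantees the rest. */
                    struct dma_fence *scheduled =
                            dma_fence_get(&s_fence->scheduled);

                    dma_fence_put(fence);
                    return scheduled;
            }

            /* Foreign fence: the full "finished" wait is required. */
            return fence;
    }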
496 dma_fence_set_error(&sched_job->s_fence->finished, -ECANCELED); in drm_sched_entity_pop_job()
499 entity->last_scheduled = dma_fence_get(&sched_job->s_fence->finished); in drm_sched_entity_pop_job()
567 if (!dma_fence_is_signaled(&next->s_fence->finished)) in drm_sched_job_finish()
574 dma_fence_put(&s_job->s_fence->finished); in drm_sched_job_finish()
590 dma_fence_add_callback(&s_job->s_fence->finished, &s_job->finish_cb, in drm_sched_job_begin()
625 if (s_job->s_fence->parent && in drm_sched_hw_job_reset()
626 dma_fence_remove_callback(s_job->s_fence->parent, in drm_sched_hw_job_reset()
627 &s_job->s_fence->cb)) { in drm_sched_hw_job_reset()
628 dma_fence_put(s_job->s_fence->parent); in drm_sched_hw_job_reset()
629 s_job->s_fence->parent = NULL; in drm_sched_hw_job_reset()
646 if (bad->s_fence->scheduled.context == entity->fence_context) { in drm_sched_hw_job_reset()
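drm_sched_hw_job_reset() runs before a GPU reset: it detaches every in-flight job from its hardware fence so that a late completion cannot call back into state the reset is about to tear down, and it uses the scheduled fence's context (line 646) to find the entity that submitted the bad job. A sketch of the detach step for a single job (list walking, locking, and karma accounting omitted):

    /* Sketch: disconnect one in-flight job from its HW fence. */
    static void detach_hw_fence(struct drm_sched_job *s_job)
    {
            struct drm_sched_fence *s_fence = s_job->s_fence;

            if (s_fence->parent &&
                dma_fence_remove_callback(s_fence->parent, &s_fence->cb)) {
                    /* The completion callback had not run yet: drop the
                     * reference on the HW fence and forget it. */
                    dma_fence_put(s_fence->parent);
                    s_fence->parent = NULL;
            }
            /* If the callback already fired, drm_sched_process_job() has
             * signaled the finished fence and there is nothing to undo. */
    }

Each entity owns its own dma_fence context, so bad->s_fence->scheduled.context == entity->fence_context (line 646) is what ties the bad job back to the submitting entity.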
680 struct drm_sched_fence *s_fence = s_job->s_fence; in drm_sched_job_recovery() local
686 guilty_context = s_job->s_fence->scheduled.context; in drm_sched_job_recovery()
689 if (found_guilty && s_job->s_fence->scheduled.context == guilty_context) in drm_sched_job_recovery()
690 dma_fence_set_error(&s_fence->finished, -ECANCELED); in drm_sched_job_recovery()
697 s_fence->parent = dma_fence_get(fence); in drm_sched_job_recovery()
698 r = dma_fence_add_callback(fence, &s_fence->cb, in drm_sched_job_recovery()
701 drm_sched_process_job(fence, &s_fence->cb); in drm_sched_job_recovery()
707 drm_sched_process_job(NULL, &s_fence->cb); in drm_sched_job_recovery()
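drm_sched_job_recovery() resubmits the mirror list after the reset. The first job whose karma exceeds the scheduler's hang limit defines the guilty fence context; that job and every later job from the same context gets -ECANCELED set on its finished fence, and each job is then reattached to its new hardware fence with the same parent/callback handling the main loop uses (see the sketch after the drm_sched_main() references below). A condensed sketch follows; field names such as karma, hang_limit, and ring_mirror_list are assumed from the scheduler structures, and locking and error logging are left out.

    /* Sketch: resubmit pending jobs after a GPU reset. */
    static void resubmit_jobs(struct drm_gpu_scheduler *sched)
    {
            struct drm_sched_job *s_job, *tmp;
            uint64_t guilty_context = 0;
            bool found_guilty = false;

            list_for_each_entry_safe(s_job, tmp, &sched->ring_mirror_list, node) {
                    struct drm_sched_fence *s_fence = s_job->s_fence;
                    struct dma_fence *fence;

                    /* The first job over the hang limit is the culprit. */
                    if (!found_guilty &&
                        atomic_read(&s_job->karma) > sched->hang_limit) {
                            found_guilty = true;
                            guilty_context = s_fence->scheduled.context;
                    }

                    /* Cancel every job sharing the guilty fence context. */
                    if (found_guilty &&
                        s_fence->scheduled.context == guilty_context)
                            dma_fence_set_error(&s_fence->finished, -ECANCELED);

                    /* Resubmit and hook the completion callback again. */
                    fence = sched->ops->run_job(s_job);
                    if (fence) {
                            s_fence->parent = dma_fence_get(fence);
                            if (dma_fence_add_callback(fence, &s_fence->cb,
                                                       drm_sched_process_job) == -ENOENT)
                                    /* Already signaled: complete inline. */
                                    drm_sched_process_job(fence, &s_fence->cb);
                            dma_fence_put(fence);
                    } else {
                            /* No HW fence: treat the job as done. */
                            drm_sched_process_job(NULL, &s_fence->cb);
                    }
            }
    }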
736 job->s_fence = drm_sched_fence_create(entity, owner); in drm_sched_job_init()
737 if (!job->s_fence) in drm_sched_job_init()
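drm_sched_job_init() (lines 736-737) is where the scheduler fence is born: drm_sched_fence_create() allocates a drm_sched_fence whose embedded scheduled and finished dma_fences live on the entity's fence contexts, and the job holds it until drm_sched_job_finish() drops the job's reference on the finished fence (line 574). A minimal sketch of just that step (the rest of the init is omitted, and the helper name is hypothetical):

    /* Sketch: give a freshly initialised job its scheduler fence. */
    static int init_job_fence(struct drm_sched_job *job,
                              struct drm_sched_entity *entity, void *owner)
    {
            job->s_fence = drm_sched_fence_create(entity, owner);
            if (!job->s_fence)
                    return -ENOMEM;         /* allocation failed */
            return 0;
    }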
810 struct drm_sched_fence *s_fence = in drm_sched_process_job() local
812 struct drm_gpu_scheduler *sched = s_fence->sched; in drm_sched_process_job()
814 dma_fence_get(&s_fence->finished); in drm_sched_process_job()
816 drm_sched_fence_finished(s_fence); in drm_sched_process_job()
818 trace_drm_sched_process_job(s_fence); in drm_sched_process_job()
819 dma_fence_put(&s_fence->finished); in drm_sched_process_job()
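drm_sched_process_job() (lines 810-819) is the dma_fence callback attached to the hardware fence. It recovers the drm_sched_fence from the embedded cb member and signals the finished fence; the temporary get/put pair keeps the fence alive across the signal, since signaling can trigger job teardown. A sketch of its shape, with the in-flight counter and worker wake-up using assumed field names (hw_rq_count, wake_up_worker):

    /* Sketch: completion callback run when the HW fence signals
     * (also called directly, with f NULL or already signaled). */
    static void process_job(struct dma_fence *f, struct dma_fence_cb *cb)
    {
            struct drm_sched_fence *s_fence =
                    container_of(cb, struct drm_sched_fence, cb);
            struct drm_gpu_scheduler *sched = s_fence->sched;

            /* Hold the finished fence: signaling it may free the job. */
            dma_fence_get(&s_fence->finished);

            atomic_dec(&sched->hw_rq_count);        /* one less job in flight */
            drm_sched_fence_finished(s_fence);      /* signal "finished" */

            dma_fence_put(&s_fence->finished);

            /* Let the scheduler thread look for the next runnable job. */
            wake_up_interruptible(&sched->wake_up_worker);
    }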
857 struct drm_sched_fence *s_fence; in drm_sched_main() local
873 s_fence = sched_job->s_fence; in drm_sched_main()
879 drm_sched_fence_scheduled(s_fence); in drm_sched_main()
882 s_fence->parent = dma_fence_get(fence); in drm_sched_main()
883 r = dma_fence_add_callback(fence, &s_fence->cb, in drm_sched_main()
886 drm_sched_process_job(fence, &s_fence->cb); in drm_sched_main()
892 drm_sched_process_job(NULL, &s_fence->cb); in drm_sched_main()
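The drm_sched_main() references (lines 857-892) are the normal submit path that the recovery sketch above mirrors: the driver's run_job() hook produces the hardware fence, the scheduled fence is signaled, the hardware fence is stashed as s_fence->parent, and drm_sched_process_job() is attached to it; -ENOENT from dma_fence_add_callback() (fence already signaled) or a NULL fence from the driver makes the completion run inline. A sketch of that tail of the loop (entity selection, job popping, and error logging omitted):

    /* Sketch: hand one popped job to the hardware. */
    static void submit_job(struct drm_gpu_scheduler *sched,
                           struct drm_sched_job *sched_job)
    {
            struct drm_sched_fence *s_fence = sched_job->s_fence;
            struct dma_fence *fence;
            int r;

            fence = sched->ops->run_job(sched_job);  /* driver submit hook */
            drm_sched_fence_scheduled(s_fence);      /* signal "scheduled" */

            if (fence) {
                    /* Remember the HW fence and wait for its completion. */
                    s_fence->parent = dma_fence_get(fence);
                    r = dma_fence_add_callback(fence, &s_fence->cb,
                                               drm_sched_process_job);
                    if (r == -ENOENT)
                            /* Already signaled: finish the job right away. */
                            drm_sched_process_job(fence, &s_fence->cb);
                    dma_fence_put(fence);
            } else {
                    /* The driver produced no HW fence: complete at once. */
                    drm_sched_process_job(NULL, &s_fence->cb);
            }
    }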