
Searched refs:s_fence (Results 1 – 15 of 15) sorted by relevance

/Linux-v5.4/drivers/gpu/drm/scheduler/
sched_main.c
172 struct drm_sched_fence *s_fence; in drm_sched_dependency_optimized() local
178 s_fence = to_drm_sched_fence(fence); in drm_sched_dependency_optimized()
179 if (s_fence && s_fence->sched == sched) in drm_sched_dependency_optimized()
336 if (bad->s_fence->scheduled.context == in drm_sched_increase_karma()
379 if (s_job->s_fence->parent && in drm_sched_stop()
380 dma_fence_remove_callback(s_job->s_fence->parent, in drm_sched_stop()
398 dma_fence_wait(&s_job->s_fence->finished, false); in drm_sched_stop()
442 struct dma_fence *fence = s_job->s_fence->parent; in drm_sched_start()
485 struct drm_sched_fence *s_fence = s_job->s_fence; in drm_sched_resubmit_jobs() local
489 guilty_context = s_job->s_fence->scheduled.context; in drm_sched_resubmit_jobs()
[all …]
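
The sched_main.c hits (lines 172–179) show the core idiom behind the dependency optimization: down-cast a generic dma_fence to a scheduler fence with to_drm_sched_fence(), which returns NULL for foreign fences, and compare its sched back-pointer against the current scheduler. A minimal sketch of that test, with the wrapper name invented purely for illustration:

    #include <drm/gpu_scheduler.h>

    /* Illustrative helper, not a kernel symbol: true if @fence was produced
     * by @sched itself, in which case ordering is already guaranteed and the
     * dependency can be optimized away (mirrors sched_main.c:178-179). */
    static bool fence_is_from_scheduler(struct drm_gpu_scheduler *sched,
                                        struct dma_fence *fence)
    {
            struct drm_sched_fence *s_fence = to_drm_sched_fence(fence);

            /* to_drm_sched_fence() yields NULL for non-scheduler fences. */
            return s_fence && s_fence->sched == sched;
    }
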
sched_entity.c
222 drm_sched_fence_finished(job->s_fence); in drm_sched_entity_kill_jobs_cb()
223 WARN_ON(job->s_fence->parent); in drm_sched_entity_kill_jobs_cb()
241 struct drm_sched_fence *s_fence = job->s_fence; in drm_sched_entity_kill_jobs() local
243 drm_sched_fence_scheduled(s_fence); in drm_sched_entity_kill_jobs()
244 dma_fence_set_error(&s_fence->finished, -ESRCH); in drm_sched_entity_kill_jobs()
401 struct drm_sched_fence *s_fence; in drm_sched_entity_add_dependency_cb() local
414 s_fence = to_drm_sched_fence(fence); in drm_sched_entity_add_dependency_cb()
415 if (s_fence && s_fence->sched == sched) { in drm_sched_entity_add_dependency_cb()
421 fence = dma_fence_get(&s_fence->scheduled); in drm_sched_entity_add_dependency_cb()
467 dma_fence_set_error(&sched_job->s_fence->finished, -ECANCELED); in drm_sched_entity_pop_job()
[all …]
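
The sched_entity.c hits (lines 222–244) are the job-cancellation path: each queued job's scheduled fence is force-signalled, its finished fence is tagged with -ESRCH, and the completion callback then signals the finished fence. A condensed sketch of that sequence; the real v5.4 code splits it between drm_sched_entity_kill_jobs() and its callback:

    /* Condensed sketch: retire one queued job that will never reach hardware. */
    static void kill_queued_job(struct drm_sched_job *job)
    {
            struct drm_sched_fence *s_fence = job->s_fence;

            drm_sched_fence_scheduled(s_fence);               /* "scheduled" as far as ordering goes */
            dma_fence_set_error(&s_fence->finished, -ESRCH);  /* let waiters see the cancellation */
            drm_sched_fence_finished(s_fence);                /* signal completion */
    }
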
gpu_scheduler_trace.h
50 __entry->fence = &sched_job->s_fence->finished;
/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/
amdgpu_sync.c
67 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_same_dev() local
69 if (s_fence) { in amdgpu_sync_same_dev()
72 ring = container_of(s_fence->sched, struct amdgpu_ring, sched); in amdgpu_sync_same_dev()
88 struct drm_sched_fence *s_fence; in amdgpu_sync_get_owner() local
94 s_fence = to_drm_sched_fence(f); in amdgpu_sync_get_owner()
95 if (s_fence) in amdgpu_sync_get_owner()
96 return s_fence->owner; in amdgpu_sync_get_owner()
267 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_peek_fence() local
275 if (ring && s_fence) { in amdgpu_sync_peek_fence()
279 if (s_fence->sched == &ring->sched) { in amdgpu_sync_peek_fence()
[all …]
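
In amdgpu the scheduler fence doubles as a carrier of submission metadata: s_fence->owner identifies who queued the job, and container_of() on s_fence->sched recovers the amdgpu_ring the scheduler is embedded in (lines 67–96 and 267–279 above). Roughly, and ignoring amdgpu's sentinel owner constants:

    /* Sketch only: recover per-submission metadata from a scheduler fence. */
    static void *fence_owner(struct dma_fence *f)
    {
            struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

            return s_fence ? s_fence->owner : NULL;
    }

    static struct amdgpu_ring *fence_ring(struct drm_sched_fence *s_fence)
    {
            /* Each amdgpu_ring embeds its drm_gpu_scheduler as .sched. */
            return container_of(s_fence->sched, struct amdgpu_ring, sched);
    }
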
amdgpu_job.c
39 if (amdgpu_ring_soft_recovery(ring, job->vmid, s_job->s_fence->parent)) { in amdgpu_job_timedout()
112 f = job->base.s_fence ? &job->base.s_fence->finished : job->fence; in amdgpu_job_free_resources()
157 *f = dma_fence_get(&job->base.s_fence->finished); in amdgpu_job_submit()
205 &job->base.s_fence->finished, in amdgpu_job_dependency()
224 finished = &job->base.s_fence->finished; in amdgpu_job_run()
amdgpu_trace.h
36 job->base.s_fence->finished.ops->get_timeline_name(&job->base.s_fence->finished)
180 __entry->context = job->base.s_fence->finished.context;
181 __entry->seqno = job->base.s_fence->finished.seqno;
205 __entry->context = job->base.s_fence->finished.context;
206 __entry->seqno = job->base.s_fence->finished.seqno;
amdgpu_ib.c
146 fence_ctx = job->base.s_fence ? in amdgpu_ib_schedule()
147 job->base.s_fence->scheduled.context : 0; in amdgpu_ib_schedule()
amdgpu_cs.c
1052 struct drm_sched_fence *s_fence; in amdgpu_cs_process_fence_dep() local
1055 s_fence = to_drm_sched_fence(fence); in amdgpu_cs_process_fence_dep()
1056 fence = dma_fence_get(&s_fence->scheduled); in amdgpu_cs_process_fence_dep()
1310 p->fence = dma_fence_get(&job->base.s_fence->finished); in amdgpu_cs_submit()
amdgpu_device.c
3863 if (job && job->base.s_fence->parent && in amdgpu_device_gpu_recover()
3864 dma_fence_is_signaled(job->base.s_fence->parent)) in amdgpu_device_gpu_recover()
/Linux-v5.4/drivers/gpu/drm/etnaviv/
etnaviv_sched.c
77 if (likely(!sched_job->s_fence->finished.error)) in etnaviv_sched_run_job()
160 submit->out_fence = dma_fence_get(&submit->sched_job.s_fence->finished); in etnaviv_sched_push_job()
/Linux-v5.4/drivers/gpu/drm/lima/
lima_sched.c
176 struct dma_fence *fence = dma_fence_get(&task->base.s_fence->finished); in lima_sched_context_queue_task()
203 if (job->s_fence->finished.error < 0) in lima_sched_run_job()
/Linux-v5.4/drivers/gpu/drm/v3d/
v3d_sched.c
96 if (unlikely(job->base.base.s_fence->finished.error)) in v3d_bin_job_run()
148 if (unlikely(job->base.base.s_fence->finished.error)) in v3d_render_job_run()
v3d_gem.c
476 job->done_fence = dma_fence_get(&job->base.s_fence->finished); in v3d_push_job()
/Linux-v5.4/include/drm/
gpu_scheduler.h
186 struct drm_sched_fence *s_fence; member
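
gpu_scheduler.h:186 is the declaration of the member itself; every dereference in the results above goes through it. For orientation, an abridged paraphrase of the v5.4 definitions, limited to the fields that actually appear in these hits:

    struct drm_sched_fence {
            struct dma_fence          scheduled;  /* signalled when the job is picked to run */
            struct dma_fence          finished;   /* signalled (or errored) when the job completes */
            struct dma_fence          *parent;    /* hardware fence returned by run_job() */
            struct drm_gpu_scheduler  *sched;     /* scheduler that owns this fence */
            void                      *owner;     /* opaque owner cookie (used by amdgpu_sync) */
            /* ... */
    };

    struct drm_sched_job {
            /* ... */
            struct drm_sched_fence    *s_fence;   /* the member hit at gpu_scheduler.h:186 */
            /* ... */
    };
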
/Linux-v5.4/drivers/gpu/drm/panfrost/
panfrost_job.c
233 job->render_done_fence = dma_fence_get(&job->base.s_fence->finished); in panfrost_job_push()
330 if (unlikely(job->base.s_fence->finished.error)) in panfrost_job_run()
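
The driver-side hits (etnaviv, lima, v3d, amdgpu, panfrost) all follow the same two idioms: at push time, take a reference on s_fence->finished to hand back as the out-fence, and in run_job(), skip hardware submission when finished.error is already set because the job was cancelled or marked guilty after a reset. A generic sketch under those assumptions; my_push_job, my_run_job, my_job, to_my_job and my_hw_submit are placeholders, not symbols of any of these drivers:

    /* Push side: keep the scheduler's "finished" fence as the fence userspace
     * will wait on, then queue the job on its entity. */
    static struct dma_fence *my_push_job(struct drm_sched_job *sched_job,
                                         struct drm_sched_entity *entity)
    {
            struct dma_fence *out = dma_fence_get(&sched_job->s_fence->finished);

            drm_sched_entity_push_job(sched_job, entity);
            return out;
    }

    /* run_job() side: a cancelled or guilty job carries an error on its
     * finished fence, so nothing is sent to the hardware. */
    static struct dma_fence *my_run_job(struct drm_sched_job *sched_job)
    {
            struct my_job *job = to_my_job(sched_job);       /* driver-specific container */

            if (unlikely(sched_job->s_fence->finished.error))
                    return NULL;

            return my_hw_submit(job);                        /* becomes s_fence->parent */
    }
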