Searched refs:s_fence (Results 1 – 15 of 15) sorted by relevance
172 struct drm_sched_fence *s_fence; in drm_sched_dependency_optimized() local
178 s_fence = to_drm_sched_fence(fence); in drm_sched_dependency_optimized()
179 if (s_fence && s_fence->sched == sched) in drm_sched_dependency_optimized()
336 if (bad->s_fence->scheduled.context == in drm_sched_increase_karma()
379 if (s_job->s_fence->parent && in drm_sched_stop()
380 dma_fence_remove_callback(s_job->s_fence->parent, in drm_sched_stop()
398 dma_fence_wait(&s_job->s_fence->finished, false); in drm_sched_stop()
442 struct dma_fence *fence = s_job->s_fence->parent; in drm_sched_start()
485 struct drm_sched_fence *s_fence = s_job->s_fence; in drm_sched_resubmit_jobs() local
489 guilty_context = s_job->s_fence->scheduled.context; in drm_sched_resubmit_jobs()
[all …]
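The DRM scheduler core hits above keep coming back to one test: convert a dependency dma_fence with to_drm_sched_fence() and compare its sched pointer against the scheduler the waiting job runs on (lines 172-179). A minimal sketch of that check, assuming only the drm_sched_fence and drm_gpu_scheduler types from <drm/gpu_scheduler.h>; the helper name is invented for illustration:

#include <drm/gpu_scheduler.h>

/* Illustrative helper, not a kernel function: true when @fence was
 * produced by @sched itself. to_drm_sched_fence() returns NULL for
 * fences that are not scheduler fences, so plain dma_fences fall
 * through to false. */
static bool fence_is_from_scheduler(struct dma_fence *fence,
                                    struct drm_gpu_scheduler *sched)
{
        struct drm_sched_fence *s_fence = to_drm_sched_fence(fence);

        return s_fence && s_fence->sched == sched;
}

This is the shape of the test drm_sched_dependency_optimized() appears to make: a dependency that the same scheduler already orders does not need its own hardware wait.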
222 drm_sched_fence_finished(job->s_fence); in drm_sched_entity_kill_jobs_cb()
223 WARN_ON(job->s_fence->parent); in drm_sched_entity_kill_jobs_cb()
241 struct drm_sched_fence *s_fence = job->s_fence; in drm_sched_entity_kill_jobs() local
243 drm_sched_fence_scheduled(s_fence); in drm_sched_entity_kill_jobs()
244 dma_fence_set_error(&s_fence->finished, -ESRCH); in drm_sched_entity_kill_jobs()
401 struct drm_sched_fence *s_fence; in drm_sched_entity_add_dependency_cb() local
414 s_fence = to_drm_sched_fence(fence); in drm_sched_entity_add_dependency_cb()
415 if (s_fence && s_fence->sched == sched) { in drm_sched_entity_add_dependency_cb()
421 fence = dma_fence_get(&s_fence->scheduled); in drm_sched_entity_add_dependency_cb()
467 dma_fence_set_error(&sched_job->s_fence->finished, -ECANCELED); in drm_sched_entity_pop_job()
[all …]
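Lines 222-244 above are the entity teardown path: jobs that will never run still have to complete their scheduler fences, with an error recorded on the finished fence so waiters do not hang. A hedged sketch of that sequence, built only from the calls visible in the hits; the helper name is invented:

#include <linux/dma-fence.h>
#include <drm/gpu_scheduler.h>

/* Illustrative only: complete a job's fences without ever running it,
 * the way the entity-kill hits above do. Anyone waiting on the job
 * sees -ESRCH instead of blocking forever once the entity goes away. */
static void kill_job_fences(struct drm_sched_job *job)
{
        struct drm_sched_fence *s_fence = job->s_fence;

        drm_sched_fence_scheduled(s_fence);               /* "scheduled" signals */
        dma_fence_set_error(&s_fence->finished, -ESRCH);  /* record why it died  */
        drm_sched_fence_finished(s_fence);                /* then "finished"     */
}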
50 __entry->fence = &sched_job->s_fence->finished;
67 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_same_dev() local
69 if (s_fence) { in amdgpu_sync_same_dev()
72 ring = container_of(s_fence->sched, struct amdgpu_ring, sched); in amdgpu_sync_same_dev()
88 struct drm_sched_fence *s_fence; in amdgpu_sync_get_owner() local
94 s_fence = to_drm_sched_fence(f); in amdgpu_sync_get_owner()
95 if (s_fence) in amdgpu_sync_get_owner()
96 return s_fence->owner; in amdgpu_sync_get_owner()
267 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_peek_fence() local
275 if (ring && s_fence) { in amdgpu_sync_peek_fence()
279 if (s_fence->sched == &ring->sched) { in amdgpu_sync_peek_fence()
[all …]
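The amdgpu_sync hits at lines 88-96 read back the owner cookie a driver attached when it initialized the job, which amdgpu uses to decide which fences need an explicit wait. A small sketch of the extraction, assuming only to_drm_sched_fence(); the NULL fallback stands in for amdgpu's own "undefined owner" constant:

#include <drm/gpu_scheduler.h>

/* Illustrative only: recover the owner pointer passed to
 * drm_sched_job_init(). Non-scheduler fences carry no owner, so
 * return NULL for them. */
static void *sched_fence_owner(struct dma_fence *f)
{
        struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

        return s_fence ? s_fence->owner : NULL;
}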
39 if (amdgpu_ring_soft_recovery(ring, job->vmid, s_job->s_fence->parent)) { in amdgpu_job_timedout()
112 f = job->base.s_fence ? &job->base.s_fence->finished : job->fence; in amdgpu_job_free_resources()
157 *f = dma_fence_get(&job->base.s_fence->finished); in amdgpu_job_submit()
205 &job->base.s_fence->finished, in amdgpu_job_dependency()
224 finished = &job->base.s_fence->finished; in amdgpu_job_run()
36 job->base.s_fence->finished.ops->get_timeline_name(&job->base.s_fence->finished)
180 __entry->context = job->base.s_fence->finished.context;
181 __entry->seqno = job->base.s_fence->finished.seqno;
205 __entry->context = job->base.s_fence->finished.context;
206 __entry->seqno = job->base.s_fence->finished.seqno;
146 fence_ctx = job->base.s_fence ? in amdgpu_ib_schedule()
147 job->base.s_fence->scheduled.context : 0; in amdgpu_ib_schedule()
1052 struct drm_sched_fence *s_fence; in amdgpu_cs_process_fence_dep() local
1055 s_fence = to_drm_sched_fence(fence); in amdgpu_cs_process_fence_dep()
1056 fence = dma_fence_get(&s_fence->scheduled); in amdgpu_cs_process_fence_dep()
1310 p->fence = dma_fence_get(&job->base.s_fence->finished); in amdgpu_cs_submit()
3863 if (job && job->base.s_fence->parent && in amdgpu_device_gpu_recover()
3864 dma_fence_is_signaled(job->base.s_fence->parent)) in amdgpu_device_gpu_recover()
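Lines 3863-3864 guard GPU recovery on s_fence->parent, the hardware fence returned by the driver's run_job() callback: if the hardware already signaled the job, there is nothing left to recover for it. A hedged sketch of that guard; the helper name is invented, and parent can be NULL when the job never reached the hardware:

#include <linux/dma-fence.h>
#include <drm/gpu_scheduler.h>

/* Illustrative only: true when the job's hardware fence exists and has
 * already signaled, i.e. the hardware finished the job even though a
 * scheduler-side timeout fired. */
static bool job_already_done_in_hw(struct drm_sched_job *s_job)
{
        struct dma_fence *parent = s_job->s_fence->parent;

        return parent && dma_fence_is_signaled(parent);
}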
77 if (likely(!sched_job->s_fence->finished.error)) in etnaviv_sched_run_job()
160 submit->out_fence = dma_fence_get(&submit->sched_job.s_fence->finished); in etnaviv_sched_push_job()
176 struct dma_fence *fence = dma_fence_get(&task->base.s_fence->finished); in lima_sched_context_queue_task()
203 if (job->s_fence->finished.error < 0) in lima_sched_run_job()
96 if (unlikely(job->base.base.s_fence->finished.error)) in v3d_bin_job_run()
148 if (unlikely(job->base.base.s_fence->finished.error)) in v3d_render_job_run()
476 job->done_fence = dma_fence_get(&job->base.s_fence->finished); in v3d_push_job()
186 struct drm_sched_fence *s_fence; member
233 job->render_done_fence = dma_fence_get(&job->base.s_fence->finished); in panfrost_job_push()
330 if (unlikely(job->base.s_fence->finished.error)) in panfrost_job_run()
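The etnaviv, lima, v3d and panfrost hits above repeat the same two driver-side idioms: at push time, take a reference on s_fence->finished and hand it out as the job's completion fence; in run_job(), skip submission when finished.error is already set (the scheduler records -ECANCELED or the guilty-context error there). A combined sketch under those assumptions; the demo_job names are placeholders, not any driver's real structures:

#include <linux/dma-fence.h>
#include <drm/gpu_scheduler.h>

/* Placeholder driver job: only the embedded drm_sched_job matters here. */
struct demo_job {
        struct drm_sched_job base;
        struct dma_fence *done_fence;   /* exposed as the out-fence */
};

/* Push-time idiom: the "finished" fence outlives the job, so grab a
 * reference and use it as the completion fence handed back to callers.
 * Assumes drm_sched_job_init() already succeeded for &job->base. */
static void demo_job_push(struct demo_job *job, struct drm_sched_entity *entity)
{
        job->done_fence = dma_fence_get(&job->base.s_fence->finished);
        drm_sched_entity_push_job(&job->base, entity);
}

/* Run-time idiom: a job whose finished fence already carries an error
 * must not touch the hardware. */
static struct dma_fence *demo_job_run(struct drm_sched_job *sched_job)
{
        struct demo_job *job = container_of(sched_job, struct demo_job, base);

        if (unlikely(sched_job->s_fence->finished.error))
                return NULL;

        /* ... queue job to the hardware and return its hardware fence ... */
        return NULL;
}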