Searched refs:sched_engine (Results 1 – 22 of 22) sorted by relevance

/Linux-v6.1/drivers/gpu/drm/i915/
i915_scheduler.c
39 static void assert_priolists(struct i915_sched_engine * const sched_engine) in assert_priolists() argument
47 GEM_BUG_ON(rb_first_cached(&sched_engine->queue) != in assert_priolists()
48 rb_first(&sched_engine->queue.rb_root)); in assert_priolists()
51 for (rb = rb_first_cached(&sched_engine->queue); rb; rb = rb_next(rb)) { in assert_priolists()
60 i915_sched_lookup_priolist(struct i915_sched_engine *sched_engine, int prio) in i915_sched_lookup_priolist() argument
66 lockdep_assert_held(&sched_engine->lock); in i915_sched_lookup_priolist()
67 assert_priolists(sched_engine); in i915_sched_lookup_priolist()
69 if (unlikely(sched_engine->no_priolist)) in i915_sched_lookup_priolist()
75 parent = &sched_engine->queue.rb_root.rb_node; in i915_sched_lookup_priolist()
90 p = &sched_engine->default_priolist; in i915_sched_lookup_priolist()
[all …]
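The i915_scheduler.c hits show the enqueue side of the API: the caller must hold sched_engine->lock, and i915_sched_lookup_priolist() resolves a priority value to the request list for that level in the sched_engine->queue rb-tree. A minimal sketch of that pattern follows; the sketch_queue_request() wrapper is hypothetical, not the driver's real submit path.

/* Sketch only: the lock-then-lookup pattern visible in the hits above.
 * Assumes the i915_scheduler.h and i915_request.h headers from this tree.
 */
static void sketch_queue_request(struct i915_sched_engine *se,
				 struct i915_request *rq, int prio)
{
	struct list_head *plist;

	spin_lock_irq(&se->lock);
	/* Returns the per-priority request list, creating it if needed. */
	plist = i915_sched_lookup_priolist(se, prio);
	list_add_tail(&rq->sched.link, plist);
	spin_unlock_irq(&se->lock);
}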
i915_scheduler.h
43 i915_sched_lookup_priolist(struct i915_sched_engine *sched_engine, int prio);
56 i915_sched_engine_get(struct i915_sched_engine *sched_engine) in i915_sched_engine_get() argument
58 kref_get(&sched_engine->ref); in i915_sched_engine_get()
59 return sched_engine; in i915_sched_engine_get()
63 i915_sched_engine_put(struct i915_sched_engine *sched_engine) in i915_sched_engine_put() argument
65 kref_put(&sched_engine->ref, sched_engine->destroy); in i915_sched_engine_put()
69 i915_sched_engine_is_empty(struct i915_sched_engine *sched_engine) in i915_sched_engine_is_empty() argument
71 return RB_EMPTY_ROOT(&sched_engine->queue.rb_root); in i915_sched_engine_is_empty()
75 i915_sched_engine_reset_on_empty(struct i915_sched_engine *sched_engine) in i915_sched_engine_reset_on_empty() argument
77 if (i915_sched_engine_is_empty(sched_engine)) in i915_sched_engine_reset_on_empty()
[all …]
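The header hits spell out the lifetime and emptiness helpers: a kref-backed get/put pair (put invokes the per-instance destroy callback) and an is_empty check on the queue's rb-root. A hedged usage sketch built only from the helpers listed above; the wrapper function is hypothetical.

/* Sketch: take a reference for the duration of the query, then drop it. */
static bool sketch_engine_has_queued_work(struct i915_sched_engine *se)
{
	bool busy;

	se = i915_sched_engine_get(se);		/* kref_get(&se->ref) */
	busy = !i915_sched_engine_is_empty(se);	/* RB_EMPTY_ROOT(&se->queue.rb_root) */
	i915_sched_engine_put(se);		/* kref_put(&se->ref, se->destroy) */

	return busy;
}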
i915_request.c
262 spin_lock_irq(&locked->sched_engine->lock); in i915_request_active_engine()
264 spin_unlock(&locked->sched_engine->lock); in i915_request_active_engine()
266 spin_lock(&locked->sched_engine->lock); in i915_request_active_engine()
275 spin_unlock_irq(&locked->sched_engine->lock); in i915_request_active_engine()
616 lockdep_assert_held(&engine->sched_engine->lock); in __i915_request_submit()
707 spin_lock_irqsave(&engine->sched_engine->lock, flags); in i915_request_submit()
711 spin_unlock_irqrestore(&engine->sched_engine->lock, flags); in i915_request_submit()
725 lockdep_assert_held(&engine->sched_engine->lock); in __i915_request_unsubmit()
758 spin_lock_irqsave(&engine->sched_engine->lock, flags); in i915_request_unsubmit()
762 spin_unlock_irqrestore(&engine->sched_engine->lock, flags); in i915_request_unsubmit()
[all …]
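i915_request.c shows the lock discipline around (un)submission: the double-underscore helpers only assert that sched_engine->lock is held, while the public wrappers take it in irq-saving form. A sketch of that split (function names hypothetical):

/* Sketch of the __locked/wrapper split used by i915_request_(un)submit(). */
static void sketch_submit_locked(struct intel_engine_cs *engine,
				 struct i915_request *rq)
{
	lockdep_assert_held(&engine->sched_engine->lock);
	/* ... move rq onto engine->sched_engine->requests ... */
}

static void sketch_submit(struct intel_engine_cs *engine,
			  struct i915_request *rq)
{
	unsigned long flags;

	/* irqsave: callers may already be in irq context or have irqs off. */
	spin_lock_irqsave(&engine->sched_engine->lock, flags);
	sketch_submit_locked(engine, rq);
	spin_unlock_irqrestore(&engine->sched_engine->lock, flags);
}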
i915_scheduler_types.h
174 bool (*disabled)(struct i915_sched_engine *sched_engine);
i915_request.h
684 lockdep_is_held(&rq->engine->sched_engine->lock)); in i915_request_active_timeline()
i915_gpu_error.c
1615 spin_lock_irqsave(&engine->sched_engine->lock, flags); in capture_engine()
1617 spin_unlock_irqrestore(&engine->sched_engine->lock, in capture_engine()
/Linux-v6.1/drivers/gpu/drm/i915/gt/
intel_execlists_submission.c
278 static int queue_prio(const struct i915_sched_engine *sched_engine) in queue_prio() argument
282 rb = rb_first_cached(&sched_engine->queue); in queue_prio()
323 if (engine->sched_engine->queue_priority_hint <= last_prio) in need_preempt()
330 if (!list_is_last(&rq->sched.link, &engine->sched_engine->requests) && in need_preempt()
345 queue_prio(engine->sched_engine)) > last_prio; in need_preempt()
372 lockdep_assert_held(&engine->sched_engine->lock); in __unwind_incomplete_requests()
375 &engine->sched_engine->requests, in __unwind_incomplete_requests()
387 pl = i915_sched_lookup_priolist(engine->sched_engine, in __unwind_incomplete_requests()
390 GEM_BUG_ON(i915_sched_engine_is_empty(engine->sched_engine)); in __unwind_incomplete_requests()
550 spin_lock_irq(&engine->sched_engine->lock); in resubmit_virtual_request()
[all …]
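The execlists hits centre on preemption decisions: queue_prio() peeks at the first (highest-priority) node of the cached rb-tree and the result is weighed against queue_priority_hint and the last submitted priority. A sketch of that peek, assuming the struct i915_priolist layout from i915_priolist_types.h in this tree:

/* Sketch of the queue_prio() idea: report the best queued priority. */
static int sketch_peek_queue_prio(const struct i915_sched_engine *se)
{
	struct rb_node *rb = rb_first_cached(&se->queue);

	if (!rb)
		return INT_MIN;	/* empty queue: nothing to preempt for */

	return rb_entry(rb, struct i915_priolist, node)->priority;
}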
mock_engine.c
258 lockdep_assert_held(&rq->engine->sched_engine->lock); in mock_add_to_engine()
259 list_move_tail(&rq->sched.link, &rq->engine->sched_engine->requests); in mock_add_to_engine()
274 spin_lock_irq(&locked->sched_engine->lock); in mock_remove_from_engine()
276 spin_unlock(&locked->sched_engine->lock); in mock_remove_from_engine()
277 spin_lock(&engine->sched_engine->lock); in mock_remove_from_engine()
281 spin_unlock_irq(&locked->sched_engine->lock); in mock_remove_from_engine()
302 spin_lock_irqsave(&engine->sched_engine->lock, flags); in mock_reset_cancel()
305 list_for_each_entry(rq, &engine->sched_engine->requests, sched.link) in mock_reset_cancel()
318 spin_unlock_irqrestore(&engine->sched_engine->lock, flags); in mock_reset_cancel()
332 i915_sched_engine_put(engine->sched_engine); in mock_engine_release()
[all …]
intel_engine_cs.c
1059 engine->sched_engine = i915_sched_engine_create(ENGINE_PHYSICAL); in engine_setup_common()
1060 if (!engine->sched_engine) { in engine_setup_common()
1064 engine->sched_engine->private_data = engine; in engine_setup_common()
1088 i915_sched_engine_put(engine->sched_engine); in engine_setup_common()
1128 spin_lock_irq(&engine->sched_engine->lock); in measure_breadcrumb_dw()
1132 spin_unlock_irq(&engine->sched_engine->lock); in measure_breadcrumb_dw()
1299 GEM_BUG_ON(!list_empty(&engine->sched_engine->requests)); in intel_engine_cleanup_common()
1301 i915_sched_engine_put(engine->sched_engine); in intel_engine_cleanup_common()
1624 struct tasklet_struct *t = &engine->sched_engine->tasklet; in __intel_engine_flush_submission()
1664 if (!i915_sched_engine_is_empty(engine->sched_engine)) in intel_engine_is_idle()
[all …]
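intel_engine_cs.c shows the lifecycle from the engine's side: engine_setup_common() allocates the scheduler with i915_sched_engine_create(ENGINE_PHYSICAL), stores a back-pointer in private_data, and both the setup error path and intel_engine_cleanup_common() drop the reference with i915_sched_engine_put(). A condensed sketch of that pairing; the wrapper names are hypothetical and error handling is trimmed.

/* Sketch of the create/put pairing visible in engine_setup_common(). */
static int sketch_setup_sched(struct intel_engine_cs *engine)
{
	engine->sched_engine = i915_sched_engine_create(ENGINE_PHYSICAL);
	if (!engine->sched_engine)
		return -ENOMEM;

	/* Back-pointer so scheduler callbacks can reach their engine. */
	engine->sched_engine->private_data = engine;
	return 0;
}

static void sketch_teardown_sched(struct intel_engine_cs *engine)
{
	/* Last put invokes sched_engine->destroy via the kref. */
	i915_sched_engine_put(engine->sched_engine);
}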
intel_ring_submission.c
357 spin_lock_irqsave(&engine->sched_engine->lock, flags); in reset_rewind()
359 list_for_each_entry(pos, &engine->sched_engine->requests, sched.link) { in reset_rewind()
414 spin_unlock_irqrestore(&engine->sched_engine->lock, flags); in reset_rewind()
426 spin_lock_irqsave(&engine->sched_engine->lock, flags); in reset_cancel()
429 list_for_each_entry(request, &engine->sched_engine->requests, sched.link) in reset_cancel()
435 spin_unlock_irqrestore(&engine->sched_engine->lock, flags); in reset_cancel()
611 lockdep_assert_held(&engine->sched_engine->lock); in ring_context_revoke()
612 list_for_each_entry_continue(rq, &engine->sched_engine->requests, in ring_context_revoke()
1100 lockdep_assert_held(&rq->engine->sched_engine->lock); in add_to_engine()
1101 list_move_tail(&rq->sched.link, &rq->engine->sched_engine->requests); in add_to_engine()
[all …]
intel_engine_heartbeat.c
131 if (i915_sched_engine_disabled(engine->sched_engine)) { in heartbeat()
155 } else if (engine->sched_engine->schedule && in heartbeat()
170 engine->sched_engine->schedule(rq, &attr); in heartbeat()
selftest_execlists.c
47 tasklet_hi_schedule(&engine->sched_engine->tasklet); in wait_for_submit()
277 engine->sched_engine->schedule(rq[1], &attr); in live_unlite_restore()
557 tasklet_disable(&engine->sched_engine->tasklet); in engine_lock_reset_tasklet()
563 tasklet_enable(&engine->sched_engine->tasklet); in engine_lock_reset_tasklet()
578 tasklet_enable(&engine->sched_engine->tasklet); in engine_unlock_reset_tasklet()
632 engine->sched_engine->tasklet.callback(&engine->sched_engine->tasklet); in live_hold_reset()
921 engine->sched_engine->schedule(rq, &attr); in release_queue()
1204 tasklet_hi_schedule(&engine->sched_engine->tasklet); in live_timeslice_rewind()
1346 engine->sched_engine->schedule(rq, &attr); in live_timeslice_queue()
1888 engine->sched_engine->schedule(rq, &attr); in live_late_preempt()
[all …]
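The execlists selftests exercise two entry points repeatedly: kicking the submission tasklet and raising a request's priority through the optional schedule() hook with an i915_sched_attr. A sketch of the priority bump, with a hypothetical helper name:

/* Sketch: bump a request's priority the way the selftests do. */
static void sketch_boost_request(struct i915_request *rq, int prio)
{
	struct i915_sched_attr attr = { .priority = prio };
	struct intel_engine_cs *engine = rq->engine;

	/* The hook is optional; the callers above test it before use. */
	if (engine->sched_engine->schedule)
		engine->sched_engine->schedule(rq, &attr);
}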
intel_breadcrumbs.c
249 if (rq->engine->sched_engine->retire_inflight_request_prio) in signal_irq_work()
250 rq->engine->sched_engine->retire_inflight_request_prio(rq); in signal_irq_work()
intel_engine_pm.c
257 GEM_BUG_ON(engine->sched_engine->queue_priority_hint != INT_MIN); in __engine_park()
intel_engine_types.h
390 struct i915_sched_engine *sched_engine; member
intel_engine_user.c
113 if (engine->sched_engine->schedule) in set_scheduler_caps()
selftest_reset.c
324 struct tasklet_struct *t = &engine->sched_engine->tasklet; in igt_atomic_engine_reset()
selftest_lrc.c
57 tasklet_hi_schedule(&engine->sched_engine->tasklet); in wait_for_submit()
1742 tasklet_disable(&engine->sched_engine->tasklet); in garbage_reset()
1747 tasklet_enable(&engine->sched_engine->tasklet); in garbage_reset()
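selftest_reset.c and selftest_lrc.c poke the per-sched_engine submission tasklet directly: disable it to quiesce submission around a forced reset, then re-enable and kick it. A sketch of that sequence, with the surrounding reset logic omitted and the helper name hypothetical:

/* Sketch of the quiesce/kick dance around the submission tasklet. */
static void sketch_quiesce_and_kick(struct intel_engine_cs *engine)
{
	struct tasklet_struct * const t = &engine->sched_engine->tasklet;

	tasklet_disable(t);		/* park the submission tasklet */
	/* ... reset or inspect engine state while it is quiet ... */
	tasklet_enable(t);
	tasklet_hi_schedule(t);		/* pick up any backlog afterwards */
}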
selftest_hangcheck.c
940 if (engine->sched_engine->schedule && arg->flags & TEST_PRIORITY) { in active_engine()
945 engine->sched_engine->schedule(rq[idx], &attr); in active_engine()
1895 struct tasklet_struct * const t = &engine->sched_engine->tasklet; in __igt_atomic_reset_engine()
/Linux-v6.1/drivers/gpu/drm/i915/gt/uc/
intel_guc_submission.c
681 lockdep_assert_held(&rq->engine->sched_engine->lock); in __guc_add_request()
905 struct i915_sched_engine * const sched_engine = guc->sched_engine; in guc_dequeue_one_context() local
911 lockdep_assert_held(&sched_engine->lock); in guc_dequeue_one_context()
929 while ((rb = rb_first_cached(&sched_engine->queue))) { in guc_dequeue_one_context()
960 rb_erase_cached(&p->node, &sched_engine->queue); in guc_dequeue_one_context()
1014 sched_engine->tasklet.callback = NULL; in guc_dequeue_one_context()
1015 tasklet_disable_nosync(&sched_engine->tasklet); in guc_dequeue_one_context()
1019 tasklet_schedule(&sched_engine->tasklet); in guc_dequeue_one_context()
1025 struct i915_sched_engine *sched_engine = in guc_submission_tasklet() local
1026 from_tasklet(sched_engine, t, tasklet); in guc_submission_tasklet()
[all …]
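The GuC back end owns its own sched_engine (guc->sched_engine, see intel_guc.h below) and drains it from a tasklet: under sched_engine->lock it walks the rb-tree highest priority first, consumes each priolist's requests, then erases and frees the node. A stripped-down sketch of that loop shape; the actual submission call is elided.

/* Sketch of the dequeue loop shape seen in guc_dequeue_one_context(). */
static void sketch_drain_queue(struct i915_sched_engine *se)
{
	struct rb_node *rb;

	lockdep_assert_held(&se->lock);

	while ((rb = rb_first_cached(&se->queue))) {
		struct i915_priolist *p = rb_entry(rb, struct i915_priolist, node);
		struct i915_request *rq, *rn;

		list_for_each_entry_safe(rq, rn, &p->requests, sched.link) {
			list_del_init(&rq->sched.link);
			/* ... hand rq to the back end here ... */
		}

		rb_erase_cached(&p->node, &se->queue);
		i915_priolist_free(p);
	}
}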
intel_guc.h
46 struct i915_sched_engine *sched_engine; member
/Linux-v6.1/drivers/gpu/drm/i915/gem/
i915_gem_wait.c
105 if (engine->sched_engine->schedule) in fence_set_priority()
106 engine->sched_engine->schedule(rq, attr); in fence_set_priority()