
Searched refs: I915_NUM_ENGINES (results 1 – 25 of 30), sorted by relevance. A short usage sketch follows the listing.

/Linux-v4.19/drivers/gpu/drm/i915/
i915_gpu_error.h:120  u32 semaphore_mboxes[I915_NUM_ENGINES - 1];
    176  } engine[I915_NUM_ENGINES];
    192  } *active_bo[I915_NUM_ENGINES], *pinned_bo;
    193  u32 active_bo_count[I915_NUM_ENGINES], pinned_bo_count;
    194  struct i915_address_space *active_vm[I915_NUM_ENGINES];
    272  #define I915_RESET_ENGINE (I915_WEDGED - I915_NUM_ENGINES)
    275  u32 reset_engine_count[I915_NUM_ENGINES];
intel_guc_submission.h:73  u64 submissions[I915_NUM_ENGINES];
i915_gem.h:74  #define I915_NUM_ENGINES 8  macro
i915_timeline.c:24  BUILD_BUG_ON(KSYNCMAP < I915_NUM_ENGINES);  in i915_timeline_init()
i915_timeline.h:73  u32 global_sync[I915_NUM_ENGINES];
intel_guc.h:73  struct guc_preempt_work preempt_work[I915_NUM_ENGINES];
i915_gem_context.h:163  } __engine[I915_NUM_ENGINES];
intel_hangcheck.c:142  if (signaller->hangcheck.deadlock >= I915_NUM_ENGINES)  in semaphore_passed()
intel_uncore.c:1893  const u32 hw_engine_mask[I915_NUM_ENGINES] = {  in gen6_reset_engines()
    1932  const u32 hw_engine_mask[I915_NUM_ENGINES] = {  in gen11_reset_engines()
    1944  BUILD_BUG_ON(VECS2 + 1 != I915_NUM_ENGINES);  in gen11_reset_engines()
i915_gpu_error.c:42  if (id >= I915_NUM_ENGINES)  in engine_lookup()
    1092  for (i = 0; i < I915_NUM_ENGINES; i++) {  in i915_error_generate_code()
    1454  for (i = 0; i < I915_NUM_ENGINES; i++) {  in gem_record_rings()
intel_device_info.c:753  BUILD_BUG_ON(I915_NUM_ENGINES >  in intel_device_info_runtime_init()
intel_guc_submission.c:1315  I915_NUM_ENGINES > GUC_WQ_SIZE);  in intel_guc_submission_enable()
i915_drv.h:1627  struct intel_engine_cs *engine[I915_NUM_ENGINES];
    2154  (id__) < I915_NUM_ENGINES; \
/Linux-v4.19/drivers/gpu/drm/i915/gvt/
scheduler.h:42  struct intel_vgpu_workload *current_workload[I915_NUM_ENGINES];
    47  struct intel_vgpu *engine_owner[I915_NUM_ENGINES];
    50  struct task_struct *thread[I915_NUM_ENGINES];
    51  wait_queue_head_t waitq[I915_NUM_ENGINES];
gvt.h:157  struct intel_vgpu_execlist execlist[I915_NUM_ENGINES];
    158  struct list_head workload_q_head[I915_NUM_ENGINES];
    162  DECLARE_BITMAP(shadow_ctx_desc_updated, I915_NUM_ENGINES);
    163  DECLARE_BITMAP(tlb_handle_pending, I915_NUM_ENGINES);
    164  void *ring_scan_buffer[I915_NUM_ENGINES];
    165  int ring_scan_buffer_size[I915_NUM_ENGINES];
    198  u32 hws_pga[I915_NUM_ENGINES];
    323  struct notifier_block shadow_ctx_notifier_block[I915_NUM_ENGINES];
    339  int ctx_mmio_count[I915_NUM_ENGINES];
debugfs.c:150  val &= (1 << I915_NUM_ENGINES) - 1;  in vgpu_scan_nonprivbb_set()
    164  for (id = 0; id < I915_NUM_ENGINES; id++) {  in vgpu_scan_nonprivbb_set()
sched_policy.c:470  for (ring_id = 0; ring_id < I915_NUM_ENGINES; ring_id++) {  in intel_vgpu_stop_schedule()
vgpu.c:327  for (i = 0; i < I915_NUM_ENGINES; i++)  in intel_gvt_create_idle_vgpu()
mmio_context.c:157  u32 control_table[I915_NUM_ENGINES][GEN9_MOCS_SIZE];
scheduler.c:1111  bitmap_zero(s->shadow_ctx_desc_updated, I915_NUM_ENGINES);  in intel_vgpu_setup_submission()
    1129  bitmap_zero(s->tlb_handle_pending, I915_NUM_ENGINES);  in intel_vgpu_setup_submission()
cmd_parser.c:560  static struct decode_info *ring_decode_info[I915_NUM_ENGINES][8] = {
    2897  for_each_set_bit(ring, &rings, I915_NUM_ENGINES) {  in find_cmd_entry_any_ring()
/Linux-v4.19/drivers/gpu/drm/i915/selftests/
i915_timeline.c:100  return i915_prandom_u32_max_state(I915_NUM_ENGINES, rnd);  in random_engine()
mock_engine.c:184  GEM_BUG_ON(id >= I915_NUM_ENGINES);  in mock_engine()
i915_request.c:635  struct i915_request *request[I915_NUM_ENGINES];  in live_all_engines()
    736  struct i915_request *request[I915_NUM_ENGINES] = {};  in live_sequential_engines()
huge_pages.c:1110  static struct intel_engine_cs *engines[I915_NUM_ENGINES];  in igt_write_huge()
    1149  order = i915_random_order(n * I915_NUM_ENGINES, &prng);  in igt_write_huge()
    1165  i = (i + 1) % (n * I915_NUM_ENGINES);  in igt_write_huge()
