Home
last modified time | relevance | path

Searched refs: i915 (Results 1 – 25 of 208) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9

/Linux-v5.4/drivers/gpu/drm/i915/selftests/
Dmock_gem_device.c39 void mock_device_flush(struct drm_i915_private *i915) in mock_device_flush() argument
44 lockdep_assert_held(&i915->drm.struct_mutex); in mock_device_flush()
47 for_each_engine(engine, i915, id) in mock_device_flush()
49 } while (i915_retire_requests(i915)); in mock_device_flush()
54 struct drm_i915_private *i915 = to_i915(dev); in mock_device_release() local
58 mutex_lock(&i915->drm.struct_mutex); in mock_device_release()
59 mock_device_flush(i915); in mock_device_release()
60 mutex_unlock(&i915->drm.struct_mutex); in mock_device_release()
62 flush_work(&i915->gem.idle_work); in mock_device_release()
63 i915_gem_drain_workqueue(i915); in mock_device_release()
[all …]
Di915_gem.c18 static int switch_to_context(struct drm_i915_private *i915, in switch_to_context() argument
24 for_each_engine(engine, i915, id) { in switch_to_context()
37 static void trash_stolen(struct drm_i915_private *i915) in trash_stolen() argument
39 struct i915_ggtt *ggtt = &i915->ggtt; in trash_stolen()
41 const resource_size_t size = resource_size(&i915->dsm); in trash_stolen()
46 const dma_addr_t dma = i915->dsm.start + page; in trash_stolen()
63 static void simulate_hibernate(struct drm_i915_private *i915) in simulate_hibernate() argument
67 wakeref = intel_runtime_pm_get(&i915->runtime_pm); in simulate_hibernate()
76 trash_stolen(i915); in simulate_hibernate()
78 intel_runtime_pm_put(&i915->runtime_pm, wakeref); in simulate_hibernate()
[all …]
Di915_gem_evict.c46 static int populate_ggtt(struct drm_i915_private *i915, in populate_ggtt() argument
56 obj = i915_gem_object_create_internal(i915, I915_GTT_PAGE_SIZE); in populate_ggtt()
73 count, i915->ggtt.vm.total / PAGE_SIZE); in populate_ggtt()
99 if (list_empty(&i915->ggtt.vm.bound_list)) { in populate_ggtt()
107 static void unpin_ggtt(struct drm_i915_private *i915) in unpin_ggtt() argument
109 struct i915_ggtt *ggtt = &i915->ggtt; in unpin_ggtt()
113 list_for_each_entry(vma, &i915->ggtt.vm.bound_list, vm_link) in unpin_ggtt()
119 static void cleanup_objects(struct drm_i915_private *i915, in cleanup_objects() argument
130 mutex_unlock(&i915->drm.struct_mutex); in cleanup_objects()
132 i915_gem_drain_freed_objects(i915); in cleanup_objects()
[all …]
Digt_live_test.c14 struct drm_i915_private *i915, in igt_live_test_begin() argument
22 lockdep_assert_held(&i915->drm.struct_mutex); in igt_live_test_begin()
24 t->i915 = i915; in igt_live_test_begin()
28 err = i915_gem_wait_for_idle(i915, in igt_live_test_begin()
38 t->reset_global = i915_reset_count(&i915->gpu_error); in igt_live_test_begin()
40 for_each_engine(engine, i915, id) in igt_live_test_begin()
42 i915_reset_engine_count(&i915->gpu_error, engine); in igt_live_test_begin()
49 struct drm_i915_private *i915 = t->i915; in igt_live_test_end() local
53 lockdep_assert_held(&i915->drm.struct_mutex); in igt_live_test_end()
55 if (igt_flush_test(i915, I915_WAIT_LOCKED)) in igt_live_test_end()
[all …]
Di915_request.c42 struct drm_i915_private *i915 = arg; in igt_add_request() local
48 mutex_lock(&i915->drm.struct_mutex); in igt_add_request()
49 request = mock_request(i915->engine[RCS0]->kernel_context, HZ / 10); in igt_add_request()
57 mutex_unlock(&i915->drm.struct_mutex); in igt_add_request()
64 struct drm_i915_private *i915 = arg; in igt_wait_request() local
70 mutex_lock(&i915->drm.struct_mutex); in igt_wait_request()
71 request = mock_request(i915->engine[RCS0]->kernel_context, T); in igt_wait_request()
129 mock_device_flush(i915); in igt_wait_request()
130 mutex_unlock(&i915->drm.struct_mutex); in igt_wait_request()
137 struct drm_i915_private *i915 = arg; in igt_fence_wait() local
[all …]
Di915_active.c62 static struct live_active *__live_alloc(struct drm_i915_private *i915) in __live_alloc() argument
71 i915_active_init(i915, &active->base, __live_active, __live_retire); in __live_alloc()
77 __live_active_setup(struct drm_i915_private *i915) in __live_active_setup() argument
86 active = __live_alloc(i915); in __live_active_setup()
100 for_each_engine(engine, i915, id) { in __live_active_setup()
147 struct drm_i915_private *i915 = arg; in live_active_wait() local
154 mutex_lock(&i915->drm.struct_mutex); in live_active_wait()
155 wakeref = intel_runtime_pm_get(&i915->runtime_pm); in live_active_wait()
157 active = __live_active_setup(i915); in live_active_wait()
171 if (igt_flush_test(i915, I915_WAIT_LOCKED)) in live_active_wait()
[all …]
/Linux-v5.4/drivers/gpu/drm/i915/gem/
Di915_gem_pm.c30 static void i915_gem_park(struct drm_i915_private *i915) in i915_gem_park() argument
35 lockdep_assert_held(&i915->drm.struct_mutex); in i915_gem_park()
37 for_each_engine(engine, i915, id) in i915_gem_park()
40 i915_vma_parked(i915); in i915_gem_park()
47 struct drm_i915_private *i915 = in idle_work_handler() local
48 container_of(work, typeof(*i915), gem.idle_work); in idle_work_handler()
51 cancel_delayed_work_sync(&i915->gem.retire_work); in idle_work_handler()
52 mutex_lock(&i915->drm.struct_mutex); in idle_work_handler()
54 intel_wakeref_lock(&i915->gt.wakeref); in idle_work_handler()
55 park = (!intel_wakeref_is_active(&i915->gt.wakeref) && in idle_work_handler()
[all …]
Di915_gem_shrinker.c19 static bool shrinker_lock(struct drm_i915_private *i915, in shrinker_lock() argument
23 struct mutex *m = &i915->drm.struct_mutex; in shrinker_lock()
45 static void shrinker_unlock(struct drm_i915_private *i915, bool unlock) in shrinker_unlock() argument
50 mutex_unlock(&i915->drm.struct_mutex); in shrinker_unlock()
146 i915_gem_shrink(struct drm_i915_private *i915, in i915_gem_shrink() argument
155 { &i915->mm.purge_list, ~0u }, in i915_gem_shrink()
157 &i915->mm.shrink_list, in i915_gem_shrink()
167 if (!shrinker_lock(i915, shrink, &unlock)) in i915_gem_shrink()
179 trace_i915_gem_shrink(i915, target, shrink); in i915_gem_shrink()
187 wakeref = intel_runtime_pm_get_if_in_use(&i915->runtime_pm); in i915_gem_shrink()
[all …]
/Linux-v5.4/drivers/gpu/drm/i915/
Dintel_sideband.h28 void vlv_iosf_sb_get(struct drm_i915_private *i915, unsigned long ports);
29 u32 vlv_iosf_sb_read(struct drm_i915_private *i915, u8 port, u32 reg);
30 void vlv_iosf_sb_write(struct drm_i915_private *i915,
32 void vlv_iosf_sb_put(struct drm_i915_private *i915, unsigned long ports);
34 static inline void vlv_bunit_get(struct drm_i915_private *i915) in vlv_bunit_get() argument
36 vlv_iosf_sb_get(i915, BIT(VLV_IOSF_SB_BUNIT)); in vlv_bunit_get()
39 u32 vlv_bunit_read(struct drm_i915_private *i915, u32 reg);
40 void vlv_bunit_write(struct drm_i915_private *i915, u32 reg, u32 val);
42 static inline void vlv_bunit_put(struct drm_i915_private *i915) in vlv_bunit_put() argument
44 vlv_iosf_sb_put(i915, BIT(VLV_IOSF_SB_BUNIT)); in vlv_bunit_put()
[all …]
Dintel_sideband.c48 static void __vlv_punit_get(struct drm_i915_private *i915) in __vlv_punit_get() argument
62 if (IS_VALLEYVIEW(i915)) { in __vlv_punit_get()
63 pm_qos_update_request(&i915->sb_qos, 0); in __vlv_punit_get()
68 static void __vlv_punit_put(struct drm_i915_private *i915) in __vlv_punit_put() argument
70 if (IS_VALLEYVIEW(i915)) in __vlv_punit_put()
71 pm_qos_update_request(&i915->sb_qos, PM_QOS_DEFAULT_VALUE); in __vlv_punit_put()
76 void vlv_iosf_sb_get(struct drm_i915_private *i915, unsigned long ports) in vlv_iosf_sb_get() argument
79 __vlv_punit_get(i915); in vlv_iosf_sb_get()
81 mutex_lock(&i915->sb_lock); in vlv_iosf_sb_get()
84 void vlv_iosf_sb_put(struct drm_i915_private *i915, unsigned long ports) in vlv_iosf_sb_put() argument
[all …]
Di915_getparam.c12 struct drm_i915_private *i915 = to_i915(dev); in i915_getparam_ioctl() local
13 const struct sseu_dev_info *sseu = &RUNTIME_INFO(i915)->sseu; in i915_getparam_ioctl()
25 value = i915->drm.pdev->device; in i915_getparam_ioctl()
28 value = i915->drm.pdev->revision; in i915_getparam_ioctl()
31 value = i915->ggtt.num_fences; in i915_getparam_ioctl()
34 value = !!i915->overlay; in i915_getparam_ioctl()
37 value = !!intel_engine_lookup_user(i915, in i915_getparam_ioctl()
41 value = !!intel_engine_lookup_user(i915, in i915_getparam_ioctl()
45 value = !!intel_engine_lookup_user(i915, in i915_getparam_ioctl()
49 value = !!intel_engine_lookup_user(i915, in i915_getparam_ioctl()
[all …]
Dintel_wopcm.c79 struct drm_i915_private *i915 = wopcm_to_i915(wopcm); in intel_wopcm_init_early() local
81 if (!HAS_GT_UC(i915)) in intel_wopcm_init_early()
84 if (INTEL_GEN(i915) >= 11) in intel_wopcm_init_early()
89 DRM_DEV_DEBUG_DRIVER(i915->drm.dev, "WOPCM: %uK\n", wopcm->size / 1024); in intel_wopcm_init_early()
92 static inline u32 context_reserved_size(struct drm_i915_private *i915) in context_reserved_size() argument
94 if (IS_GEN9_LP(i915)) in context_reserved_size()
96 else if (INTEL_GEN(i915) >= 10) in context_reserved_size()
102 static inline bool gen9_check_dword_gap(struct drm_i915_private *i915, in gen9_check_dword_gap() argument
115 dev_err(i915->drm.dev, in gen9_check_dword_gap()
125 static inline bool gen9_check_huc_fw_fits(struct drm_i915_private *i915, in gen9_check_huc_fw_fits() argument
[all …]
Di915_gem_fence_reg.c69 if (INTEL_GEN(fence->i915) >= 6) { in i965_write_fence_reg()
98 struct intel_uncore *uncore = &fence->i915->uncore; in i965_write_fence_reg()
135 if (is_y_tiled && HAS_128_BYTE_Y_TILING(fence->i915)) in i915_write_fence_reg()
151 struct intel_uncore *uncore = &fence->i915->uncore; in i915_write_fence_reg()
183 struct intel_uncore *uncore = &fence->i915->uncore; in i830_write_fence_reg()
200 if (IS_GEN(fence->i915, 2)) in fence_write()
202 else if (IS_GEN(fence->i915, 3)) in fence_write()
258 list_move(&fence->link, &fence->i915->ggtt.fence_list); in fence_update()
271 wakeref = intel_runtime_pm_get_if_in_use(&fence->i915->runtime_pm); in fence_update()
282 list_move_tail(&fence->link, &fence->i915->ggtt.fence_list); in fence_update()
[all …]
Di915_pmu.c82 struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu); in pmu_needs_timer() local
110 else if (i915->caps.scheduler & I915_SCHEDULER_CAP_ENGINE_BUSY_STATS) in pmu_needs_timer()
119 void i915_pmu_gt_parked(struct drm_i915_private *i915) in i915_pmu_gt_parked() argument
121 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_gt_parked()
146 void i915_pmu_gt_unparked(struct drm_i915_private *i915) in i915_pmu_gt_unparked() argument
148 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_gt_unparked()
170 struct drm_i915_private *i915 = gt->i915; in engines_sample() local
174 if ((i915->pmu.enable & ENGINE_SAMPLE_MASK) == 0) in engines_sample()
177 for_each_engine(engine, i915, id) { in engines_sample()
227 struct drm_i915_private *i915 = gt->i915; in frequency_sample() local
[all …]
/Linux-v5.4/drivers/gpu/drm/i915/display/
Dintel_frontbuffer.c78 static void frontbuffer_flush(struct drm_i915_private *i915, in frontbuffer_flush() argument
83 spin_lock(&i915->fb_tracking.lock); in frontbuffer_flush()
84 frontbuffer_bits &= ~i915->fb_tracking.busy_bits; in frontbuffer_flush()
85 spin_unlock(&i915->fb_tracking.lock); in frontbuffer_flush()
91 intel_edp_drrs_flush(i915, frontbuffer_bits); in frontbuffer_flush()
92 intel_psr_flush(i915, frontbuffer_bits, origin); in frontbuffer_flush()
93 intel_fbc_flush(i915, frontbuffer_bits, origin); in frontbuffer_flush()
108 void intel_frontbuffer_flip_prepare(struct drm_i915_private *i915, in intel_frontbuffer_flip_prepare() argument
111 spin_lock(&i915->fb_tracking.lock); in intel_frontbuffer_flip_prepare()
112 i915->fb_tracking.flip_bits |= frontbuffer_bits; in intel_frontbuffer_flip_prepare()
[all …]
Dintel_quirks.c14 static void quirk_ssc_force_disable(struct drm_i915_private *i915) in quirk_ssc_force_disable() argument
16 i915->quirks |= QUIRK_LVDS_SSC_DISABLE; in quirk_ssc_force_disable()
24 static void quirk_invert_brightness(struct drm_i915_private *i915) in quirk_invert_brightness() argument
26 i915->quirks |= QUIRK_INVERT_BRIGHTNESS; in quirk_invert_brightness()
31 static void quirk_backlight_present(struct drm_i915_private *i915) in quirk_backlight_present() argument
33 i915->quirks |= QUIRK_BACKLIGHT_PRESENT; in quirk_backlight_present()
40 static void quirk_increase_t12_delay(struct drm_i915_private *i915) in quirk_increase_t12_delay() argument
42 i915->quirks |= QUIRK_INCREASE_T12_DELAY; in quirk_increase_t12_delay()
50 static void quirk_increase_ddi_disabled_time(struct drm_i915_private *i915) in quirk_increase_ddi_disabled_time() argument
52 i915->quirks |= QUIRK_INCREASE_DDI_DISABLED_TIME; in quirk_increase_ddi_disabled_time()
[all …]
/Linux-v5.4/drivers/gpu/drm/i915/gt/
Dintel_gt_pm.c15 static void pm_notify(struct drm_i915_private *i915, int state) in pm_notify() argument
17 blocking_notifier_call_chain(&i915->gt.pm_notifications, state, i915); in pm_notify()
23 struct drm_i915_private *i915 = gt->i915; in __gt_unpark() local
38 gt->awake = intel_display_power_get(i915, POWER_DOMAIN_GT_IRQ); in __gt_unpark()
41 if (NEEDS_RC6_CTX_CORRUPTION_WA(i915)) in __gt_unpark()
42 intel_uncore_forcewake_get(&i915->uncore, FORCEWAKE_ALL); in __gt_unpark()
44 intel_enable_gt_powersave(i915); in __gt_unpark()
46 i915_update_gfx_val(i915); in __gt_unpark()
47 if (INTEL_GEN(i915) >= 6) in __gt_unpark()
48 gen6_rps_busy(i915); in __gt_unpark()
[all …]
Dintel_workarounds.c240 struct drm_i915_private *i915 = engine->i915; in bdw_ctx_workarounds_init() local
262 (IS_BDW_GT3(i915) ? HDC_FENCE_DEST_SLM_DISABLE : 0)); in bdw_ctx_workarounds_init()
280 struct drm_i915_private *i915 = engine->i915; in gen9_ctx_workarounds_init() local
282 if (HAS_LLC(i915)) { in gen9_ctx_workarounds_init()
339 if (IS_SKYLAKE(i915) || IS_KABYLAKE(i915) || IS_COFFEELAKE(i915)) in gen9_ctx_workarounds_init()
366 if (IS_GEN9_LP(i915)) in gen9_ctx_workarounds_init()
373 struct drm_i915_private *i915 = engine->i915; in skl_tune_iz_hashing() local
384 if (!is_power_of_2(RUNTIME_INFO(i915)->sseu.subslice_7eu[i])) in skl_tune_iz_hashing()
393 ss = ffs(RUNTIME_INFO(i915)->sseu.subslice_7eu[i]) - 1; in skl_tune_iz_hashing()
434 struct drm_i915_private *i915 = engine->i915; in kbl_ctx_workarounds_init() local
[all …]
Dselftest_lrc.c24 struct drm_i915_private *i915 = arg; in live_sanitycheck() local
32 if (!HAS_LOGICAL_RING_CONTEXTS(i915)) in live_sanitycheck()
35 mutex_lock(&i915->drm.struct_mutex); in live_sanitycheck()
36 wakeref = intel_runtime_pm_get(&i915->runtime_pm); in live_sanitycheck()
38 if (igt_spinner_init(&spin, &i915->gt)) in live_sanitycheck()
41 ctx = kernel_context(i915); in live_sanitycheck()
58 intel_gt_set_wedged(&i915->gt); in live_sanitycheck()
64 if (igt_flush_test(i915, I915_WAIT_LOCKED)) { in live_sanitycheck()
77 intel_runtime_pm_put(&i915->runtime_pm, wakeref); in live_sanitycheck()
78 mutex_unlock(&i915->drm.struct_mutex); in live_sanitycheck()
[all …]
Dintel_gt.c11 void intel_gt_init_early(struct intel_gt *gt, struct drm_i915_private *i915) in intel_gt_init_early() argument
13 gt->i915 = i915; in intel_gt_init_early()
14 gt->uncore = &i915->uncore; in intel_gt_init_early()
27 void intel_gt_init_hw(struct drm_i915_private *i915) in intel_gt_init_hw() argument
29 i915->gt.ggtt = &i915->ggtt; in intel_gt_init_hw()
57 struct drm_i915_private *i915 = gt->i915; in intel_gt_clear_error_registers() local
61 if (!IS_GEN(i915, 2)) in intel_gt_clear_error_registers()
64 if (INTEL_GEN(i915) < 4) in intel_gt_clear_error_registers()
82 if (INTEL_GEN(i915) >= 12) { in intel_gt_clear_error_registers()
85 } else if (INTEL_GEN(i915) >= 8) { in intel_gt_clear_error_registers()
[all …]
Dselftest_hangcheck.c61 h->ctx = kernel_context(gt->i915); in hang_init()
67 h->hws = i915_gem_object_create_internal(gt->i915, PAGE_SIZE); in hang_init()
73 h->obj = i915_gem_object_create_internal(gt->i915, PAGE_SIZE); in hang_init()
88 i915_coherent_map_type(gt->i915)); in hang_init()
143 obj = i915_gem_object_create_internal(gt->i915, PAGE_SIZE); in hang_create_request()
147 vaddr = i915_gem_object_pin_map(obj, i915_coherent_map_type(gt->i915)); in hang_create_request()
190 if (INTEL_GEN(gt->i915) >= 8) { in hang_create_request()
204 } else if (INTEL_GEN(gt->i915) >= 6) { in hang_create_request()
217 } else if (INTEL_GEN(gt->i915) >= 4) { in hang_create_request()
253 if (INTEL_GEN(gt->i915) <= 5) in hang_create_request()
[all …]
Dselftest_workarounds.c37 reference_lists_init(struct drm_i915_private *i915, struct wa_lists *lists) in reference_lists_init() argument
45 gt_init_workarounds(i915, &lists->gt_wa_list); in reference_lists_init()
48 for_each_engine(engine, i915, id) { in reference_lists_init()
62 reference_lists_fini(struct drm_i915_private *i915, struct wa_lists *lists) in reference_lists_fini() argument
67 for_each_engine(engine, i915, id) in reference_lists_fini()
84 result = i915_gem_object_create_internal(engine->i915, PAGE_SIZE); in read_nonprivs()
124 if (INTEL_GEN(ctx->i915) >= 8) in read_nonprivs()
194 intel_wedge_on_timeout(&wedge, &ctx->i915->gt, HZ / 5) /* safety net! */ in check_whitelist()
197 if (intel_gt_is_wedged(&ctx->i915->gt)) in check_whitelist()
249 ctx = kernel_context(engine->i915); in switch_to_scratch_context()
[all …]
/Linux-v5.4/drivers/gpu/drm/i915/gem/selftests/
Di915_gem_mman.c181 struct drm_i915_private *i915 = arg; in igt_partial_tiling() local
195 obj = huge_gem_object(i915, in igt_partial_tiling()
197 (1 + next_prime_number(i915->ggtt.vm.total >> PAGE_SHIFT)) << PAGE_SHIFT); in igt_partial_tiling()
208 mutex_lock(&i915->drm.struct_mutex); in igt_partial_tiling()
209 wakeref = intel_runtime_pm_get(&i915->runtime_pm); in igt_partial_tiling()
233 if (i915->quirks & QUIRK_PIN_SWIZZLED_PAGES) in igt_partial_tiling()
244 tile.swizzle = i915->mm.bit_6_swizzle_x; in igt_partial_tiling()
247 tile.swizzle = i915->mm.bit_6_swizzle_y; in igt_partial_tiling()
256 if (INTEL_GEN(i915) <= 2) { in igt_partial_tiling()
261 HAS_128_BYTE_Y_TILING(i915)) { in igt_partial_tiling()
[all …]
Di915_gem_context.c31 struct drm_i915_private *i915 = arg; in live_nop_switch() local
48 if (!DRIVER_CAPS(i915)->has_logical_contexts) in live_nop_switch()
51 file = mock_file(i915); in live_nop_switch()
55 mutex_lock(&i915->drm.struct_mutex); in live_nop_switch()
64 ctx[n] = live_context(i915, file); in live_nop_switch()
71 for_each_engine(engine, i915, id) { in live_nop_switch()
87 intel_gt_set_wedged(&i915->gt); in live_nop_switch()
97 err = igt_live_test_begin(&t, i915, __func__, engine->name); in live_nop_switch()
131 intel_gt_set_wedged(&i915->gt); in live_nop_switch()
154 mutex_unlock(&i915->drm.struct_mutex); in live_nop_switch()
[all …]
/Linux-v5.4/Documentation/gpu/
Di915.rst2 drm/i915 Intel GFX Driver
5 The drm/i915 driver supports all (with the exception of some very early
19 .. kernel-doc:: drivers/gpu/drm/i915/intel_runtime_pm.c
22 .. kernel-doc:: drivers/gpu/drm/i915/intel_runtime_pm.c
25 .. kernel-doc:: drivers/gpu/drm/i915/intel_uncore.c
31 .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
34 .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
37 .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
40 .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
46 .. kernel-doc:: drivers/gpu/drm/i915/i915_vgpu.c
[all …]

Pages: 1 2 3 4 5 6 7 8 9