/Linux-v4.19/drivers/gpu/drm/i915/selftests/
(Identifier search results for "i915". Each match shows the file's line number, the matched source line, the enclosing function, and, where reported, the identifier's role: argument, local, or member.)

D | mock_gem_device.c
    36  void mock_device_flush(struct drm_i915_private *i915)    in mock_device_flush() argument
    41  lockdep_assert_held(&i915->drm.struct_mutex);    in mock_device_flush()
    43  for_each_engine(engine, i915, id)    in mock_device_flush()
    46  i915_retire_requests(i915);    in mock_device_flush()
    47  GEM_BUG_ON(i915->gt.active_requests);    in mock_device_flush()
    52  struct drm_i915_private *i915 = to_i915(dev);    in mock_device_release() local
    56  mutex_lock(&i915->drm.struct_mutex);    in mock_device_release()
    57  mock_device_flush(i915);    in mock_device_release()
    58  i915_gem_contexts_lost(i915);    in mock_device_release()
    59  mutex_unlock(&i915->drm.struct_mutex);    in mock_device_release()
    [all …]

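Read in order, the mock_gem_device.c matches sketch the teardown discipline used throughout these selftests: flush under struct_mutex, idle every engine, retire requests, assert nothing is still active, and only then drop the contexts. A minimal sketch of the release path, assuming the in-tree selftest helpers shown above (example_release is a hypothetical name; this compiles only inside the i915 tree):

    /* Sketch: the flush-then-release ordering visible in the matches above. */
    static void example_release(struct drm_device *dev)
    {
            struct drm_i915_private *i915 = to_i915(dev);

            mutex_lock(&i915->drm.struct_mutex);
            mock_device_flush(i915);        /* idle engines, retire requests */
            i915_gem_contexts_lost(i915);   /* notify contexts of teardown */
            mutex_unlock(&i915->drm.struct_mutex);
    }
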
D | i915_gem_evict.c
    32  static int populate_ggtt(struct drm_i915_private *i915)    in populate_ggtt() argument
    38  size + I915_GTT_PAGE_SIZE <= i915->ggtt.vm.total;    in populate_ggtt()
    42  obj = i915_gem_object_create_internal(i915, I915_GTT_PAGE_SIZE);    in populate_ggtt()
    51  if (!list_empty(&i915->mm.unbound_list)) {    in populate_ggtt()
    53  list_for_each_entry(obj, &i915->mm.unbound_list, mm.link)    in populate_ggtt()
    60  if (list_empty(&i915->ggtt.vm.inactive_list)) {    in populate_ggtt()
    68  static void unpin_ggtt(struct drm_i915_private *i915)    in unpin_ggtt() argument
    72  list_for_each_entry(vma, &i915->ggtt.vm.inactive_list, vm_link)    in unpin_ggtt()
    76  static void cleanup_objects(struct drm_i915_private *i915)    in cleanup_objects() argument
    80  list_for_each_entry_safe(obj, on, &i915->mm.unbound_list, mm.link)    in cleanup_objects()
    [all …]

D | intel_hangcheck.c
    38  struct drm_i915_private *i915;    member
    46  static int hang_init(struct hang *h, struct drm_i915_private *i915)    in hang_init() argument
    52  h->i915 = i915;    in hang_init()
    54  h->ctx = kernel_context(i915);    in hang_init()
    58  h->hws = i915_gem_object_create_internal(i915, PAGE_SIZE);    in hang_init()
    64  h->obj = i915_gem_object_create_internal(i915, PAGE_SIZE);    in hang_init()
    79  HAS_LLC(i915) ? I915_MAP_WB : I915_MAP_WC);    in hang_init()
    108 struct drm_i915_private *i915 = h->i915;    in emit_recurse_batch() local
    112 &i915->ggtt.vm;    in emit_recurse_batch()
    153 if (INTEL_GEN(i915) >= 8) {    in emit_recurse_batch()
    [all …]

D | i915_request.c
    34  struct drm_i915_private *i915 = arg;    in igt_add_request() local
    40  mutex_lock(&i915->drm.struct_mutex);    in igt_add_request()
    41  request = mock_request(i915->engine[RCS],    in igt_add_request()
    42  i915->kernel_context,    in igt_add_request()
    51  mutex_unlock(&i915->drm.struct_mutex);    in igt_add_request()
    58  struct drm_i915_private *i915 = arg;    in igt_wait_request() local
    64  mutex_lock(&i915->drm.struct_mutex);    in igt_wait_request()
    65  request = mock_request(i915->engine[RCS], i915->kernel_context, T);    in igt_wait_request()
    120 mock_device_flush(i915);    in igt_wait_request()
    121 mutex_unlock(&i915->drm.struct_mutex);    in igt_wait_request()
    [all …]

D | i915_gem_object.c
    32  struct drm_i915_private *i915 = arg;    in igt_gem_object() local
    38  obj = i915_gem_object_create(i915, PAGE_SIZE);    in igt_gem_object()
    53  struct drm_i915_private *i915 = arg;    in igt_phys_object() local
    61  obj = i915_gem_object_create(i915, PAGE_SIZE);    in igt_phys_object()
    68  mutex_lock(&i915->drm.struct_mutex);    in igt_phys_object()
    70  mutex_unlock(&i915->drm.struct_mutex);    in igt_phys_object()
    89  mutex_lock(&i915->drm.struct_mutex);    in igt_phys_object()
    91  mutex_unlock(&i915->drm.struct_mutex);    in igt_phys_object()
    107 struct drm_i915_private *i915 = arg;    in igt_gem_huge() local
    114 obj = huge_gem_object(i915,    in igt_gem_huge()
    [all …]

D | intel_lrc.c
    13  struct drm_i915_private *i915;    member
    20  static int spinner_init(struct spinner *spin, struct drm_i915_private *i915)    in spinner_init() argument
    26  GEM_BUG_ON(INTEL_GEN(i915) < 8);    in spinner_init()
    29  spin->i915 = i915;    in spinner_init()
    31  spin->hws = i915_gem_object_create_internal(i915, PAGE_SIZE);    in spinner_init()
    37  spin->obj = i915_gem_object_create_internal(i915, PAGE_SIZE);    in spinner_init()
    51  mode = HAS_LLC(i915) ? I915_MAP_WB : I915_MAP_WC;    in spinner_init()
    139 i915_gem_chipset_flush(spin->i915);    in emit_recurse_batch()
    182 i915_gem_chipset_flush(spin->i915);    in spinner_end()
    213 struct drm_i915_private *i915 = arg;    in live_sanitycheck() local
    [all …]

D | i915_gem_context.c
    39  const int gen = INTEL_GEN(vma->vm->i915);    in gpu_fill_dw()
    46  obj = i915_gem_object_create_internal(vma->vm->i915, size);    in gpu_fill_dw()
    117 struct drm_i915_private *i915 = to_i915(obj->base.dev);    in gpu_fill() local
    119 ctx->ppgtt ? &ctx->ppgtt->vm : &i915->ggtt.vm;    in gpu_fill()
    165 if (INTEL_GEN(vm->i915) <= 5)    in gpu_fill()
    298 ctx->ppgtt ? &ctx->ppgtt->vm : &ctx->i915->ggtt.vm;    in create_test_object()
    305 obj = huge_gem_object(ctx->i915, DW_PER_PAGE * PAGE_SIZE, size);    in create_test_object()
    335 struct drm_i915_private *i915 = arg;    in igt_ctx_exec() local
    350 if (!DRIVER_CAPS(i915)->has_logical_contexts)    in igt_ctx_exec()
    353 file = mock_file(i915);    in igt_ctx_exec()
    [all …]

D | mock_context.c
    29  mock_context(struct drm_i915_private *i915,    in mock_context() argument
    42  ctx->i915 = i915;    in mock_context()
    53  ret = ida_simple_get(&i915->contexts.hw_ida,    in mock_context()
    64  ctx->ppgtt = mock_ppgtt(i915, name);    in mock_context()
    86  void mock_init_contexts(struct drm_i915_private *i915)    in mock_init_contexts() argument
    88  INIT_LIST_HEAD(&i915->contexts.list);    in mock_init_contexts()
    89  ida_init(&i915->contexts.hw_ida);    in mock_init_contexts()
    91  INIT_WORK(&i915->contexts.free_work, contexts_free_worker);    in mock_init_contexts()
    92  init_llist_head(&i915->contexts.free_list);    in mock_init_contexts()
    96  live_context(struct drm_i915_private *i915, struct drm_file *file)    in live_context() argument
    [all …]

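The mock_init_contexts() matches are a compact map of the per-device context state the driver keeps: a list of live contexts, an IDA for hardware context ids, and a worker plus lock-free llist for deferred frees. Reassembled into one function, using only the lines present in the excerpt:

    void mock_init_contexts(struct drm_i915_private *i915)
    {
            INIT_LIST_HEAD(&i915->contexts.list);       /* live contexts */
            ida_init(&i915->contexts.hw_ida);           /* hw_id allocator */

            INIT_WORK(&i915->contexts.free_work, contexts_free_worker);
            init_llist_head(&i915->contexts.free_list); /* deferred frees */
    }
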
D | i915_gem_dmabuf.c
    32  struct drm_i915_private *i915 = arg;    in igt_dmabuf_export() local
    36  obj = i915_gem_object_create(i915, PAGE_SIZE);    in igt_dmabuf_export()
    40  dmabuf = i915_gem_prime_export(&i915->drm, &obj->base, 0);    in igt_dmabuf_export()
    54  struct drm_i915_private *i915 = arg;    in igt_dmabuf_import_self() local
    60  obj = i915_gem_object_create(i915, PAGE_SIZE);    in igt_dmabuf_import_self()
    64  dmabuf = i915_gem_prime_export(&i915->drm, &obj->base, 0);    in igt_dmabuf_import_self()
    72  import = i915_gem_prime_import(&i915->drm, dmabuf);    in igt_dmabuf_import_self()
    98  struct drm_i915_private *i915 = arg;    in igt_dmabuf_import() local
    109 obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));    in igt_dmabuf_import()
    117 if (obj->base.dev != &i915->drm) {    in igt_dmabuf_import()
    [all …]

D | i915_gem_gtt.c
    35  static void cleanup_freed_objects(struct drm_i915_private *i915)    in cleanup_freed_objects() argument
    42  mutex_unlock(&i915->drm.struct_mutex);    in cleanup_freed_objects()
    44  i915_gem_drain_freed_objects(i915);    in cleanup_freed_objects()
    46  mutex_lock(&i915->drm.struct_mutex);    in cleanup_freed_objects()
    113 fake_dma_object(struct drm_i915_private *i915, u64 size)    in fake_dma_object() argument
    123 obj = i915_gem_object_alloc(i915);    in fake_dma_object()
    127 drm_gem_private_object_init(&i915->drm, &obj->base, size);    in fake_dma_object()
    216 static int lowlevel_hole(struct drm_i915_private *i915,    in lowlevel_hole() argument
    262 obj = fake_dma_object(i915, BIT_ULL(size));    in lowlevel_hole()
    296 intel_runtime_pm_get(i915);    in lowlevel_hole()
    [all …]

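cleanup_freed_objects() captures a locking detail worth noting: draining freed objects can flush work that itself wants struct_mutex, so the helper drops the lock around the drain and retakes it afterwards. Reassembled from matched lines 35, 42, 44 and 46:

    static void cleanup_freed_objects(struct drm_i915_private *i915)
    {
            /* Drop struct_mutex: the drain may flush work that needs it. */
            mutex_unlock(&i915->drm.struct_mutex);
            i915_gem_drain_freed_objects(i915);
            mutex_lock(&i915->drm.struct_mutex);
    }
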
D | i915_vma.c
    102 static int create_vmas(struct drm_i915_private *i915,    in create_vmas() argument
    144 struct drm_i915_private *i915 = arg;    in igt_vma_create() local
    161 obj = i915_gem_object_create_internal(i915, PAGE_SIZE);    in igt_vma_create()
    171 ctx = mock_context(i915, "mock");    in igt_vma_create()
    178 err = create_vmas(i915, &objects, &contexts);    in igt_vma_create()
    196 err = create_vmas(i915, &objects, &contexts);    in igt_vma_create()
    248 struct drm_i915_private *i915 = arg;    in igt_vma_pin1() local
    259 VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (i915->ggtt.mappable_end - 4096)),    in igt_vma_pin1()
    260 VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (i915->ggtt.mappable_end - 4096)),    in igt_vma_pin1()
    261 VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (i915->ggtt.vm.total - 4096)),    in igt_vma_pin1()
    [all …]

D | intel_workarounds.c
    23  result = i915_gem_object_create_internal(engine->i915, PAGE_SIZE);    in read_nonprivs()
    37  vma = i915_vma_instance(result, &engine->i915->ggtt.vm, NULL);    in read_nonprivs()
    58  if (INTEL_GEN(ctx->i915) >= 8)    in read_nonprivs()
    125 igt_wedge_on_timeout(&wedge, ctx->i915, HZ / 5) /* a safety net! */    in check_whitelist()
    127 if (i915_terminally_wedged(&ctx->i915->gpu_error))    in check_whitelist()
    160 i915_reset(engine->i915, ENGINE_MASK(engine->id), NULL);    in do_device_reset()
    174 ctx = kernel_context(engine->i915);    in switch_to_scratch_context()
    196 ctx = kernel_context(engine->i915);    in check_whitelist_across_reset()
    225 ctx = kernel_context(engine->i915);    in check_whitelist_across_reset()
    243 struct drm_i915_private *i915 = arg;    in live_reset_whitelist() local
    [all …]

D | i915_gem_coherency.c
    195 struct drm_i915_private *i915 = to_i915(obj->base.dev);    in gpu_set() local
    209 rq = i915_request_alloc(i915->engine[RCS], i915->kernel_context);    in gpu_set()
    222 if (INTEL_GEN(i915) >= 8) {    in gpu_set()
    227 } else if (INTEL_GEN(i915) >= 4) {    in gpu_set()
    248 static bool always_valid(struct drm_i915_private *i915)    in always_valid() argument
    253 static bool needs_fence_registers(struct drm_i915_private *i915)    in needs_fence_registers() argument
    255 return !i915_terminally_wedged(&i915->gpu_error);    in needs_fence_registers()
    258 static bool needs_mi_store_dword(struct drm_i915_private *i915)    in needs_mi_store_dword() argument
    260 if (i915_terminally_wedged(&i915->gpu_error))    in needs_mi_store_dword()
    263 return intel_engine_can_store_dword(i915->engine[RCS]);    in needs_mi_store_dword()
    [all …]

D | mock_gtt.c
    59  mock_ppgtt(struct drm_i915_private *i915,    in mock_ppgtt() argument
    69  ppgtt->vm.i915 = i915;    in mock_ppgtt()
    73  i915_address_space_init(&ppgtt->vm, i915);    in mock_ppgtt()
    100 void mock_init_ggtt(struct drm_i915_private *i915)    in mock_init_ggtt() argument
    102 struct i915_ggtt *ggtt = &i915->ggtt;    in mock_init_ggtt()
    104 ggtt->vm.i915 = i915;    in mock_init_ggtt()
    120 i915_address_space_init(&ggtt->vm, i915);    in mock_init_ggtt()
    123 void mock_fini_ggtt(struct drm_i915_private *i915)    in mock_fini_ggtt() argument
    125 struct i915_ggtt *ggtt = &i915->ggtt;    in mock_fini_ggtt()

D | huge_pages.c
    38  static unsigned int get_largest_page_size(struct drm_i915_private *i915,    in get_largest_page_size() argument
    46  if (HAS_PAGE_SIZES(i915, page_size) && rem >= page_size)    in get_largest_page_size()
    159 huge_pages_object(struct drm_i915_private *i915,    in huge_pages_object() argument
    174 obj = i915_gem_object_alloc(i915);    in huge_pages_object()
    178 drm_gem_private_object_init(&i915->drm, &obj->base, size);    in huge_pages_object()
    192 struct drm_i915_private *i915 = to_i915(obj->base.dev);    in fake_get_huge_pages() local
    214 unsigned int page_size = get_largest_page_size(i915, rem);    in fake_get_huge_pages()
    247 struct drm_i915_private *i915 = to_i915(obj->base.dev);    in fake_get_huge_pages_single() local
    264 page_size = get_largest_page_size(i915, obj->base.size);    in fake_get_huge_pages_single()
    308 fake_huge_pages_object(struct drm_i915_private *i915, u64 size, bool single)    in fake_huge_pages_object() argument
    [all …]

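get_largest_page_size() pairs a platform capability check (HAS_PAGE_SIZES) with a remaining-length check. The loop body is elided from the excerpt; a plausible reconstruction (the page_sizes[] table below is an assumption, not shown above) walks the GTT page sizes from largest to smallest:

    static unsigned int get_largest_page_size(struct drm_i915_private *i915,
                                              u64 rem)
    {
            /* Assumed table: largest-first GTT page sizes. */
            static const unsigned int page_sizes[] = {
                    I915_GTT_PAGE_SIZE_2M,
                    I915_GTT_PAGE_SIZE_64K,
                    I915_GTT_PAGE_SIZE_4K,
            };
            int i;

            for (i = 0; i < ARRAY_SIZE(page_sizes); ++i) {
                    unsigned int page_size = page_sizes[i];

                    if (HAS_PAGE_SIZES(i915, page_size) && rem >= page_size)
                            return page_size;
            }

            return 0;
    }
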
/Linux-v4.19/drivers/gpu/drm/i915/

D | intel_uc.c
    53  static int __get_platform_enable_guc(struct drm_i915_private *i915)    in __get_platform_enable_guc() argument
    55  struct intel_uc_fw *guc_fw = &i915->guc.fw;    in __get_platform_enable_guc()
    56  struct intel_uc_fw *huc_fw = &i915->huc.fw;    in __get_platform_enable_guc()
    70  static int __get_default_guc_log_level(struct drm_i915_private *i915)    in __get_default_guc_log_level() argument
    74  if (!HAS_GUC(i915) || !intel_uc_is_using_guc())    in __get_default_guc_log_level()
    104 static void sanitize_options_early(struct drm_i915_private *i915)    in sanitize_options_early() argument
    106 struct intel_uc_fw *guc_fw = &i915->guc.fw;    in sanitize_options_early()
    107 struct intel_uc_fw *huc_fw = &i915->huc.fw;    in sanitize_options_early()
    111 i915_modparams.enable_guc = __get_platform_enable_guc(i915);    in sanitize_options_early()
    122 !HAS_GUC(i915) ? "no GuC hardware" :    in sanitize_options_early()
    [all …]

D | i915_gem_shrinker.c
    39  static bool shrinker_lock(struct drm_i915_private *i915, bool *unlock)    in shrinker_lock() argument
    41  switch (mutex_trylock_recursive(&i915->drm.struct_mutex)) {    in shrinker_lock()
    51  if (mutex_trylock(&i915->drm.struct_mutex)) {    in shrinker_lock()
    67  static void shrinker_unlock(struct drm_i915_private *i915, bool unlock)    in shrinker_unlock() argument
    72  mutex_unlock(&i915->drm.struct_mutex);    in shrinker_unlock()
    146 i915_gem_shrink(struct drm_i915_private *i915,    in i915_gem_shrink() argument
    155 { &i915->mm.unbound_list, I915_SHRINK_UNBOUND },    in i915_gem_shrink()
    156 { &i915->mm.bound_list, I915_SHRINK_BOUND },    in i915_gem_shrink()
    163 if (!shrinker_lock(i915, &unlock))    in i915_gem_shrink()
    176 i915_gem_wait_for_idle(i915,    in i915_gem_shrink()
    [all …]

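shrinker_lock() is the interesting match here: the shrinker can be entered while the caller already holds struct_mutex, so it uses mutex_trylock_recursive() and reports through *unlock whether shrinker_unlock() should really drop the lock. A simplified sketch (the kernel's version also retries briefly on contention, which is omitted here):

    static bool shrinker_lock(struct drm_i915_private *i915, bool *unlock)
    {
            switch (mutex_trylock_recursive(&i915->drm.struct_mutex)) {
            case MUTEX_TRYLOCK_RECURSIVE:
                    *unlock = false;  /* already held by us: don't unlock */
                    return true;
            case MUTEX_TRYLOCK_SUCCESS:
                    *unlock = true;   /* we took it: caller must unlock */
                    return true;
            case MUTEX_TRYLOCK_FAILED:
            default:
                    return false;     /* contended: skip this shrink pass */
            }
    }
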
D | i915_pmu.c
    73  static bool pmu_needs_timer(struct drm_i915_private *i915, bool gpu_active)    in pmu_needs_timer() argument
    82  enable = i915->pmu.enable;    in pmu_needs_timer()
    104 else if (intel_engine_supports_stats(i915->engine[RCS]))    in pmu_needs_timer()
    113 void i915_pmu_gt_parked(struct drm_i915_private *i915)    in i915_pmu_gt_parked() argument
    115 if (!i915->pmu.base.event_init)    in i915_pmu_gt_parked()
    118 spin_lock_irq(&i915->pmu.lock);    in i915_pmu_gt_parked()
    123 i915->pmu.timer_enabled = pmu_needs_timer(i915, false);    in i915_pmu_gt_parked()
    124 spin_unlock_irq(&i915->pmu.lock);    in i915_pmu_gt_parked()
    127 static void __i915_pmu_maybe_start_timer(struct drm_i915_private *i915)    in __i915_pmu_maybe_start_timer() argument
    129 if (!i915->pmu.timer_enabled && pmu_needs_timer(i915, true)) {    in __i915_pmu_maybe_start_timer()
    [all …]

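The i915_pmu_gt_parked() matches show the PMU's sampling-timer bookkeeping: bail out if the PMU was never registered, then recompute timer_enabled under pmu.lock with gpu_active = false. Stitched together from the matched lines only (lines elided from the excerpt are also omitted here):

    void i915_pmu_gt_parked(struct drm_i915_private *i915)
    {
            if (!i915->pmu.base.event_init)
                    return;         /* PMU was never registered */

            spin_lock_irq(&i915->pmu.lock);
            /* GPU is going idle: decide if the sampling timer may stop. */
            i915->pmu.timer_enabled = pmu_needs_timer(i915, false);
            spin_unlock_irq(&i915->pmu.lock);
    }
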
D | i915_gem.c
    50  static void i915_gem_flush_free_objects(struct drm_i915_private *i915);
    140 static u32 __i915_gem_park(struct drm_i915_private *i915)    in __i915_gem_park() argument
    144 lockdep_assert_held(&i915->drm.struct_mutex);    in __i915_gem_park()
    145 GEM_BUG_ON(i915->gt.active_requests);    in __i915_gem_park()
    146 GEM_BUG_ON(!list_empty(&i915->gt.active_rings));    in __i915_gem_park()
    148 if (!i915->gt.awake)    in __i915_gem_park()
    151 GEM_BUG_ON(i915->gt.epoch == I915_EPOCH_INVALID);    in __i915_gem_park()
    164 synchronize_irq(i915->drm.irq);    in __i915_gem_park()
    166 intel_engines_park(i915);    in __i915_gem_park()
    167 i915_timelines_park(i915);    in __i915_gem_park()
    [all …]

D | i915_gem_context.c
    105 kmem_cache_free(ctx->i915->luts, lut);    in lut_close()
    122 lockdep_assert_held(&ctx->i915->drm.struct_mutex);    in i915_gem_context_free()
    139 ida_simple_remove(&ctx->i915->contexts.hw_ida, ctx->hw_id);    in i915_gem_context_free()
    143 static void contexts_free(struct drm_i915_private *i915)    in contexts_free() argument
    145 struct llist_node *freed = llist_del_all(&i915->contexts.free_list);    in contexts_free()
    148 lockdep_assert_held(&i915->drm.struct_mutex);    in contexts_free()
    154 static void contexts_free_first(struct drm_i915_private *i915)    in contexts_free_first() argument
    159 lockdep_assert_held(&i915->drm.struct_mutex);    in contexts_free_first()
    161 freed = llist_del_first(&i915->contexts.free_list);    in contexts_free_first()
    171 struct drm_i915_private *i915 =    in contexts_free_worker() local
    [all …]

D | i915_request.c
    85  kmem_cache_free(rq->i915->requests, rq);    in i915_fence_release()
    115 i915_dependency_alloc(struct drm_i915_private *i915)    in i915_dependency_alloc() argument
    117 return kmem_cache_alloc(i915->dependencies, GFP_KERNEL);    in i915_dependency_alloc()
    121 i915_dependency_free(struct drm_i915_private *i915,    in i915_dependency_free() argument
    124 kmem_cache_free(i915->dependencies, dep);    in i915_dependency_free()
    141 i915_sched_node_add_dependency(struct drm_i915_private *i915,    in i915_sched_node_add_dependency() argument
    147 dep = i915_dependency_alloc(i915);    in i915_sched_node_add_dependency()
    157 i915_sched_node_fini(struct drm_i915_private *i915,    in i915_sched_node_fini() argument
    176 i915_dependency_free(i915, dep);    in i915_sched_node_fini()
    186 i915_dependency_free(i915, dep);    in i915_sched_node_fini()
    [all …]

D | Makefile
    37  i915-y := i915_drv.o \
    53  i915-$(CONFIG_COMPAT) += i915_ioc32.o
    54  i915-$(CONFIG_DEBUG_FS) += i915_debugfs.o intel_pipe_crc.o
    55  i915-$(CONFIG_PERF_EVENTS) += i915_pmu.o
    58  i915-y += i915_cmd_parser.o \
    91  i915-y += intel_uc.o \
    103 i915-y += intel_renderstate_gen6.o \
    109 i915-y += intel_audio.o \
    128 i915-$(CONFIG_ACPI) += intel_acpi.o intel_opregion.o
    129 i915-$(CONFIG_DRM_FBDEV_EMULATION) += intel_fbdev.o
    [all …]

D | i915_pmu.h
    112 void i915_pmu_register(struct drm_i915_private *i915);
    113 void i915_pmu_unregister(struct drm_i915_private *i915);
    114 void i915_pmu_gt_parked(struct drm_i915_private *i915);
    115 void i915_pmu_gt_unparked(struct drm_i915_private *i915);
    117 static inline void i915_pmu_register(struct drm_i915_private *i915) {}    in i915_pmu_register() argument
    118 static inline void i915_pmu_unregister(struct drm_i915_private *i915) {}    in i915_pmu_unregister() argument
    119 static inline void i915_pmu_gt_parked(struct drm_i915_private *i915) {}    in i915_pmu_gt_parked() argument
    120 static inline void i915_pmu_gt_unparked(struct drm_i915_private *i915) {}    in i915_pmu_gt_unparked() argument

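i915_pmu.h follows the standard kernel idiom for optional subsystems: real prototypes when the feature is built, empty static inline stubs otherwise, so callers never need their own #ifdefs. The preprocessor lines are truncated out of the excerpt; a plausible reconstruction (the CONFIG_PERF_EVENTS guard is inferred from the Makefile match "i915-$(CONFIG_PERF_EVENTS) += i915_pmu.o"):

    #ifdef CONFIG_PERF_EVENTS
    void i915_pmu_register(struct drm_i915_private *i915);
    void i915_pmu_unregister(struct drm_i915_private *i915);
    void i915_pmu_gt_parked(struct drm_i915_private *i915);
    void i915_pmu_gt_unparked(struct drm_i915_private *i915);
    #else
    static inline void i915_pmu_register(struct drm_i915_private *i915) {}
    static inline void i915_pmu_unregister(struct drm_i915_private *i915) {}
    static inline void i915_pmu_gt_parked(struct drm_i915_private *i915) {}
    static inline void i915_pmu_gt_unparked(struct drm_i915_private *i915) {}
    #endif
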
D | i915_gem_tiling.c
    71  u32 i915_gem_fence_size(struct drm_i915_private *i915,    in i915_gem_fence_size() argument
    83  if (INTEL_GEN(i915) >= 4) {    in i915_gem_fence_size()
    90  if (IS_GEN3(i915))    in i915_gem_fence_size()
    111 u32 i915_gem_fence_alignment(struct drm_i915_private *i915, u32 size,    in i915_gem_fence_alignment() argument
    123 if (INTEL_GEN(i915) >= 4)    in i915_gem_fence_alignment()
    130 return i915_gem_fence_size(i915, size, tiling, stride);    in i915_gem_fence_alignment()
    138 struct drm_i915_private *i915 = to_i915(obj->base.dev);    in i915_tiling_ok() local
    151 if (INTEL_GEN(i915) >= 7) {    in i915_tiling_ok()
    154 } else if (INTEL_GEN(i915) >= 4) {    in i915_tiling_ok()
    165 if (IS_GEN2(i915) ||    in i915_tiling_ok()
    [all …]

/Linux-v4.19/Documentation/gpu/

D | i915.rst
    2   drm/i915 Intel GFX Driver
    5   The drm/i915 driver supports all (with the exception of some very early
    19  .. kernel-doc:: drivers/gpu/drm/i915/intel_runtime_pm.c
    22  .. kernel-doc:: drivers/gpu/drm/i915/intel_runtime_pm.c
    25  .. kernel-doc:: drivers/gpu/drm/i915/intel_uncore.c
    31  .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
    34  .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
    37  .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
    40  .. kernel-doc:: drivers/gpu/drm/i915/i915_irq.c
    46  .. kernel-doc:: drivers/gpu/drm/i915/i915_vgpu.c
    [all …]