| /Linux-v5.4/drivers/gpu/drm/i915/gem/selftests/ |
| D | i915_gem_object_blt.c |  52 vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB); in igt_fill_blt()
|   |                       | 137 vaddr = i915_gem_object_pin_map(src, I915_MAP_WB); in igt_copy_blt()
|   |                       | 157 vaddr = i915_gem_object_pin_map(dst, I915_MAP_WB); in igt_copy_blt()
|
| D | i915_gem_client_blt.c | 46 vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB); in igt_client_fill()
|
| D | i915_gem_dmabuf.c | 121 obj_map = i915_gem_object_pin_map(obj, I915_MAP_WB); in igt_dmabuf_import()
|   |                   | 294 ptr = i915_gem_object_pin_map(obj, I915_MAP_WB); in igt_dmabuf_export_kmap()
|
| D | i915_gem_coherency.c | 146 map = i915_gem_object_pin_map(obj, I915_MAP_WC); in wc_set()
|   |                      | 169 map = i915_gem_object_pin_map(obj, I915_MAP_WC); in wc_get()
|
| D | igt_gem_utils.c | 56 cmd = i915_gem_object_pin_map(obj, I915_MAP_WC); in igt_emit_store_dw()
|
| D | i915_gem_context.c |  592 cmd = i915_gem_object_pin_map(obj, I915_MAP_WB); in rpcs_query_batch()
|   |                    |  783 buf = i915_gem_object_pin_map(obj, I915_MAP_WB); in __read_slice_count()
|   |                    | 1181 cmd = i915_gem_object_pin_map(obj, I915_MAP_WB); in write_to_scratch()
|   |                    | 1270 cmd = i915_gem_object_pin_map(obj, I915_MAP_WB); in read_from_scratch()
|   |                    | 1342 cmd = i915_gem_object_pin_map(obj, I915_MAP_WB); in read_from_scratch()
|
| /Linux-v5.4/drivers/gpu/drm/i915/selftests/ |
| D | igt_spinner.c | 36 vaddr = i915_gem_object_pin_map(spin->hws, I915_MAP_WB); in igt_spinner_init()
|   |               | 44 vaddr = i915_gem_object_pin_map(spin->obj, mode); in igt_spinner_init()
|
| D | i915_request.c |  627 cmd = i915_gem_object_pin_map(obj, I915_MAP_WB); in empty_batch()
|   |                |  785 cmd = i915_gem_object_pin_map(obj, I915_MAP_WC); in recursive_batch()
|   |                |  820 cmd = i915_gem_object_pin_map(batch->obj, I915_MAP_WC); in recursive_batch_resolve()
|   |                | 1047 cmd = i915_gem_object_pin_map(request[id]->batch->obj, in live_sequential_engines()
|
| /Linux-v5.4/drivers/gpu/drm/i915/gt/ |
| D | selftest_workarounds.c |  90 cs = i915_gem_object_pin_map(result, I915_MAP_WB); in read_nonprivs()
|   |                        | 202 vaddr = i915_gem_object_pin_map(results, I915_MAP_WB); in check_whitelist()
|   |                        | 503 cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC); in check_dirty_whitelist()
|   |                        | 587 results = i915_gem_object_pin_map(scratch->obj, I915_MAP_WB); in check_dirty_whitelist()
|   |                        | 820 cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC); in scrub_whitelisted_registers()
|   |                        | 946 a = i915_gem_object_pin_map(A->obj, I915_MAP_WB); in check_whitelisted_registers()
|   |                        | 950 b = i915_gem_object_pin_map(B->obj, I915_MAP_WB); in check_whitelisted_registers()
|
| D | intel_engine_pm.c | 28 map = i915_gem_object_pin_map(engine->default_state, in __engine_unpark()
|
| D | intel_timeline.c | 170 vaddr = i915_gem_object_pin_map(hwsp->vma->obj, I915_MAP_WB); in cacheline_alloc()
|   |                  | 242 vaddr = i915_gem_object_pin_map(hwsp->obj, I915_MAP_WB); in intel_timeline_init()
|
| D | selftest_context.c | 84 vaddr = i915_gem_object_pin_map(ce->state->obj, in __live_context_size()
|
| D | selftest_hangcheck.c |  80 vaddr = i915_gem_object_pin_map(h->hws, I915_MAP_WB); in hang_init()
|   |                      |  87 vaddr = i915_gem_object_pin_map(h->obj, in hang_init()
|   |                      | 147 vaddr = i915_gem_object_pin_map(obj, i915_coherent_map_type(gt->i915)); in hang_create_request()
|
| D | intel_ringbuffer.c | 1214 addr = i915_gem_object_pin_map(vma->obj, in intel_ring_pin()
|   |                    | 1433 vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB); in alloc_context_vma()
|   |                    | 1439 defaults = i915_gem_object_pin_map(engine->default_state, in alloc_context_vma()
|
| D | selftest_lrc.c |  258 vaddr = i915_gem_object_pin_map(obj, I915_MAP_WC); in live_timeslice_preempt()
|   |                |  340 map = i915_gem_object_pin_map(obj, I915_MAP_WC); in live_busywait_preempt()
|   |                | 1619 cs = i915_gem_object_pin_map(smoke.batch, I915_MAP_WB); in live_preempt_smoke()
|
| /Linux-v5.4/drivers/gpu/drm/i915/gt/uc/ |
| D | intel_huc.c | 57 vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB); in intel_huc_rsa_data_create()
|
| D | intel_guc_ads.c | 148 blob = i915_gem_object_pin_map(vma->obj, I915_MAP_WB); in intel_guc_ads_create()
|
| D | intel_guc_submission.c | 322 vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB); in guc_stage_desc_pool_create()
|   |                        | 831 vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB); in guc_client_alloc()
|
| D | intel_guc_log.c | 343 vaddr = i915_gem_object_pin_map(log->vma->obj, I915_MAP_WC); in guc_log_map()
|
| D | intel_guc.c | 94 vaddr = i915_gem_object_pin_map(vma->obj, I915_MAP_WB); in guc_shared_data_create()
|
| /Linux-v5.4/drivers/gpu/drm/i915/gem/ |
| D | i915_gem_object_blt.c |  41 cmd = i915_gem_object_pin_map(pool->obj, I915_MAP_WC); in intel_emit_vma_fill_blt()
|   |                       | 225 cmd = i915_gem_object_pin_map(pool->obj, I915_MAP_WC); in intel_emit_vma_copy_blt()
|
| D | i915_gem_dmabuf.c | 85 return i915_gem_object_pin_map(obj, I915_MAP_WB); in i915_gem_dmabuf_vmap()
|
| D | i915_gem_object.h | 322 void *__must_check i915_gem_object_pin_map(struct drm_i915_gem_object *obj,
|
| D | i915_gem_pages.c | 277 void *i915_gem_object_pin_map(struct drm_i915_gem_object *obj, in i915_gem_object_pin_map() function
|
| /Linux-v5.4/drivers/gpu/drm/i915/ |
| D | i915_cmd_parser.c | 1143 dst = i915_gem_object_pin_map(dst_obj, I915_MAP_FORCE_WB); in copy_batch()
|   |                   | 1157 src = i915_gem_object_pin_map(src_obj, I915_MAP_WC); in copy_batch()
|
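For orientation, nearly all of the callers listed above follow the same pin/write/unpin pattern around i915_gem_object_pin_map(). The sketch below is illustrative only: the helper name example_fill_object is made up, the object is assumed to exist already, and a cached (I915_MAP_WB) mapping is requested, with failures propagated via the ERR_PTR convention the function uses.

    /*
     * Minimal sketch of the caller pattern seen in the hits above: pin the
     * object's backing pages into a kernel mapping, write through it, then
     * flush and drop the pin. Hypothetical helper for illustration only.
     */
    static int example_fill_object(struct drm_i915_gem_object *obj, u32 value)
    {
    	u32 *vaddr;

    	/* Ask for a cached, write-back CPU mapping of the whole object. */
    	vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);
    	if (IS_ERR(vaddr))
    		return PTR_ERR(vaddr);

    	/* Fill the object with a repeating 32-bit pattern. */
    	memset32(vaddr, value, obj->base.size / sizeof(u32));

    	/* Make the CPU writes visible before the GPU reads the pages. */
    	i915_gem_object_flush_map(obj);
    	i915_gem_object_unpin_map(obj);
    	return 0;
    }

The I915_MAP_WC hits above (e.g. the blitter and GuC log paths) differ only in requesting a write-combined mapping instead of a write-back one.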