Lines Matching full:vm (whole-identifier matches for vm in the i915 gen8 PPGTT implementation, drivers/gpu/drm/i915/gt/gen8_ppgtt.c)

60 struct drm_i915_private *i915 = ppgtt->vm.i915; in gen8_ppgtt_notify_vgt()
61 struct intel_uncore *uncore = ppgtt->vm.gt->uncore; in gen8_ppgtt_notify_vgt()
72 if (i915_vm_is_4lvl(&ppgtt->vm)) { in gen8_ppgtt_notify_vgt()
150 static unsigned int gen8_pd_top_count(const struct i915_address_space *vm) in gen8_pd_top_count() argument
152 unsigned int shift = __gen8_pte_shift(vm->top); in gen8_pd_top_count()
154 return (vm->total + (1ull << shift) - 1) >> shift; in gen8_pd_top_count()
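The round-up in gen8_pd_top_count() is a ceiling division: the total VA size over the span of one top-level entry. The stand-alone sketch below reproduces that arithmetic; PAGE_SHIFT, PDE_SHIFT and the example totals are stand-ins for the i915 constants, picked so a 4-level vm (top == 3) yields 512 top-level entries and a 3-level vm (top == 2) yields 4.

#include <stdio.h>
#include <stdint.h>

/* Stand-ins: 4 KiB pages, 512 entries per level (9 bits), so level 'lvl'
 * covers 12 + 9 * lvl address bits.  These mirror, but are not, the i915
 * definitions behind __gen8_pte_shift(). */
#define PAGE_SHIFT	12
#define PDE_SHIFT	9

/* ceil(total / (1 << shift)): entries needed in the top-level directory */
static unsigned int top_count(uint64_t total, unsigned int top)
{
	unsigned int shift = PAGE_SHIFT + top * PDE_SHIFT;

	return (total + (1ull << shift) - 1) >> shift;
}

int main(void)
{
	printf("4-level, 48-bit vm: %u top entries\n", top_count(1ull << 48, 3)); /* 512 */
	printf("3-level, 32-bit vm: %u top entries\n", top_count(1ull << 32, 2)); /* 4 */
	return 0;
}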
158 gen8_pdp_for_page_index(struct i915_address_space * const vm, const u64 idx) in gen8_pdp_for_page_index() argument
160 struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm); in gen8_pdp_for_page_index()
162 if (vm->top == 2) in gen8_pdp_for_page_index()
165 return i915_pd_entry(ppgtt->pd, gen8_pd_index(idx, vm->top)); in gen8_pdp_for_page_index()
169 gen8_pdp_for_page_address(struct i915_address_space * const vm, const u64 addr) in gen8_pdp_for_page_address() argument
171 return gen8_pdp_for_page_index(vm, addr >> GEN8_PTE_SHIFT); in gen8_pdp_for_page_address()
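gen8_pdp_for_page_address() shifts the byte address down to a page index and, on a 4-level vm, uses the top slice of that index to pick the PDP out of the top-level directory; on a 3-level vm (top == 2) ppgtt->pd already is the PDP, so no lookup is needed. Below is a toy split of one address into per-level indices; the shift and mask values mirror, but are not taken from, the i915 headers.

#include <stdio.h>
#include <stdint.h>

/* Stand-ins mirroring the i915 layout: 4 KiB pages, 512 entries per level. */
#define PTE_SHIFT	12
#define PD_BITS		9
#define PD_MASK		((1u << PD_BITS) - 1)

/* gen8_pd_index() equivalent: the 9-bit slice of the page index for a level */
static unsigned int pd_index(uint64_t idx, unsigned int lvl)
{
	return (idx >> (lvl * PD_BITS)) & PD_MASK;
}

int main(void)
{
	uint64_t addr = 0x0000123456789000ull;
	uint64_t idx = addr >> PTE_SHIFT;	/* page index, as in gen8_pdp_for_page_address() */

	/* top == 3: the pml4e slot picks the PDP; top == 2: ppgtt->pd is the PDP */
	printf("addr %#llx -> pml4e %u, pdpe %u, pde %u, pte %u\n",
	       (unsigned long long)addr,
	       pd_index(idx, 3), pd_index(idx, 2), pd_index(idx, 1), pd_index(idx, 0));
	return 0;
}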
174 static void __gen8_ppgtt_cleanup(struct i915_address_space *vm, in __gen8_ppgtt_cleanup() argument
185 __gen8_ppgtt_cleanup(vm, *pde, GEN8_PDES, lvl - 1); in __gen8_ppgtt_cleanup()
189 free_px(vm, &pd->pt, lvl); in __gen8_ppgtt_cleanup()
192 static void gen8_ppgtt_cleanup(struct i915_address_space *vm) in gen8_ppgtt_cleanup() argument
194 struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm); in gen8_ppgtt_cleanup()
196 if (intel_vgpu_active(vm->i915)) in gen8_ppgtt_cleanup()
200 __gen8_ppgtt_cleanup(vm, ppgtt->pd, in gen8_ppgtt_cleanup()
201 gen8_pd_top_count(vm), vm->top); in gen8_ppgtt_cleanup()
203 free_scratch(vm); in gen8_ppgtt_cleanup()
206 static u64 __gen8_ppgtt_clear(struct i915_address_space * const vm, in __gen8_ppgtt_clear() argument
210 const struct drm_i915_gem_object * const scratch = vm->scratch[lvl]; in __gen8_ppgtt_clear()
213 GEM_BUG_ON(end > vm->total >> GEN8_PTE_SHIFT); in __gen8_ppgtt_clear()
217 __func__, vm, lvl + 1, start, end, in __gen8_ppgtt_clear()
227 __func__, vm, lvl + 1, idx, start, end); in __gen8_ppgtt_clear()
229 __gen8_ppgtt_cleanup(vm, as_pd(pt), I915_PDES, lvl); in __gen8_ppgtt_clear()
235 start = __gen8_ppgtt_clear(vm, as_pd(pt), in __gen8_ppgtt_clear()
245 __func__, vm, lvl, start, end, in __gen8_ppgtt_clear()
260 vm->scratch[0]->encode, in __gen8_ppgtt_clear()
268 free_px(vm, pt, lvl); in __gen8_ppgtt_clear()
274 static void gen8_ppgtt_clear(struct i915_address_space *vm, in gen8_ppgtt_clear() argument
279 GEM_BUG_ON(range_overflows(start, length, vm->total)); in gen8_ppgtt_clear()
285 __gen8_ppgtt_clear(vm, i915_vm_to_ppgtt(vm)->pd, in gen8_ppgtt_clear()
286 start, start + length, vm->top); in gen8_ppgtt_clear()
289 static void __gen8_ppgtt_alloc(struct i915_address_space * const vm, in __gen8_ppgtt_alloc() argument
296 GEM_BUG_ON(end > vm->total >> GEN8_PTE_SHIFT); in __gen8_ppgtt_alloc()
300 __func__, vm, lvl + 1, *start, end, in __gen8_ppgtt_alloc()
313 __func__, vm, lvl + 1, idx); in __gen8_ppgtt_alloc()
318 fill_px(pt, vm->scratch[lvl]->encode); in __gen8_ppgtt_alloc()
334 __gen8_ppgtt_alloc(vm, stash, in __gen8_ppgtt_alloc()
344 __func__, vm, lvl, *start, end, in __gen8_ppgtt_alloc()
357 static void gen8_ppgtt_alloc(struct i915_address_space *vm, in gen8_ppgtt_alloc() argument
363 GEM_BUG_ON(range_overflows(start, length, vm->total)); in gen8_ppgtt_alloc()
369 __gen8_ppgtt_alloc(vm, stash, i915_vm_to_ppgtt(vm)->pd, in gen8_ppgtt_alloc()
370 &start, start + length, vm->top); in gen8_ppgtt_alloc()
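__gen8_ppgtt_clear() and __gen8_ppgtt_alloc() are the same recursive walk over the page-directory radix tree: each level splits [start, end) along entry boundaries, handles fully covered entries whole (the clear path frees that subtree, the alloc path fills a fresh table with scratch encodes) and recurses into partially covered ones. The sketch below shows only that descent shape, with invented names and a leaf callback rather than the driver's helpers.

#include <stdio.h>
#include <stdint.h>

#define PD_BITS	9	/* 512 entries per level */

/* Recursively split [start, end) (in page indices) along the entry
 * boundaries of each level, calling 'leaf' for every PTE-level span.
 * This is only the descent shape of the i915 clear/alloc walk. */
static void walk(uint64_t start, uint64_t end, unsigned int lvl,
		 void (*leaf)(uint64_t first, uint64_t count))
{
	unsigned int shift = lvl * PD_BITS;

	if (!lvl) {
		leaf(start, end - start);
		return;
	}

	while (start < end) {
		/* end of the range owned by the current entry at this level */
		uint64_t span_end = ((start >> shift) + 1) << shift;

		if (span_end > end)
			span_end = end;
		walk(start, span_end, lvl - 1, leaf);
		start = span_end;
	}
}

static void show(uint64_t first, uint64_t count)
{
	printf("  ptes [%#llx, %#llx)\n",
	       (unsigned long long)first, (unsigned long long)(first + count));
}

int main(void)
{
	/* page indices, i.e. what start >> GEN8_PTE_SHIFT produces in the driver */
	walk(0x1fe, 0x402, 2, show);
	return 0;
}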
373 static void __gen8_ppgtt_foreach(struct i915_address_space *vm, in __gen8_ppgtt_foreach() argument
376 void (*fn)(struct i915_address_space *vm, in __gen8_ppgtt_foreach() argument
393 __gen8_ppgtt_foreach(vm, as_pd(pt), start, end, lvl, in __gen8_ppgtt_foreach()
396 fn(vm, pt, data); in __gen8_ppgtt_foreach()
406 static void gen8_ppgtt_foreach(struct i915_address_space *vm, in gen8_ppgtt_foreach() argument
408 void (*fn)(struct i915_address_space *vm, in gen8_ppgtt_foreach() argument
416 __gen8_ppgtt_foreach(vm, i915_vm_to_ppgtt(vm)->pd, in gen8_ppgtt_foreach()
417 &start, start + length, vm->top, in gen8_ppgtt_foreach()
470 xehpsdv_ppgtt_insert_huge(struct i915_address_space *vm, in xehpsdv_ppgtt_insert_huge() argument
476 const gen8_pte_t pte_encode = vm->pte_encode(0, cache_level, flags); in xehpsdv_ppgtt_insert_huge()
480 GEM_BUG_ON(!i915_vm_is_4lvl(vm)); in xehpsdv_ppgtt_insert_huge()
484 gen8_pdp_for_page_address(vm, start); in xehpsdv_ppgtt_insert_huge()
558 static void gen8_ppgtt_insert_huge(struct i915_address_space *vm, in gen8_ppgtt_insert_huge() argument
568 GEM_BUG_ON(!i915_vm_is_4lvl(vm)); in gen8_ppgtt_insert_huge()
572 gen8_pdp_for_page_address(vm, start); in gen8_ppgtt_insert_huge()
647 (i915_vm_has_scratch_64K(vm) && in gen8_ppgtt_insert_huge()
665 if (I915_SELFTEST_ONLY(vm->scrub_64K)) { in gen8_ppgtt_insert_huge()
668 encode = vm->scratch[0]->encode; in gen8_ppgtt_insert_huge()
682 static void gen8_ppgtt_insert(struct i915_address_space *vm, in gen8_ppgtt_insert() argument
687 struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm); in gen8_ppgtt_insert()
691 if (HAS_64K_PAGES(vm->i915)) in gen8_ppgtt_insert()
692 xehpsdv_ppgtt_insert_huge(vm, vma_res, &iter, cache_level, flags); in gen8_ppgtt_insert()
694 gen8_ppgtt_insert_huge(vm, vma_res, &iter, cache_level, flags); in gen8_ppgtt_insert()
700 gen8_pdp_for_page_index(vm, idx); in gen8_ppgtt_insert()
710 static void gen8_ppgtt_insert_entry(struct i915_address_space *vm, in gen8_ppgtt_insert_entry() argument
718 gen8_pdp_for_page_index(vm, idx); in gen8_ppgtt_insert_entry()
731 static void __xehpsdv_ppgtt_insert_entry_lm(struct i915_address_space *vm, in __xehpsdv_ppgtt_insert_entry_lm() argument
739 gen8_pdp_for_page_index(vm, idx); in __xehpsdv_ppgtt_insert_entry_lm()
758 static void xehpsdv_ppgtt_insert_entry(struct i915_address_space *vm, in xehpsdv_ppgtt_insert_entry() argument
765 return __xehpsdv_ppgtt_insert_entry_lm(vm, addr, offset, in xehpsdv_ppgtt_insert_entry()
768 return gen8_ppgtt_insert_entry(vm, addr, offset, level, flags); in xehpsdv_ppgtt_insert_entry()
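gen8_ppgtt_insert_entry() writes a single PTE: it finds the PDP for the page index, then the PD slot and PT slot within it, and stores the encoded address. Below is a toy, user-space version of that index split and store; the encode is simplified to address | valid, which is not the real gen8 PTE layout.

#include <stdio.h>
#include <stdint.h>

#define ENTRIES		512
#define PTE_SHIFT	12
#define VALID		0x1ull	/* simplified: not the real gen8 PTE bits */

static uint64_t pt[ENTRIES];	/* one toy page table */

/* Split a byte offset into PD/PT indices the way the driver uses
 * gen8_pd_index(), then store a simplified encode at the PTE level. */
static void insert_entry(uint64_t offset, uint64_t dma_addr)
{
	uint64_t idx = offset >> PTE_SHIFT;
	unsigned int pde = (idx >> 9) & (ENTRIES - 1);	/* which PT inside the PD */
	unsigned int pte = idx & (ENTRIES - 1);		/* which slot inside that PT */

	/* the driver first looks up the PDP/PD/PT for 'pde'; here there is one PT */
	printf("pde %u, pte %u <- %#llx\n", pde, pte, (unsigned long long)dma_addr);
	pt[pte] = dma_addr | VALID;
}

int main(void)
{
	insert_entry(0x5000, 0xabcd000);
	return 0;
}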
771 static int gen8_init_scratch(struct i915_address_space *vm) in gen8_init_scratch() argument
779 * we can reuse it for all vm, keeping contexts and processes separate. in gen8_init_scratch()
781 if (vm->has_read_only && vm->gt->vm && !i915_is_ggtt(vm->gt->vm)) { in gen8_init_scratch()
782 struct i915_address_space *clone = vm->gt->vm; in gen8_init_scratch()
786 vm->scratch_order = clone->scratch_order; in gen8_init_scratch()
787 for (i = 0; i <= vm->top; i++) in gen8_init_scratch()
788 vm->scratch[i] = i915_gem_object_get(clone->scratch[i]); in gen8_init_scratch()
793 ret = setup_scratch_page(vm); in gen8_init_scratch()
797 pte_flags = vm->has_read_only; in gen8_init_scratch()
798 if (i915_gem_object_is_lmem(vm->scratch[0])) in gen8_init_scratch()
801 vm->scratch[0]->encode = in gen8_init_scratch()
802 gen8_pte_encode(px_dma(vm->scratch[0]), in gen8_init_scratch()
805 for (i = 1; i <= vm->top; i++) { in gen8_init_scratch()
808 obj = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in gen8_init_scratch()
814 ret = map_pt_dma(vm, obj); in gen8_init_scratch()
820 fill_px(obj, vm->scratch[i - 1]->encode); in gen8_init_scratch()
823 vm->scratch[i] = obj; in gen8_init_scratch()
830 i915_gem_object_put(vm->scratch[i]); in gen8_init_scratch()
831 vm->scratch[0] = NULL; in gen8_init_scratch()
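gen8_init_scratch() builds a chain: the level-0 scratch page's encode fills the level-1 scratch table, whose encode fills level 2, and so on up to vm->top, so an entry that is never populated resolves, level by level, to the scratch page. The toy model below shows that fall-through with plain pointers; the real encodes, flags and lmem handling are omitted.

#include <stdio.h>
#include <stdint.h>

#define LEVELS	4	/* top == 3: pml4 -> pdp -> pd -> pt */
#define ENTRIES	512

/* Toy node: a NULL child stands for "this slot still holds the scratch
 * encode for the level below", i.e. it was never populated. */
struct node {
	struct node *child[ENTRIES];
};

static const char *lookup(struct node *top, uint64_t idx)
{
	struct node *n = top;
	int lvl;

	for (lvl = LEVELS - 1; lvl >= 0; lvl--) {
		unsigned int slot = (idx >> (9 * lvl)) & (ENTRIES - 1);

		if (!n->child[slot])
			return "scratch page";	/* unpopulated: falls through to scratch */
		if (!lvl)
			return "real page";	/* PTE level: genuine backing page */
		n = n->child[slot];
	}
	return "real page";	/* not reached */
}

int main(void)
{
	static struct node pml4, pdp, pd, pt;

	/* populate exactly one branch: page index 0 is backed, everything else isn't */
	pml4.child[0] = &pdp;
	pdp.child[0] = &pd;
	pd.child[0] = &pt;
	pt.child[0] = (struct node *)&pt;	/* non-NULL marker, never dereferenced */

	printf("idx 0      -> %s\n", lookup(&pml4, 0));
	printf("idx 123456 -> %s\n", lookup(&pml4, 123456));
	return 0;
}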
837 struct i915_address_space *vm = &ppgtt->vm; in gen8_preallocate_top_level_pdp() local
841 GEM_BUG_ON(vm->top != 2); in gen8_preallocate_top_level_pdp()
842 GEM_BUG_ON(gen8_pd_top_count(vm) != GEN8_3LVL_PDPES); in gen8_preallocate_top_level_pdp()
848 pde = alloc_pd(vm); in gen8_preallocate_top_level_pdp()
852 err = map_pt_dma(vm, pde->pt.base); in gen8_preallocate_top_level_pdp()
854 free_pd(vm, pde); in gen8_preallocate_top_level_pdp()
858 fill_px(pde, vm->scratch[1]->encode); in gen8_preallocate_top_level_pdp()
868 gen8_alloc_top_pd(struct i915_address_space *vm) in gen8_alloc_top_pd() argument
870 const unsigned int count = gen8_pd_top_count(vm); in gen8_alloc_top_pd()
880 pd->pt.base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in gen8_alloc_top_pd()
887 err = map_pt_dma(vm, pd->pt.base); in gen8_alloc_top_pd()
891 fill_page_dma(px_base(pd), vm->scratch[vm->top]->encode, count); in gen8_alloc_top_pd()
896 free_pd(vm, pd); in gen8_alloc_top_pd()
919 ppgtt->vm.top = i915_vm_is_4lvl(&ppgtt->vm) ? 3 : 2; in gen8_ppgtt_create()
920 ppgtt->vm.pd_shift = ilog2(SZ_4K * SZ_4K / sizeof(gen8_pte_t)); in gen8_ppgtt_create()
930 ppgtt->vm.has_read_only = !IS_GRAPHICS_VER(gt->i915, 11, 12); in gen8_ppgtt_create()
933 ppgtt->vm.alloc_pt_dma = alloc_pt_lmem; in gen8_ppgtt_create()
948 ppgtt->vm.alloc_scratch_dma = alloc_pt_dma; in gen8_ppgtt_create()
950 ppgtt->vm.alloc_scratch_dma = alloc_pt_lmem; in gen8_ppgtt_create()
952 ppgtt->vm.alloc_pt_dma = alloc_pt_dma; in gen8_ppgtt_create()
953 ppgtt->vm.alloc_scratch_dma = alloc_pt_dma; in gen8_ppgtt_create()
956 ppgtt->vm.pte_encode = gen8_pte_encode; in gen8_ppgtt_create()
958 ppgtt->vm.bind_async_flags = I915_VMA_LOCAL_BIND; in gen8_ppgtt_create()
959 ppgtt->vm.insert_entries = gen8_ppgtt_insert; in gen8_ppgtt_create()
961 ppgtt->vm.insert_page = xehpsdv_ppgtt_insert_entry; in gen8_ppgtt_create()
963 ppgtt->vm.insert_page = gen8_ppgtt_insert_entry; in gen8_ppgtt_create()
964 ppgtt->vm.allocate_va_range = gen8_ppgtt_alloc; in gen8_ppgtt_create()
965 ppgtt->vm.clear_range = gen8_ppgtt_clear; in gen8_ppgtt_create()
966 ppgtt->vm.foreach = gen8_ppgtt_foreach; in gen8_ppgtt_create()
967 ppgtt->vm.cleanup = gen8_ppgtt_cleanup; in gen8_ppgtt_create()
969 err = gen8_init_scratch(&ppgtt->vm); in gen8_ppgtt_create()
973 pd = gen8_alloc_top_pd(&ppgtt->vm); in gen8_ppgtt_create()
980 if (!i915_vm_is_4lvl(&ppgtt->vm)) { in gen8_ppgtt_create()
992 i915_vm_put(&ppgtt->vm); in gen8_ppgtt_create()
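gen8_ppgtt_create() wires the address space up as a table of function pointers (insert_entries, insert_page, allocate_va_range, clear_range, foreach, cleanup), choosing the xehpsdv or gen8 variants per platform; callers then drive the vm only through those pointers. Below is a minimal stand-alone sketch of that pattern, with invented names rather than the i915 structures.

#include <stdio.h>
#include <stdint.h>

/* Toy stand-in for the vfunc wiring done in gen8_ppgtt_create(): bind the
 * per-platform implementations once, then drive the vm only through the
 * pointers.  Names and layout are invented for illustration. */
struct toy_vm {
	void (*allocate_va_range)(struct toy_vm *vm, uint64_t start, uint64_t length);
	void (*clear_range)(struct toy_vm *vm, uint64_t start, uint64_t length);
	void (*cleanup)(struct toy_vm *vm);
};

static void toy_alloc(struct toy_vm *vm, uint64_t start, uint64_t length)
{
	(void)vm;
	printf("alloc [%#llx + %#llx)\n",
	       (unsigned long long)start, (unsigned long long)length);
}

static void toy_clear(struct toy_vm *vm, uint64_t start, uint64_t length)
{
	(void)vm;
	printf("clear [%#llx + %#llx)\n",
	       (unsigned long long)start, (unsigned long long)length);
}

static void toy_cleanup(struct toy_vm *vm)
{
	(void)vm;
	printf("cleanup\n");
}

int main(void)
{
	struct toy_vm vm = {
		/* bound once at create time, per platform */
		.allocate_va_range = toy_alloc,
		.clear_range = toy_clear,
		.cleanup = toy_cleanup,
	};

	vm.allocate_va_range(&vm, 0x10000, 0x4000);
	vm.clear_range(&vm, 0x10000, 0x4000);
	vm.cleanup(&vm);
	return 0;
}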