Lines Matching refs:vma

391 struct i915_vma *vma; in close_object_list() local
393 vma = i915_vma_instance(obj, vm, NULL); in close_object_list()
394 if (!IS_ERR(vma)) in close_object_list()
395 ignored = i915_vma_unbind_unlocked(vma); in close_object_list()
414 struct i915_vma *vma; in fill_hole() local
455 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
456 if (IS_ERR(vma)) in fill_hole()
465 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
472 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
473 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
475 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
481 i915_vma_unpin(vma); in fill_hole()
495 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
496 if (IS_ERR(vma)) in fill_hole()
505 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
506 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
508 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
514 err = i915_vma_unbind_unlocked(vma); in fill_hole()
517 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
534 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
535 if (IS_ERR(vma)) in fill_hole()
544 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
551 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
552 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
554 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
560 i915_vma_unpin(vma); in fill_hole()
574 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
575 if (IS_ERR(vma)) in fill_hole()
584 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
585 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
587 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
593 err = i915_vma_unbind_unlocked(vma); in fill_hole()
596 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
648 struct i915_vma *vma; in walk_hole() local
656 vma = i915_vma_instance(obj, vm, NULL); in walk_hole()
657 if (IS_ERR(vma)) { in walk_hole()
658 err = PTR_ERR(vma); in walk_hole()
665 err = i915_vma_pin(vma, 0, 0, addr | flags); in walk_hole()
668 __func__, addr, vma->size, in walk_hole()
672 i915_vma_unpin(vma); in walk_hole()
674 if (!drm_mm_node_allocated(&vma->node) || in walk_hole()
675 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in walk_hole()
677 __func__, addr, vma->size); in walk_hole()
682 err = i915_vma_unbind_unlocked(vma); in walk_hole()
685 __func__, addr, vma->size, err); in walk_hole()
689 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in walk_hole()
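The matches in fill_hole() and walk_hole() above, and again in pot_hole(), drunk_hole() and __shrink_hole() below, all trace the same cycle: look up a VMA for the object in the address space, pin it at a chosen GTT offset, verify the resulting drm_mm node, then unpin and unbind so the next iteration can reuse the hole. A minimal sketch of that cycle, assembled only from the calls visible in this listing (the helper name check_hole_at and the error message are illustrative, not from the source):

    /* Illustrative helper, not from the selftest itself: one
     * bind/verify/unbind pass at a caller-chosen GTT offset.
     */
    static int check_hole_at(struct drm_i915_gem_object *obj,
                             struct i915_address_space *vm,
                             u64 offset, unsigned int flags)
    {
            struct i915_vma *vma;
            int err;

            vma = i915_vma_instance(obj, vm, NULL);     /* find or create the VMA */
            if (IS_ERR(vma))
                    return PTR_ERR(vma);

            err = i915_vma_pin(vma, 0, 0, offset | flags);  /* bind at the offset */
            if (err)
                    return err;

            /* The node must exist and sit exactly where we asked. */
            if (!drm_mm_node_allocated(&vma->node) ||
                i915_vma_misplaced(vma, 0, 0, offset | flags)) {
                    pr_err("%s: VMA misplaced at %llx + %llx\n",
                           __func__, vma->node.start, vma->node.size);
                    i915_vma_unpin(vma);
                    return -EINVAL;
            }

            i915_vma_unpin(vma);

            err = i915_vma_unbind_unlocked(vma);        /* evict, freeing the hole */
            if (err)
                    return err;

            GEM_BUG_ON(drm_mm_node_allocated(&vma->node));
            return 0;
    }

__shrink_hole() adds an i915_vma_sync() after the checks to wait for the binding to complete; the rest of the cycle is unchanged.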
715 struct i915_vma *vma; in pot_hole() local
731 vma = i915_vma_instance(obj, vm, NULL); in pot_hole()
732 if (IS_ERR(vma)) { in pot_hole()
733 err = PTR_ERR(vma); in pot_hole()
747 err = i915_vma_pin(vma, 0, 0, addr | flags); in pot_hole()
757 if (!drm_mm_node_allocated(&vma->node) || in pot_hole()
758 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in pot_hole()
760 __func__, addr, vma->size); in pot_hole()
761 i915_vma_unpin(vma); in pot_hole()
762 err = i915_vma_unbind_unlocked(vma); in pot_hole()
767 i915_vma_unpin(vma); in pot_hole()
768 err = i915_vma_unbind_unlocked(vma); in pot_hole()
804 struct i915_vma *vma; in drunk_hole() local
840 vma = i915_vma_instance(obj, vm, NULL); in drunk_hole()
841 if (IS_ERR(vma)) { in drunk_hole()
842 err = PTR_ERR(vma); in drunk_hole()
846 GEM_BUG_ON(vma->size != BIT_ULL(size)); in drunk_hole()
851 err = i915_vma_pin(vma, 0, 0, addr | flags); in drunk_hole()
861 if (!drm_mm_node_allocated(&vma->node) || in drunk_hole()
862 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in drunk_hole()
865 i915_vma_unpin(vma); in drunk_hole()
866 err = i915_vma_unbind_unlocked(vma); in drunk_hole()
871 i915_vma_unpin(vma); in drunk_hole()
872 err = i915_vma_unbind_unlocked(vma); in drunk_hole()
911 struct i915_vma *vma; in __shrink_hole() local
923 vma = i915_vma_instance(obj, vm, NULL); in __shrink_hole()
924 if (IS_ERR(vma)) { in __shrink_hole()
925 err = PTR_ERR(vma); in __shrink_hole()
929 GEM_BUG_ON(vma->size != size); in __shrink_hole()
931 err = i915_vma_pin(vma, 0, 0, addr | flags); in __shrink_hole()
938 if (!drm_mm_node_allocated(&vma->node) || in __shrink_hole()
939 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in __shrink_hole()
942 i915_vma_unpin(vma); in __shrink_hole()
943 err = i915_vma_unbind_unlocked(vma); in __shrink_hole()
948 i915_vma_unpin(vma); in __shrink_hole()
956 err = i915_vma_sync(vma); in __shrink_hole()
1015 struct i915_vma *vma; in shrink_boom() local
1021 vma = i915_vma_instance(purge, vm, NULL); in shrink_boom()
1022 if (IS_ERR(vma)) { in shrink_boom()
1023 err = PTR_ERR(vma); in shrink_boom()
1027 err = i915_vma_pin(vma, 0, 0, flags); in shrink_boom()
1032 i915_vma_unpin(vma); in shrink_boom()
1044 vma = i915_vma_instance(explode, vm, NULL); in shrink_boom()
1045 if (IS_ERR(vma)) { in shrink_boom()
1046 err = PTR_ERR(vma); in shrink_boom()
1050 err = i915_vma_pin(vma, 0, 0, flags | size); in shrink_boom()
1054 i915_vma_unpin(vma); in shrink_boom()
1077 struct i915_vma *vma; in misaligned_case() local
1091 vma = i915_vma_instance(obj, vm, NULL); in misaligned_case()
1092 if (IS_ERR(vma)) { in misaligned_case()
1093 err = PTR_ERR(vma); in misaligned_case()
1097 err = i915_vma_pin(vma, 0, 0, addr | flags); in misaligned_case()
1100 i915_vma_unpin(vma); in misaligned_case()
1102 if (!drm_mm_node_allocated(&vma->node)) { in misaligned_case()
1107 if (i915_vma_misplaced(vma, 0, 0, addr | flags)) { in misaligned_case()
1112 expected_vma_size = round_up(size, 1 << (ffs(vma->resource->page_sizes_gtt) - 1)); in misaligned_case()
1127 if (vma->size != expected_vma_size || vma->node.size != expected_node_size) { in misaligned_case()
1128 err = i915_vma_unbind_unlocked(vma); in misaligned_case()
1133 err = i915_vma_unbind_unlocked(vma); in misaligned_case()
1137 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in misaligned_case()
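misaligned_case() layers one extra check on that cycle: after pinning and unpinning, it derives the size the VMA should have from the smallest GTT page size actually used for the binding and compares both vma->size and vma->node.size against expectations. A sketch of just that comparison, with expected_node_size left to the caller because its derivation does not appear in these matches:

    /* Illustrative check, not the source: compare the sizes produced by the
     * pin against what the GTT page size implies they should be.
     */
    static int check_misaligned_sizes(struct i915_vma *vma, u64 size,
                                      u64 expected_node_size)
    {
            u64 expected_vma_size;

            /* ffs() picks the smallest page-size bit used for this binding;
             * the VMA is expected to be rounded up to that granularity.
             */
            expected_vma_size = round_up(size,
                                         1 << (ffs(vma->resource->page_sizes_gtt) - 1));

            if (vma->size != expected_vma_size ||
                vma->node.size != expected_node_size)
                    return -EINVAL;     /* assumption: error value not shown above */

            return 0;
    }

On a mismatch the selftest still unbinds the VMA before returning, as the two i915_vma_unbind_unlocked() matches above show.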
1434 static void track_vma_bind(struct i915_vma *vma) in track_vma_bind() argument
1436 struct drm_i915_gem_object *obj = vma->obj; in track_vma_bind()
1440 GEM_BUG_ON(atomic_read(&vma->pages_count)); in track_vma_bind()
1441 atomic_set(&vma->pages_count, I915_VMA_PAGES_ACTIVE); in track_vma_bind()
1443 vma->pages = obj->mm.pages; in track_vma_bind()
1444 vma->resource->bi.pages = vma->pages; in track_vma_bind()
1446 mutex_lock(&vma->vm->mutex); in track_vma_bind()
1447 list_move_tail(&vma->vm_link, &vma->vm->bound_list); in track_vma_bind()
1448 mutex_unlock(&vma->vm->mutex); in track_vma_bind()
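The matches above reproduce almost all of track_vma_bind(); reassembled with comments, and with the lines that do not mention vma (e.g. whatever pins the object's pages) omitted, it reads roughly as:

    static void track_vma_bind(struct i915_vma *vma)
    {
            struct drm_i915_gem_object *obj = vma->obj;

            /* Pretend the VMA's backing store is populated and active ... */
            GEM_BUG_ON(atomic_read(&vma->pages_count));
            atomic_set(&vma->pages_count, I915_VMA_PAGES_ACTIVE);
            vma->pages = obj->mm.pages;
            vma->resource->bi.pages = vma->pages;

            /* ... and account the VMA as bound in its address space. */
            mutex_lock(&vma->vm->mutex);
            list_move_tail(&vma->vm_link, &vma->vm->bound_list);
            mutex_unlock(&vma->vm->mutex);
    }

The reserve and insert tests below call this after placing a node by hand, so the VMA looks bound without going through the real bind path.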
1502 static int reserve_gtt_with_resource(struct i915_vma *vma, u64 offset) in reserve_gtt_with_resource() argument
1504 struct i915_address_space *vm = vma->vm; in reserve_gtt_with_resource()
1506 struct drm_i915_gem_object *obj = vma->obj; in reserve_gtt_with_resource()
1514 err = i915_gem_gtt_reserve(vm, NULL, &vma->node, obj->base.size, in reserve_gtt_with_resource()
1519 i915_vma_resource_init_from_vma(vma_res, vma); in reserve_gtt_with_resource()
1520 vma->resource = vma_res; in reserve_gtt_with_resource()
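reserve_gtt_with_resource() pairs a raw i915_gem_gtt_reserve() of the node at a fixed offset with a hand-initialised VMA resource. The reserve call's trailing arguments are truncated in this listing, so in the sketch below the colour and flags values, the vm->mutex locking, and the use of i915_vma_resource_alloc()/kfree() for the resource are assumptions:

    /* Sketch under the assumptions stated above, not the source function. */
    static int reserve_node_with_resource(struct i915_vma *vma, u64 offset)
    {
            struct i915_address_space *vm = vma->vm;
            struct drm_i915_gem_object *obj = vma->obj;
            struct i915_vma_resource *vma_res;
            int err;

            vma_res = i915_vma_resource_alloc();        /* assumption */
            if (IS_ERR(vma_res))
                    return PTR_ERR(vma_res);

            mutex_lock(&vm->mutex);                     /* assumption: protects the drm_mm */
            /* Claim the node at exactly 'offset', bypassing the vma pin path. */
            err = i915_gem_gtt_reserve(vm, NULL, &vma->node, obj->base.size,
                                       offset, 0 /* colour: assumption */,
                                       0 /* flags: assumption */);
            if (!err) {
                    /* Describe the new binding with a VMA resource. */
                    i915_vma_resource_init_from_vma(vma_res, vma);
                    vma->resource = vma_res;
            } else {
                    kfree(vma_res);                     /* assumption */
            }
            mutex_unlock(&vm->mutex);

            return err;
    }

igt_gtt_reserve() below calls this, then track_vma_bind(), and finally checks that vma->node.start and vma->node.size match what was asked for.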
1547 struct i915_vma *vma; in igt_gtt_reserve() local
1563 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1564 if (IS_ERR(vma)) { in igt_gtt_reserve()
1565 err = PTR_ERR(vma); in igt_gtt_reserve()
1569 err = reserve_gtt_with_resource(vma, total); in igt_gtt_reserve()
1575 track_vma_bind(vma); in igt_gtt_reserve()
1577 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1578 if (vma->node.start != total || in igt_gtt_reserve()
1579 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1581 vma->node.start, vma->node.size, in igt_gtt_reserve()
1592 struct i915_vma *vma; in igt_gtt_reserve() local
1609 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1610 if (IS_ERR(vma)) { in igt_gtt_reserve()
1611 err = PTR_ERR(vma); in igt_gtt_reserve()
1615 err = reserve_gtt_with_resource(vma, total); in igt_gtt_reserve()
1621 track_vma_bind(vma); in igt_gtt_reserve()
1623 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1624 if (vma->node.start != total || in igt_gtt_reserve()
1625 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1627 vma->node.start, vma->node.size, in igt_gtt_reserve()
1636 struct i915_vma *vma; in igt_gtt_reserve() local
1639 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1640 if (IS_ERR(vma)) { in igt_gtt_reserve()
1641 err = PTR_ERR(vma); in igt_gtt_reserve()
1645 err = i915_vma_unbind_unlocked(vma); in igt_gtt_reserve()
1656 err = reserve_gtt_with_resource(vma, offset); in igt_gtt_reserve()
1662 track_vma_bind(vma); in igt_gtt_reserve()
1664 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1665 if (vma->node.start != offset || in igt_gtt_reserve()
1666 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1668 vma->node.start, vma->node.size, in igt_gtt_reserve()
1683 static int insert_gtt_with_resource(struct i915_vma *vma) in insert_gtt_with_resource() argument
1685 struct i915_address_space *vm = vma->vm; in insert_gtt_with_resource()
1687 struct drm_i915_gem_object *obj = vma->obj; in insert_gtt_with_resource()
1695 err = i915_gem_gtt_insert(vm, NULL, &vma->node, obj->base.size, 0, in insert_gtt_with_resource()
1698 i915_vma_resource_init_from_vma(vma_res, vma); in insert_gtt_with_resource()
1699 vma->resource = vma_res; in insert_gtt_with_resource()
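insert_gtt_with_resource() mirrors the reserve helper, except that i915_gem_gtt_insert() lets the allocator pick the slot instead of demanding a fixed offset. Only the differing call is sketched here; the alignment of 0 comes from the match above, while the colour, search range and flags are assumptions since the listing truncates the call:

            /* Let the allocator place the node anywhere in the VM. */
            err = i915_gem_gtt_insert(vm, NULL, &vma->node, obj->base.size,
                                      0 /* alignment */,
                                      0 /* colour: assumption */,
                                      0, vm->total,     /* search range: assumption */
                                      0 /* flags: assumption */);
            if (!err) {
                    i915_vma_resource_init_from_vma(vma_res, vma);
                    vma->resource = vma_res;
            }

igt_gtt_insert() then verifies the node was allocated, and after an unbind/re-insert checks that vma->node.start comes back at the remembered offset.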
1769 struct i915_vma *vma; in igt_gtt_insert() local
1786 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1787 if (IS_ERR(vma)) { in igt_gtt_insert()
1788 err = PTR_ERR(vma); in igt_gtt_insert()
1792 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1803 track_vma_bind(vma); in igt_gtt_insert()
1804 __i915_vma_pin(vma); in igt_gtt_insert()
1806 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1810 struct i915_vma *vma; in igt_gtt_insert() local
1812 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1813 if (IS_ERR(vma)) { in igt_gtt_insert()
1814 err = PTR_ERR(vma); in igt_gtt_insert()
1818 if (!drm_mm_node_allocated(&vma->node)) { in igt_gtt_insert()
1824 __i915_vma_unpin(vma); in igt_gtt_insert()
1829 struct i915_vma *vma; in igt_gtt_insert() local
1832 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1833 if (IS_ERR(vma)) { in igt_gtt_insert()
1834 err = PTR_ERR(vma); in igt_gtt_insert()
1838 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1839 offset = vma->node.start; in igt_gtt_insert()
1841 err = i915_vma_unbind_unlocked(vma); in igt_gtt_insert()
1847 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1853 track_vma_bind(vma); in igt_gtt_insert()
1855 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1856 if (vma->node.start != offset) { in igt_gtt_insert()
1858 offset, vma->node.start); in igt_gtt_insert()
1868 struct i915_vma *vma; in igt_gtt_insert() local
1885 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1886 if (IS_ERR(vma)) { in igt_gtt_insert()
1887 err = PTR_ERR(vma); in igt_gtt_insert()
1891 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1897 track_vma_bind(vma); in igt_gtt_insert()
1899 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
2010 struct i915_vma *vma; in igt_cs_tlb() local
2094 vma = i915_vma_instance(out, vm, NULL); in igt_cs_tlb()
2095 if (IS_ERR(vma)) { in igt_cs_tlb()
2096 err = PTR_ERR(vma); in igt_cs_tlb()
2100 err = i915_vma_pin(vma, 0, 0, in igt_cs_tlb()
2106 GEM_BUG_ON(vma->node.start != vm->total - PAGE_SIZE); in igt_cs_tlb()
2134 vma = i915_vma_instance(bbe, vm, NULL); in igt_cs_tlb()
2135 if (IS_ERR(vma)) { in igt_cs_tlb()
2136 err = PTR_ERR(vma); in igt_cs_tlb()
2141 err = i915_vma_get_pages(vma); in igt_cs_tlb()
2148 i915_vma_put_pages(vma); in igt_cs_tlb()
2179 i915_vma_resource_init_from_vma(vma_res, vma); in igt_cs_tlb()
2196 i915_vma_put_pages(vma); in igt_cs_tlb()
2206 vma = i915_vma_instance(act, vm, NULL); in igt_cs_tlb()
2207 if (IS_ERR(vma)) { in igt_cs_tlb()
2209 err = PTR_ERR(vma); in igt_cs_tlb()
2214 err = i915_vma_get_pages(vma); in igt_cs_tlb()
2221 i915_vma_resource_init_from_vma(vma_res, vma); in igt_cs_tlb()
2260 i915_vma_put_pages(vma); in igt_cs_tlb()
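igt_cs_tlb() handles the batch and scratch VMAs by hand rather than through i915_vma_pin(): it takes the backing pages with i915_vma_get_pages(), builds a VMA resource itself, and drops the pages again with i915_vma_put_pages(). A skeleton of that pattern, with the PTE insertion and request submission between the matches elided, and the error label purely illustrative:

            vma = i915_vma_instance(bbe, vm, NULL);
            if (IS_ERR(vma)) {
                    err = PTR_ERR(vma);
                    goto out;                           /* illustrative label */
            }

            /* Hold the backing pages without binding through i915_vma_pin(). */
            err = i915_vma_get_pages(vma);
            if (err)
                    goto out;

            /* Describe the binding by hand; allocating vma_res and writing the
             * PTEs happen in lines that do not match 'vma' and are elided here.
             */
            i915_vma_resource_init_from_vma(vma_res, vma);

            /* ... submit work against the binding ... */

            i915_vma_put_pages(vma);                    /* drop the page reference */

The scratch object's VMA, by contrast, is pinned normally and checked to sit at the very top of the address space (vm->total - PAGE_SIZE), as the earlier matches show.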