Lines matching refs: bo_va

915 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_get_memory() local
918 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_vm_get_memory()
919 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
921 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
924 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_vm_get_memory()
925 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
927 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
930 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_vm_get_memory()
931 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
933 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
936 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_vm_get_memory()
937 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
939 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
942 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_vm_get_memory()
943 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
945 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
948 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_vm_get_memory()
949 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
951 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
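
All six fragments above (915-951) are one pattern repeated per status list: amdgpu_vm_get_memory() walks idle, evicted, relocated, moved, invalidated and done, skips any bo_va without a backing BO, and lets amdgpu_bo_get_memory() add that BO's footprint to the caller's counters. A minimal sketch of the per-list step; the helper name is hypothetical and the trailing counter arguments are an assumption, since the listing truncates the call:

	static void vm_sum_status_list(struct list_head *head, uint64_t *vram_mem,
				       uint64_t *gtt_mem, uint64_t *cpu_mem)
	{
		struct amdgpu_bo_va *bo_va, *tmp;

		/* _safe iteration tolerates entries leaving the list */
		list_for_each_entry_safe(bo_va, tmp, head, base.vm_status) {
			if (!bo_va->base.bo)
				continue;	/* bo_va without a backing BO */
			/* assumed trailing args: GTT and CPU counters */
			amdgpu_bo_get_memory(bo_va->base.bo, vram_mem,
					     gtt_mem, cpu_mem);
		}
	}
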
968 int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
971 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
972 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
990 if (obj->import_attach && bo_va->is_xgmi) { in amdgpu_vm_bo_update()
1023 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1025 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1027 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1029 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1030 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1033 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1067 amdgpu_vm_bo_evicted(&bo_va->base); in amdgpu_vm_bo_update()
1069 amdgpu_vm_bo_idle(&bo_va->base); in amdgpu_vm_bo_update()
1071 amdgpu_vm_bo_done(&bo_va->base); in amdgpu_vm_bo_update()
1074 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1075 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1076 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1079 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
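
The splices at 1027, 1030 and 1074 are the heart of the bo_va bookkeeping: valids holds mappings whose page-table entries are committed, invalids holds mappings awaiting (re)write. A non-clearing update of a moved BO (1025), or a flip of the clear state since the last update (1029), pushes every valid mapping back onto invalids; after the update loop over invalids (1033) everything is spliced into valids and the flags settle (1075-1076). The bo_va is then re-filed onto the evicted, idle or done list (1067-1071), broadly depending on whether the BO shares the VM's reservation and sits in a preferred domain. Condensed control flow; error paths and the actual page-table writes are omitted:

	if (!clear && bo_va->base.moved) {
		/* the BO moved: every committed mapping is stale */
		list_splice_init(&bo_va->valids, &bo_va->invalids);
	} else if (bo_va->cleared != clear) {
		/* switching between clearing and mapping PTEs */
		list_splice_init(&bo_va->valids, &bo_va->invalids);
	}

	list_for_each_entry(mapping, &bo_va->invalids, list) {
		/* (re)write the page-table entries for this mapping */
	}

	list_splice_init(&bo_va->invalids, &bo_va->valids);
	bo_va->cleared = clear;
	bo_va->base.moved = false;
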
1283 struct amdgpu_bo_va *bo_va; in amdgpu_vm_handle_moved() local
1290 bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1295 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_vm_handle_moved()
1302 bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
1304 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
1314 r = amdgpu_vm_bo_update(adev, bo_va, clear); in amdgpu_vm_handle_moved()
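
amdgpu_vm_handle_moved() drains two dirty lists. Entries on vm->moved share the VM's reservation object and can always be updated with clear = false (1295); entries on vm->invalidated carry their own reservation (1304), so the update is attempted under a trylock and falls back to clearing the mappings when the lock is contended. A sketch under those assumptions; the per-VM status locking around the list peeks is omitted:

	struct amdgpu_bo_va *bo_va;
	struct dma_resv *resv;
	bool clear;
	int r;

	while (!list_empty(&vm->moved)) {
		bo_va = list_first_entry(&vm->moved, struct amdgpu_bo_va,
					 base.vm_status);
		r = amdgpu_vm_bo_update(adev, bo_va, false);
		if (r)
			return r;
	}

	while (!list_empty(&vm->invalidated)) {
		bo_va = list_first_entry(&vm->invalidated,
					 struct amdgpu_bo_va, base.vm_status);
		resv = bo_va->base.bo->tbo.base.resv;

		/* assumption: an uncontended resv lets us update in
		 * place, otherwise the mappings are cleared instead */
		clear = !dma_resv_trylock(resv);

		r = amdgpu_vm_bo_update(adev, bo_va, clear);
		if (!clear)
			dma_resv_unlock(resv);
		if (r)
			return r;
	}
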
1346 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
1348 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
1349 if (bo_va == NULL) { in amdgpu_vm_bo_add()
1352 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
1354 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
1355 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
1356 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
1359 return bo_va; in amdgpu_vm_bo_add()
1363 bo_va->is_xgmi = true; in amdgpu_vm_bo_add()
1368 return bo_va; in amdgpu_vm_bo_add()
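
amdgpu_vm_bo_add() is the constructor for the structure referenced throughout this listing: zero-allocated, one reference, both mapping lists empty, and base linked to the vm/bo pair. The early return at 1359 and the is_xgmi assignment at 1363 suggest a NULL-bo shortcut followed by an XGMI accessibility check; the guard condition below is an assumption condensed from that reading:

	struct amdgpu_bo_va *bo_va;

	bo_va = kzalloc(sizeof(*bo_va), GFP_KERNEL);
	if (bo_va == NULL)
		return NULL;

	amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);

	bo_va->ref_count = 1;
	INIT_LIST_HEAD(&bo_va->valids);
	INIT_LIST_HEAD(&bo_va->invalids);

	if (!bo)
		return bo_va;	/* bo_va without a BO, cf. 1359 */

	/* assumed guard: peer BO accessible over an XGMI link */
	if (amdgpu_dmabuf_is_xgmi_accessible(adev, bo))
		bo_va->is_xgmi = true;

	return bo_va;
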
1382 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
1385 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
1386 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
1388 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
1389 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
1396 !bo_va->base.moved) { in amdgpu_vm_bo_insert_map()
1397 amdgpu_vm_bo_moved(&bo_va->base); in amdgpu_vm_bo_insert_map()
1399 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
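
New mappings always start life on invalids (1389) and point back at their bo_va (1388); the bo_va itself is only pushed onto the moved list (1397) when its BO shares the VM's reservation object and has not been flagged already (1396). A condensed sketch; the interval-tree insert between those steps is inferred from amdgpu_vm.c's conventions, and the field path to the root reservation varies across kernel versions:

	mapping->bo_va = bo_va;
	list_add(&mapping->list, &bo_va->invalids);
	amdgpu_vm_it_insert(mapping, &vm->va);

	/* only a BO sharing the VM reservation can be re-validated
	 * through the moved list */
	if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv &&
	    !bo_va->base.moved)
		amdgpu_vm_bo_moved(&bo_va->base);

	trace_amdgpu_vm_bo_map(bo_va, mapping);
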
1420 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
1425 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
1426 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
1462 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
1486 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
1491 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
1512 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
1526 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
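
Both entry points funnel into amdgpu_vm_bo_insert_map() (1462, 1526); they differ in how an occupied range is treated: amdgpu_vm_bo_map() fails validation, while amdgpu_vm_bo_replace_map() first clears the colliding range through amdgpu_vm_bo_clear_mappings() (1512). A hypothetical caller-side sketch; the address, size, flags and the -EINVAL collision code are illustrative assumptions:

	uint64_t saddr = 0x100000, offset = 0, size = 0x10000;
	int r;

	r = amdgpu_vm_bo_map(adev, bo_va, saddr, offset, size,
			     AMDGPU_PTE_READABLE);
	if (r == -EINVAL) {
		/* range rejected (e.g. it overlaps an existing
		 * mapping); the _replace_ variant clears it first */
		r = amdgpu_vm_bo_replace_map(adev, bo_va, saddr, offset,
					     size, AMDGPU_PTE_READABLE);
	}
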
1546 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
1550 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
1555 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
1560 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
1563 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
1568 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
1574 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
1575 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
1581 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
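
amdgpu_vm_bo_unmap() looks the address up in valids first and falls back to invalids; the `&mapping->list == &bo_va->valids` test (1560) is the list_for_each_entry idiom for "the loop ran to completion without a match". A mapping found in valids has live page-table entries, so it must go to vm->freed for a later PTE clear, while a still-invalid mapping can be freed immediately against the last update fence (1581). Condensed sketch; the page-size scaling and the -ENOENT return are assumptions:

	struct amdgpu_bo_va_mapping *mapping;
	bool valid = true;

	saddr /= AMDGPU_GPU_PAGE_SIZE;	/* mappings store page numbers */

	list_for_each_entry(mapping, &bo_va->valids, list) {
		if (mapping->start == saddr)
			break;
	}
	if (&mapping->list == &bo_va->valids) {
		valid = false;
		list_for_each_entry(mapping, &bo_va->invalids, list) {
			if (mapping->start == saddr)
				break;
		}
		if (&mapping->list == &bo_va->invalids)
			return -ENOENT;
	}

	list_del(&mapping->list);
	amdgpu_vm_it_remove(mapping, &vm->va);
	mapping->bo_va = NULL;
	trace_amdgpu_vm_bo_unmap(bo_va, mapping);

	if (valid)
		list_add(&mapping->list, &vm->freed);
	else
		amdgpu_vm_free_mapping(adev, vm, mapping,
				       bo_va->last_pt_update);
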
1633 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1634 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1644 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
1645 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
1664 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
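
When a cleared range cuts through an existing mapping, amdgpu_vm_bo_clear_mappings() splits it: the surviving head and tail pieces inherit the original bo_va (1633, 1644) and are queued on its invalids list for re-validation (1634, 1645), while a mapping swallowed whole is detached by NULLing its bo_va back-pointer (1664) before teardown. Sketch of the split, assuming saddr/eaddr bound the cleared range and tmp is the straddling mapping; the offset and flag fixups are abbreviated:

	if (tmp->start < saddr) {
		before->start = tmp->start;
		before->last = saddr - 1;
		before->offset = tmp->offset;	/* head keeps its offset */
		before->flags = tmp->flags;
		before->bo_va = tmp->bo_va;
		list_add(&before->list, &tmp->bo_va->invalids);
	}

	if (tmp->last > eaddr) {
		after->start = eaddr + 1;
		after->last = tmp->last;
		/* tail offset advances past the removed middle */
		after->offset = tmp->offset +
			((after->start - tmp->start) << PAGE_SHIFT);
		after->flags = tmp->flags;
		after->bo_va = tmp->bo_va;
		list_add(&after->list, &tmp->bo_va->invalids);
	}

	tmp->bo_va = NULL;	/* the middle piece is detached */
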
1725 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
1728 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
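
amdgpu_vm_bo_trace_cs() only emits events for mappings still attached to a bo_va with a backing BO (1725); pieces detached at 1664, or bo_va entries that lost their BO, are skipped. A condensed sketch; the event name amdgpu_vm_bo_cs and the ticket check are assumptions based on the driver's usual command-submission tracing:

	if (mapping->bo_va && mapping->bo_va->base.bo) {
		struct amdgpu_bo *bo = mapping->bo_va->base.bo;

		/* assumed: trace only BOs locked under the CS ticket */
		if (dma_resv_locking_ctx(bo->tbo.base.resv) == ticket)
			trace_amdgpu_vm_bo_cs(mapping);
	}
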
1749 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_del() argument
1752 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_del()
1753 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_del()
1763 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_del()
1765 if (*base != &bo_va->base) in amdgpu_vm_bo_del()
1768 *base = bo_va->base.next; in amdgpu_vm_bo_del()
1774 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_del()
1777 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_del()
1780 mapping->bo_va = NULL; in amdgpu_vm_bo_del()
1781 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_del()
1784 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_del()
1788 bo_va->last_pt_update); in amdgpu_vm_bo_del()
1791 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_del()
1793 if (bo && bo_va->is_xgmi) in amdgpu_vm_bo_del()
1796 kfree(bo_va); in amdgpu_vm_bo_del()
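
amdgpu_vm_bo_del() undoes everything above in order: unlink the bo_va from the BO's singly linked vm_bo chain (1763-1768), drop it from its status list (1774), push still-committed valids onto vm->freed so their PTEs get cleared later (1777-1781), free never-committed invalids immediately against the last page-table fence (1784-1788), then release the fence and the structure itself. Condensed sketch; locking and the XGMI pstate bookkeeping behind 1793 are omitted:

	struct amdgpu_bo_va_mapping *mapping, *next;
	struct amdgpu_vm_bo_base **base;

	if (bo) {
		for (base = &bo_va->base.bo->vm_bo; *base;
		     base = &(*base)->next) {
			if (*base != &bo_va->base)
				continue;
			*base = bo_va->base.next;
			break;
		}
	}

	list_del(&bo_va->base.vm_status);

	list_for_each_entry_safe(mapping, next, &bo_va->valids, list) {
		list_del(&mapping->list);
		amdgpu_vm_it_remove(mapping, &vm->va);
		mapping->bo_va = NULL;
		trace_amdgpu_vm_bo_unmap(bo_va, mapping);
		list_add(&mapping->list, &vm->freed);
	}
	list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) {
		list_del(&mapping->list);
		amdgpu_vm_it_remove(mapping, &vm->va);
		amdgpu_vm_free_mapping(adev, vm, mapping,
				       bo_va->last_pt_update);
	}

	dma_fence_put(bo_va->last_pt_update);
	kfree(bo_va);
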
2550 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_debugfs_vm_bo_info() local
2567 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2568 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2570 total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2576 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2577 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2579 total_evicted += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2585 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2586 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2588 total_relocated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2594 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2595 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2597 total_moved += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2603 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2604 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2606 total_invalidated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
2612 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
2613 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
2615 total_done += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
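
The debugfs dump repeats the amdgpu_vm_get_memory() walk, but prints one line per BO via amdgpu_bo_print_info() and totals each state; id numbers the BOs consecutively across all six lists. One list's step, sketched; the seq_file format string is illustrative:

	total_idle = 0;
	list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) {
		if (!bo_va->base.bo)
			continue;
		total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m);
	}
	seq_printf(m, "\tTotal idle size: %12lld\n", total_idle);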