Lines matching refs:iterator in arch/x86/kvm/mmu/mmu.c
1505 static void rmap_walk_init_level(struct slot_rmap_walk_iterator *iterator, in rmap_walk_init_level() argument
1508 iterator->level = level; in rmap_walk_init_level()
1509 iterator->gfn = iterator->start_gfn; in rmap_walk_init_level()
1510 iterator->rmap = gfn_to_rmap(iterator->gfn, level, iterator->slot); in rmap_walk_init_level()
1511 iterator->end_rmap = gfn_to_rmap(iterator->end_gfn, level, iterator->slot); in rmap_walk_init_level()
1514 static void slot_rmap_walk_init(struct slot_rmap_walk_iterator *iterator, in slot_rmap_walk_init() argument
1519 iterator->slot = slot; in slot_rmap_walk_init()
1520 iterator->start_level = start_level; in slot_rmap_walk_init()
1521 iterator->end_level = end_level; in slot_rmap_walk_init()
1522 iterator->start_gfn = start_gfn; in slot_rmap_walk_init()
1523 iterator->end_gfn = end_gfn; in slot_rmap_walk_init()
1525 rmap_walk_init_level(iterator, iterator->start_level); in slot_rmap_walk_init()
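The initializer above simply latches its parameters and primes the first level. A sketch of the iterator state these helpers fill in, with field names and types inferred from the accesses in this listing (the real definition lives alongside these functions in mmu.c; ordering and comments here are assumed):

    struct slot_rmap_walk_iterator {
            /* input fields */
            const struct kvm_memory_slot *slot;
            gfn_t start_gfn;
            gfn_t end_gfn;
            int start_level;
            int end_level;

            /* output fields */
            gfn_t gfn;
            struct kvm_rmap_head *rmap;
            int level;

            /* private field, used by the walker itself */
            struct kvm_rmap_head *end_rmap;
    };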
1528 static bool slot_rmap_walk_okay(struct slot_rmap_walk_iterator *iterator) in slot_rmap_walk_okay() argument
1530 return !!iterator->rmap; in slot_rmap_walk_okay()
1533 static void slot_rmap_walk_next(struct slot_rmap_walk_iterator *iterator) in slot_rmap_walk_next() argument
1535 while (++iterator->rmap <= iterator->end_rmap) { in slot_rmap_walk_next()
1536 iterator->gfn += (1UL << KVM_HPAGE_GFN_SHIFT(iterator->level)); in slot_rmap_walk_next()
1538 if (iterator->rmap->val) in slot_rmap_walk_next()
1542 if (++iterator->level > iterator->end_level) { in slot_rmap_walk_next()
1543 iterator->rmap = NULL; in slot_rmap_walk_next()
1547 rmap_walk_init_level(iterator, iterator->level); in slot_rmap_walk_next()
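Within one level, slot_rmap_walk_next() advances through consecutive rmap heads, bumping gfn by one page stride of the current level and skipping heads with no mappings (!iterator->rmap->val); once the level is exhausted it either re-inits at the next level or sets rmap to NULL, which is exactly the termination condition slot_rmap_walk_okay() tests. The three helpers compose into a for-loop macro; a sketch of for_each_slot_rmap_range(), whose invocation tails appear at lines 1569 and 5921 below (whitespace approximate):

    #define for_each_slot_rmap_range(_slot_, _start_level_, _end_level_,   \
                                     _start_gfn, _end_gfn, _iter_)         \
            for (slot_rmap_walk_init(_iter_, _slot_, _start_level_,        \
                                     _end_level_, _start_gfn, _end_gfn);   \
                 slot_rmap_walk_okay(_iter_);                              \
                 slot_rmap_walk_next(_iter_))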
1565 struct slot_rmap_walk_iterator iterator; in kvm_handle_gfn_range() local
1569 range->start, range->end - 1, &iterator) in kvm_handle_gfn_range()
1570 ret |= handler(kvm, iterator.rmap, range->slot, iterator.gfn, in kvm_handle_gfn_range()
1571 iterator.level, range->arg.pte); in kvm_handle_gfn_range()
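Lines 1569-1571 are the tail and body of a for_each_slot_rmap_range() invocation whose opening line carries no `iterator` reference and is therefore absent here. A hedged reconstruction of the full statement, assuming the level bounds contemporaneous kernels pass (PG_LEVEL_4K through KVM_MAX_HUGEPAGE_LEVEL), and noting that the walker's end_gfn is inclusive while kvm_gfn_range.end is exclusive, hence the `- 1`:

    for_each_slot_rmap_range(range->slot, PG_LEVEL_4K,
                             KVM_MAX_HUGEPAGE_LEVEL,
                             range->start, range->end - 1, &iterator)
            ret |= handler(kvm, iterator.rmap, range->slot, iterator.gfn,
                           iterator.level, range->arg.pte);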
2360 static void shadow_walk_init_using_root(struct kvm_shadow_walk_iterator *iterator, in shadow_walk_init_using_root() argument
2364 iterator->addr = addr; in shadow_walk_init_using_root()
2365 iterator->shadow_addr = root; in shadow_walk_init_using_root()
2366 iterator->level = vcpu->arch.mmu->root_role.level; in shadow_walk_init_using_root()
2368 if (iterator->level >= PT64_ROOT_4LEVEL && in shadow_walk_init_using_root()
2371 iterator->level = PT32E_ROOT_LEVEL; in shadow_walk_init_using_root()
2373 if (iterator->level == PT32E_ROOT_LEVEL) { in shadow_walk_init_using_root()
2380 iterator->shadow_addr in shadow_walk_init_using_root()
2382 iterator->shadow_addr &= SPTE_BASE_ADDR_MASK; in shadow_walk_init_using_root()
2383 --iterator->level; in shadow_walk_init_using_root()
2384 if (!iterator->shadow_addr) in shadow_walk_init_using_root()
2385 iterator->level = 0; in shadow_walk_init_using_root()
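Lines 2373-2385 handle a 64-bit host shadowing 32-bit PAE paging: the walk is re-rooted at one of the guest's four page-directory-pointer entries, selected by bits 31:30 of addr. The assignment at line 2380 is cut short by the identifier match; its continuation in the source is the pae_root lookup, roughly:

    if (iterator->level == PT32E_ROOT_LEVEL) {
            /* bits 31:30 of the address select one of the 4 PDPTEs */
            iterator->shadow_addr
                    = vcpu->arch.mmu->pae_root[(addr >> 30) & 3];
            iterator->shadow_addr &= SPTE_BASE_ADDR_MASK;
            --iterator->level;
            /* a non-present PDPTE ends the walk before it starts */
            if (!iterator->shadow_addr)
                    iterator->level = 0;
    }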
2389 static void shadow_walk_init(struct kvm_shadow_walk_iterator *iterator, in shadow_walk_init() argument
2392 shadow_walk_init_using_root(iterator, vcpu, vcpu->arch.mmu->root.hpa, in shadow_walk_init()
2396 static bool shadow_walk_okay(struct kvm_shadow_walk_iterator *iterator) in shadow_walk_okay() argument
2398 if (iterator->level < PG_LEVEL_4K) in shadow_walk_okay()
2401 iterator->index = SPTE_INDEX(iterator->addr, iterator->level); in shadow_walk_okay()
2402 iterator->sptep = ((u64 *)__va(iterator->shadow_addr)) + iterator->index; in shadow_walk_okay()
2406 static void __shadow_walk_next(struct kvm_shadow_walk_iterator *iterator, in __shadow_walk_next() argument
2409 if (!is_shadow_present_pte(spte) || is_last_spte(spte, iterator->level)) { in __shadow_walk_next()
2410 iterator->level = 0; in __shadow_walk_next()
2414 iterator->shadow_addr = spte & SPTE_BASE_ADDR_MASK; in __shadow_walk_next()
2415 --iterator->level; in __shadow_walk_next()
2418 static void shadow_walk_next(struct kvm_shadow_walk_iterator *iterator) in shadow_walk_next() argument
2420 __shadow_walk_next(iterator, *iterator->sptep); in shadow_walk_next()
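Like the rmap walker, the shadow walker is consumed through for-loop macros; sketches of the two variants this listing uses, for_each_shadow_entry_using_root() (line 5787) and for_each_shadow_entry_lockless() (lines 3413 and 4205), with whitespace approximate. The lockless variant samples the SPTE inside the loop condition so that __shadow_walk_next() can descend from the value just read rather than dereferencing sptep again:

    #define for_each_shadow_entry_using_root(_vcpu, _root, _addr, _walker)     \
            for (shadow_walk_init_using_root(&(_walker), _vcpu, _root, _addr); \
                 shadow_walk_okay(&(_walker));                                 \
                 shadow_walk_next(&(_walker)))

    #define for_each_shadow_entry_lockless(_vcpu, _addr, _walker, spte)    \
            for (shadow_walk_init(&(_walker), _vcpu, _addr);               \
                 shadow_walk_okay(&(_walker)) &&                           \
                    ({ spte = mmu_spte_get_lockless(_walker.sptep); 1; }); \
                 __shadow_walk_next(&(_walker), spte))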
3409 struct kvm_shadow_walk_iterator iterator; in fast_pf_get_last_sptep() local
3413 for_each_shadow_entry_lockless(vcpu, gpa, iterator, old_spte) { in fast_pf_get_last_sptep()
3414 sptep = iterator.sptep; in fast_pf_get_last_sptep()
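The loop body at line 3414 overwrites sptep on every level, so whatever the walk visited last, i.e. the lowest reachable SPTE for gpa, is what gets returned. A hedged reconstruction of the surrounding function, with locals inferred from the loop at line 3413:

    static u64 *fast_pf_get_last_sptep(struct kvm_vcpu *vcpu, gpa_t gpa,
                                       u64 *spte)
    {
            struct kvm_shadow_walk_iterator iterator;
            u64 old_spte;
            u64 *sptep = NULL;

            for_each_shadow_entry_lockless(vcpu, gpa, iterator, old_spte) {
                    sptep = iterator.sptep;
                    *spte = old_spte;       /* value seen at the final level */
            }

            return sptep;
    }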
4079 struct kvm_shadow_walk_iterator iterator; in get_walk() local
4083 for (shadow_walk_init(&iterator, vcpu, addr), in get_walk()
4084 *root_level = iterator.level; in get_walk()
4085 shadow_walk_okay(&iterator); in get_walk()
4086 __shadow_walk_next(&iterator, spte)) { in get_walk()
4087 leaf = iterator.level; in get_walk()
4088 spte = mmu_spte_get_lockless(iterator.sptep); in get_walk()
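get_walk() is the one caller that spells the for-loop out instead of using a macro, because it also wants the root level: *root_level is captured in the loop's init clause (line 4084) before the first shadow_walk_okay() check, and leaf tracks the last level visited. The store into the caller's array mentions no `iterator` and so is missing above; the likely full shape:

    for (shadow_walk_init(&iterator, vcpu, addr),
         *root_level = iterator.level;
         shadow_walk_okay(&iterator);
         __shadow_walk_next(&iterator, spte)) {
            leaf = iterator.level;
            spte = mmu_spte_get_lockless(iterator.sptep);
            sptes[leaf] = spte;
    }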
4201 struct kvm_shadow_walk_iterator iterator; in shadow_page_table_clear_flood() local
4205 for_each_shadow_entry_lockless(vcpu, addr, iterator, spte) in shadow_page_table_clear_flood()
4206 clear_sp_write_flooding_count(iterator.sptep); in shadow_page_table_clear_flood()
5771 struct kvm_shadow_walk_iterator iterator; in __kvm_mmu_invalidate_addr() local
5787 for_each_shadow_entry_using_root(vcpu, root_hpa, addr, iterator) { in __kvm_mmu_invalidate_addr()
5788 struct kvm_mmu_page *sp = sptep_to_sp(iterator.sptep); in __kvm_mmu_invalidate_addr()
5791 int ret = kvm_sync_spte(vcpu, sp, iterator.index); in __kvm_mmu_invalidate_addr()
5794 mmu_page_zap_pte(vcpu->kvm, sp, iterator.sptep, NULL); in __kvm_mmu_invalidate_addr()
5796 kvm_flush_remote_tlbs_sptep(vcpu->kvm, iterator.sptep); in __kvm_mmu_invalidate_addr()
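The guards around lines 5788-5796 are elided by the match: in the source the sync is attempted only for unsync shadow pages, the zap only when that sync fails, and the remote TLB flush whenever kvm_sync_spte() reports a change. Roughly:

    for_each_shadow_entry_using_root(vcpu, root_hpa, addr, iterator) {
            struct kvm_mmu_page *sp = sptep_to_sp(iterator.sptep);

            if (sp->unsync) {
                    int ret = kvm_sync_spte(vcpu, sp, iterator.index);

                    if (ret < 0)
                            mmu_page_zap_pte(vcpu->kvm, sp,
                                             iterator.sptep, NULL);
                    if (ret)
                            kvm_flush_remote_tlbs_sptep(vcpu->kvm,
                                                        iterator.sptep);
            }
    }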
5916 struct slot_rmap_walk_iterator iterator; in __walk_slot_rmaps() local
5921 end_gfn, &iterator) { in __walk_slot_rmaps()
5922 if (iterator.rmap) in __walk_slot_rmaps()
5923 flush |= fn(kvm, iterator.rmap, slot); in __walk_slot_rmaps()
5928 iterator.gfn - start_gfn + 1); in __walk_slot_rmaps()
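Line 5928 is the tail of a ranged TLB flush issued when the walk yields the mmu_lock: iterator.gfn holds the last GFN processed, so flushing start_gfn through iterator.gfn covers exactly the range already walked before the lock is dropped. A hedged reconstruction of the enclosing block (flush_on_yield is a parameter of __walk_slot_rmaps(); the flush helper's name has varied across kernel versions):

    if (need_resched() || rwlock_needbreak(&kvm->mmu_lock)) {
            if (flush && flush_on_yield) {
                    kvm_flush_remote_tlbs_range(kvm, start_gfn,
                                                iterator.gfn - start_gfn + 1);
                    flush = false;
            }
            cond_resched_rwlock_write(&kvm->mmu_lock);
    }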