Lines Matching refs:base_gfn
532 cur_gfn = memslot->base_gfn; in kvm_s390_sync_dirty_log()
533 last_gfn = memslot->base_gfn + memslot->npages; in kvm_s390_sync_dirty_log()
1642 if (gfn >= memslots[slot].base_gfn && in gfn_to_memslot_approx()
1643 gfn < memslots[slot].base_gfn + memslots[slot].npages) in gfn_to_memslot_approx()
1649 if (gfn >= memslots[slot].base_gfn) in gfn_to_memslot_approx()
1655 if (gfn >= memslots[start].base_gfn && in gfn_to_memslot_approx()
1656 gfn < memslots[start].base_gfn + memslots[start].npages) { in gfn_to_memslot_approx()
1691 unsigned long ofs = cur_gfn - ms->base_gfn; in kvm_s390_next_dirty_cmma()
1693 if (ms->base_gfn + ms->npages <= cur_gfn) { in kvm_s390_next_dirty_cmma()
1708 return ms->base_gfn + ofs; in kvm_s390_next_dirty_cmma()
1725 mem_end = slots->memslots[0].base_gfn + slots->memslots[0].npages; in kvm_s390_get_cmma()
1732 if (test_and_clear_bit(cur_gfn - ms->base_gfn, kvm_second_dirty_bitmap(ms))) in kvm_s390_get_cmma()
1750 if (cur_gfn - ms->base_gfn >= ms->npages) { in kvm_s390_get_cmma()
4167 old->base_gfn * PAGE_SIZE == mem->guest_phys_addr && in kvm_arch_commit_memory_region()
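
All of these hits revolve around the same arithmetic: a memslot covers the guest frame numbers [base_gfn, base_gfn + npages), the flat memslot array is kept sorted by descending base_gfn so a gfn can be located by binary search (the gfn_to_memslot_approx hits at 1642-1656, and why memslots[0] marks the end of guest memory at 1725), and a gfn's bit in a per-slot bitmap is simply gfn - base_gfn (the kvm_s390_next_dirty_cmma and kvm_s390_get_cmma hits). The sketch below is a minimal userspace reconstruction of that pattern under those assumptions, not the kernel code itself; demo_memslot, slot_contains, find_memslot and the main() driver are illustrative names that do not appear in kvm-s390.c.

/*
 * Minimal sketch (not kernel code) of the base_gfn arithmetic used by
 * the matches above: range containment, binary search over slots sorted
 * by descending base_gfn, and gfn - base_gfn as a per-slot bitmap index.
 */
#include <stdio.h>
#include <stdbool.h>

typedef unsigned long long gfn_t;

struct demo_memslot {
	gfn_t base_gfn;          /* first guest frame number in the slot */
	unsigned long npages;    /* number of guest pages in the slot */
};

/* Does the slot contain this gfn?  Mirrors the range test in the hits above. */
static bool slot_contains(const struct demo_memslot *ms, gfn_t gfn)
{
	return gfn >= ms->base_gfn && gfn < ms->base_gfn + ms->npages;
}

/*
 * Binary search over slots sorted by descending base_gfn.  Finds the first
 * index whose base_gfn is <= gfn, then checks containment; returns -1 if
 * the gfn falls in a hole between slots.
 */
static int find_memslot(const struct demo_memslot *slots, int used_slots, gfn_t gfn)
{
	int start = 0, end = used_slots;

	while (start < end) {
		int mid = start + (end - start) / 2;

		if (gfn >= slots[mid].base_gfn)
			end = mid;
		else
			start = mid + 1;
	}
	return start < used_slots && slot_contains(&slots[start], gfn) ? start : -1;
}

int main(void)
{
	/* Two slots, sorted by descending base_gfn, as the search expects. */
	const struct demo_memslot slots[] = {
		{ .base_gfn = 0x100000, .npages = 0x200 },
		{ .base_gfn = 0x000000, .npages = 0x400 },
	};
	gfn_t gfn = 0x100042;
	int idx = find_memslot(slots, 2, gfn);

	if (idx >= 0)
		printf("gfn 0x%llx -> slot %d, bitmap bit %llu\n",
		       gfn, idx, gfn - slots[idx].base_gfn);
	return 0;
}

Because the slots are ordered by descending base_gfn, slots[0].base_gfn + slots[0].npages is the highest guest frame number that is backed at all, which is exactly how the kvm_s390_get_cmma hit at 1725 computes mem_end.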