Searched refs:memslot (Results 1 – 25 of 29) sorted by relevance

/Linux-v4.19/arch/powerpc/kvm/
book3s_64_mmu_hv.c
215 void kvmppc_map_vrma(struct kvm_vcpu *vcpu, struct kvm_memory_slot *memslot, in kvmppc_map_vrma() argument
229 npages = memslot->npages >> (porder - PAGE_SHIFT); in kvmppc_map_vrma()
495 struct kvm_memory_slot *memslot; in kvmppc_book3s_hv_page_fault() local
557 memslot = gfn_to_memslot(kvm, gfn); in kvmppc_book3s_hv_page_fault()
559 trace_kvm_page_fault_enter(vcpu, hpte, memslot, ea, dsisr); in kvmppc_book3s_hv_page_fault()
562 if (!memslot || (memslot->flags & KVM_MEMSLOT_INVALID)) in kvmppc_book3s_hv_page_fault()
570 if (gfn_base < memslot->base_gfn) in kvmppc_book3s_hv_page_fault()
585 hva = gfn_to_hva_memslot(memslot, gfn); in kvmppc_book3s_hv_page_fault()
681 rmap = &memslot->arch.rmap[gfn_base - memslot->base_gfn]; in kvmppc_book3s_hv_page_fault()
741 struct kvm_memory_slot *memslot; in kvmppc_rmap_reset() local
[all …]
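
The page-fault path above shows a pattern that recurs throughout these results: look the guest frame number (gfn) up in a memslot, reject missing or KVM_MEMSLOT_INVALID slots, then turn the gfn into a host virtual address inside the slot's userspace mapping. A minimal self-contained sketch of that arithmetic, using toy types and helper names (toy_memslot, toy_gfn_to_memslot, toy_gfn_to_hva) rather than the kernel's, might look like this:

```c
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT   12
#define SLOT_INVALID (1u << 0)   /* stand-in for KVM_MEMSLOT_INVALID */

/* Toy slot: only the fields the lookups above rely on. */
struct toy_memslot {
	uint64_t base_gfn;        /* first guest frame number covered  */
	uint64_t npages;          /* number of guest pages in the slot */
	uint64_t userspace_addr;  /* hva backing base_gfn              */
	uint32_t flags;
};

/* Roughly what gfn_to_memslot() does: find the slot covering gfn. */
static struct toy_memslot *toy_gfn_to_memslot(struct toy_memslot *slots,
					       int nslots, uint64_t gfn)
{
	for (int i = 0; i < nslots; i++)
		if (gfn >= slots[i].base_gfn &&
		    gfn < slots[i].base_gfn + slots[i].npages)
			return &slots[i];
	return NULL;
}

/* Roughly what __gfn_to_hva_memslot() does. */
static uint64_t toy_gfn_to_hva(const struct toy_memslot *slot, uint64_t gfn)
{
	return slot->userspace_addr + ((gfn - slot->base_gfn) << PAGE_SHIFT);
}

int main(void)
{
	struct toy_memslot slots[] = {
		{ .base_gfn = 0x100, .npages = 256,
		  .userspace_addr = 0x7f0000000000ull },
	};
	uint64_t gfn = 0x123;
	struct toy_memslot *slot = toy_gfn_to_memslot(slots, 1, gfn);

	/* Mirrors the !memslot / KVM_MEMSLOT_INVALID check in the fault path. */
	if (!slot || (slot->flags & SLOT_INVALID))
		return 1;

	printf("gfn 0x%llx -> hva 0x%llx\n",
	       (unsigned long long)gfn,
	       (unsigned long long)toy_gfn_to_hva(slot, gfn));
	return 0;
}
```
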
book3s_64_mmu_radix.c
209 struct kvm_memory_slot *memslot; in kvmppc_unmap_pte() local
211 memslot = gfn_to_memslot(kvm, gfn); in kvmppc_unmap_pte()
212 if (memslot && memslot->dirty_bitmap) in kvmppc_unmap_pte()
213 kvmppc_update_dirty_map(memslot, gfn, page_size); in kvmppc_unmap_pte()
530 struct kvm_memory_slot *memslot; in kvmppc_book3s_radix_page_fault() local
558 memslot = gfn_to_memslot(kvm, gfn); in kvmppc_book3s_radix_page_fault()
561 if (!memslot || (memslot->flags & KVM_MEMSLOT_INVALID)) { in kvmppc_book3s_radix_page_fault()
576 if (memslot->flags & KVM_MEM_READONLY) { in kvmppc_book3s_radix_page_fault()
624 hva = gfn_to_hva_memslot(memslot, gfn); in kvmppc_book3s_radix_page_fault()
631 pfn = __gfn_to_pfn_memslot(memslot, gfn, false, NULL, in kvmppc_book3s_radix_page_fault()
[all …]
book3s_hv_rm_mmu.c
110 void kvmppc_update_dirty_map(struct kvm_memory_slot *memslot, in kvmppc_update_dirty_map() argument
115 if (!psize || !memslot->dirty_bitmap) in kvmppc_update_dirty_map()
118 gfn -= memslot->base_gfn; in kvmppc_update_dirty_map()
119 set_dirty_bits_atomic(memslot->dirty_bitmap, gfn, npages); in kvmppc_update_dirty_map()
126 struct kvm_memory_slot *memslot; in kvmppc_set_dirty_from_hpte() local
132 memslot = __gfn_to_memslot(kvm_memslots_raw(kvm), gfn); in kvmppc_set_dirty_from_hpte()
133 if (memslot && memslot->dirty_bitmap) in kvmppc_set_dirty_from_hpte()
134 kvmppc_update_dirty_map(memslot, gfn, psize); in kvmppc_set_dirty_from_hpte()
143 struct kvm_memory_slot *memslot; in revmap_for_hpte() local
148 memslot = __gfn_to_memslot(kvm_memslots_raw(kvm), gfn); in revmap_for_hpte()
[all …]
trace_hv.h
276 struct kvm_memory_slot *memslot, unsigned long ea,
279 TP_ARGS(vcpu, hptep, memslot, ea, dsisr),
299 __entry->base_gfn = memslot ? memslot->base_gfn : -1UL;
300 __entry->slot_flags = memslot ? memslot->flags : 0;
book3s_64_vio_hv.c
171 struct kvm_memory_slot *memslot; in kvmppc_gpa_to_ua() local
173 memslot = search_memslots(kvm_memslots(kvm), gfn); in kvmppc_gpa_to_ua()
174 if (!memslot) in kvmppc_gpa_to_ua()
177 *ua = __gfn_to_hva_memslot(memslot, gfn) | in kvmppc_gpa_to_ua()
182 *prmap = &memslot->arch.rmap[gfn - memslot->base_gfn]; in kvmppc_gpa_to_ua()
book3s_pr.c
401 struct kvm_memory_slot *memslot; in do_kvm_unmap_hva() local
404 kvm_for_each_memslot(memslot, slots) { in do_kvm_unmap_hva()
408 hva_start = max(start, memslot->userspace_addr); in do_kvm_unmap_hva()
409 hva_end = min(end, memslot->userspace_addr + in do_kvm_unmap_hva()
410 (memslot->npages << PAGE_SHIFT)); in do_kvm_unmap_hva()
417 gfn = hva_to_gfn_memslot(hva_start, memslot); in do_kvm_unmap_hva()
418 gfn_end = hva_to_gfn_memslot(hva_end + PAGE_SIZE - 1, memslot); in do_kvm_unmap_hva()
1867 struct kvm_memory_slot *memslot; in kvm_vm_ioctl_get_dirty_log_pr() local
1883 memslot = id_to_memslot(slots, log->slot); in kvm_vm_ioctl_get_dirty_log_pr()
1885 ga = memslot->base_gfn << PAGE_SHIFT; in kvm_vm_ioctl_get_dirty_log_pr()
[all …]
book3s.h
16 struct kvm_memory_slot *memslot);
book3s_hv.c
3572 struct kvm_memory_slot *memslot; in kvm_vm_ioctl_get_dirty_log_hv() local
3585 memslot = id_to_memslot(slots, log->slot); in kvm_vm_ioctl_get_dirty_log_hv()
3587 if (!memslot->dirty_bitmap) in kvm_vm_ioctl_get_dirty_log_hv()
3594 n = kvm_dirty_bitmap_bytes(memslot); in kvm_vm_ioctl_get_dirty_log_hv()
3595 buf = memslot->dirty_bitmap + n / sizeof(long); in kvm_vm_ioctl_get_dirty_log_hv()
3599 r = kvmppc_hv_get_dirty_log_radix(kvm, memslot, buf); in kvm_vm_ioctl_get_dirty_log_hv()
3601 r = kvmppc_hv_get_dirty_log_hpt(kvm, memslot, buf); in kvm_vm_ioctl_get_dirty_log_hv()
3611 p = memslot->dirty_bitmap; in kvm_vm_ioctl_get_dirty_log_hv()
3619 kvmppc_harvest_vpa_dirty(&vcpu->arch.vpa, memslot, buf); in kvm_vm_ioctl_get_dirty_log_hv()
3620 kvmppc_harvest_vpa_dirty(&vcpu->arch.dtl, memslot, buf); in kvm_vm_ioctl_get_dirty_log_hv()
[all …]
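
kvm_vm_ioctl_get_dirty_log_hv() above, like the PR, MIPS, and s390 variants elsewhere in these results, services the KVM_GET_DIRTY_LOG ioctl. Below is a hedged userspace sketch of issuing that ioctl; the get_dirty_log() helper and its vm_fd/slot/npages parameters are illustrative, while struct kvm_dirty_log and KVM_GET_DIRTY_LOG come from <linux/kvm.h>:

```c
#include <linux/kvm.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>

/*
 * Fetch the dirty bitmap for one memslot.  'vm_fd' is a VM file descriptor
 * from KVM_CREATE_VM, 'slot' is the id used with KVM_SET_USER_MEMORY_REGION
 * (registered with KVM_MEM_LOG_DIRTY_PAGES), and 'npages' is that slot's
 * size in guest pages.  All three are illustrative parameters.
 */
int get_dirty_log(int vm_fd, uint32_t slot, uint64_t npages)
{
	/* One bit per page, rounded up to whole 64-bit words, matching
	 * the kvm_dirty_bitmap_bytes() sizing seen in these results. */
	size_t bytes = ((npages + 63) / 64) * 8;
	void *bitmap = calloc(1, bytes);
	struct kvm_dirty_log log;
	int ret;

	if (!bitmap)
		return -1;

	memset(&log, 0, sizeof(log));
	log.slot = slot;
	log.dirty_bitmap = bitmap;

	/* The kernel side of this call is the per-arch code above. */
	ret = ioctl(vm_fd, KVM_GET_DIRTY_LOG, &log);

	/* On success, set bits in 'bitmap' mark pages dirtied since the
	 * previous call. */
	free(bitmap);
	return ret;
}
```
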
book3s.c
792 void kvmppc_core_flush_memslot(struct kvm *kvm, struct kvm_memory_slot *memslot) in kvmppc_core_flush_memslot() argument
794 kvm->arch.kvm_ops->flush_memslot(kvm, memslot); in kvmppc_core_flush_memslot()
798 struct kvm_memory_slot *memslot, in kvmppc_core_prepare_memory_region() argument
801 return kvm->arch.kvm_ops->prepare_memory_region(kvm, memslot, mem); in kvmppc_core_prepare_memory_region()
/Linux-v4.19/virt/kvm/arm/
mmu.c
54 static bool memslot_is_logging(struct kvm_memory_slot *memslot) in memslot_is_logging() argument
56 return memslot->dirty_bitmap && !(memslot->flags & KVM_MEM_READONLY); in memslot_is_logging()
405 struct kvm_memory_slot *memslot) in stage2_flush_memslot() argument
407 phys_addr_t addr = memslot->base_gfn << PAGE_SHIFT; in stage2_flush_memslot()
408 phys_addr_t end = addr + PAGE_SIZE * memslot->npages; in stage2_flush_memslot()
429 struct kvm_memory_slot *memslot; in stage2_flush_vm() local
436 kvm_for_each_memslot(memslot, slots) in stage2_flush_vm()
437 stage2_flush_memslot(kvm, memslot); in stage2_flush_vm()
909 struct kvm_memory_slot *memslot) in stage2_unmap_memslot() argument
911 hva_t hva = memslot->userspace_addr; in stage2_unmap_memslot()
[all …]
/Linux-v4.19/virt/kvm/
kvm_main.c
130 static void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot, gfn_t gfn);
542 static void kvm_destroy_dirty_bitmap(struct kvm_memory_slot *memslot) in kvm_destroy_dirty_bitmap() argument
544 if (!memslot->dirty_bitmap) in kvm_destroy_dirty_bitmap()
547 kvfree(memslot->dirty_bitmap); in kvm_destroy_dirty_bitmap()
548 memslot->dirty_bitmap = NULL; in kvm_destroy_dirty_bitmap()
567 struct kvm_memory_slot *memslot; in kvm_free_memslots() local
572 kvm_for_each_memslot(memslot, slots) in kvm_free_memslots()
573 kvm_free_memslot(kvm, memslot, NULL); in kvm_free_memslots()
792 static int kvm_create_dirty_bitmap(struct kvm_memory_slot *memslot) in kvm_create_dirty_bitmap() argument
794 unsigned long dirty_bytes = 2 * kvm_dirty_bitmap_bytes(memslot); in kvm_create_dirty_bitmap()
[all …]
/Linux-v4.19/arch/mips/kvm/
mmu.c
473 struct kvm_memory_slot *memslot, in handle_hva_to_gpa() argument
478 struct kvm_memory_slot *memslot; in handle_hva_to_gpa() local
484 kvm_for_each_memslot(memslot, slots) { in handle_hva_to_gpa()
488 hva_start = max(start, memslot->userspace_addr); in handle_hva_to_gpa()
489 hva_end = min(end, memslot->userspace_addr + in handle_hva_to_gpa()
490 (memslot->npages << PAGE_SHIFT)); in handle_hva_to_gpa()
498 gfn = hva_to_gfn_memslot(hva_start, memslot); in handle_hva_to_gpa()
499 gfn_end = hva_to_gfn_memslot(hva_end + PAGE_SIZE - 1, memslot); in handle_hva_to_gpa()
501 ret |= handler(kvm, gfn, gfn_end, memslot, data); in handle_hva_to_gpa()
509 struct kvm_memory_slot *memslot, void *data) in kvm_unmap_hva_handler() argument
[all …]
mips.c
234 struct kvm_memory_slot *memslot, in kvm_arch_prepare_memory_region() argument
1006 struct kvm_memory_slot *memslot; in kvm_vm_ioctl_get_dirty_log() local
1016 memslot = id_to_memslot(slots, log->slot); in kvm_vm_ioctl_get_dirty_log()
1019 kvm_mips_callbacks->flush_shadow_memslot(kvm, memslot); in kvm_vm_ioctl_get_dirty_log()
/Linux-v4.19/arch/powerpc/include/asm/
kvm_book3s.h
197 extern int kvm_unmap_radix(struct kvm *kvm, struct kvm_memory_slot *memslot,
199 extern int kvm_age_radix(struct kvm *kvm, struct kvm_memory_slot *memslot,
201 extern int kvm_test_age_radix(struct kvm *kvm, struct kvm_memory_slot *memslot,
204 struct kvm_memory_slot *memslot, unsigned long *map);
222 extern void kvmppc_update_dirty_map(struct kvm_memory_slot *memslot,
239 struct kvm_memory_slot *memslot, unsigned long *map);
241 struct kvm_memory_slot *memslot,
kvm_book3s_64.h
382 static inline bool slot_is_aligned(struct kvm_memory_slot *memslot, in slot_is_aligned() argument
389 return !(memslot->base_gfn & mask) && !(memslot->npages & mask); in slot_is_aligned()
kvm_ppc.h
179 struct kvm_memory_slot *memslot, unsigned long porder);
224 struct kvm_memory_slot *memslot,
233 struct kvm_memory_slot *memslot);
292 void (*flush_memslot)(struct kvm *kvm, struct kvm_memory_slot *memslot);
294 struct kvm_memory_slot *memslot,
/Linux-v4.19/include/linux/
kvm_host.h
307 static inline unsigned long kvm_dirty_bitmap_bytes(struct kvm_memory_slot *memslot) in kvm_dirty_bitmap_bytes() argument
309 return ALIGN(memslot->npages, BITS_PER_LONG) / 8; in kvm_dirty_bitmap_bytes()
312 static inline unsigned long *kvm_second_dirty_bitmap(struct kvm_memory_slot *memslot) in kvm_second_dirty_bitmap() argument
314 unsigned long len = kvm_dirty_bitmap_bytes(memslot); in kvm_second_dirty_bitmap()
316 return memslot->dirty_bitmap + len / sizeof(*memslot->dirty_bitmap); in kvm_second_dirty_bitmap()
536 #define kvm_for_each_memslot(memslot, slots) \ argument
537 for (memslot = &slots->memslots[0]; \
538 memslot < slots->memslots + KVM_MEM_SLOTS_NUM && memslot->npages;\
539 memslot++)
638 struct kvm_memory_slot *memslot,
kvm_types.h
63 struct kvm_memory_slot *memslot; member
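
kvm_dirty_bitmap_bytes() and kvm_second_dirty_bitmap() above, together with kvm_create_dirty_bitmap() from kvm_main.c earlier in these results, imply a layout in which the bitmap buffer is allocated at twice the rounded-up size and the second half serves as a scratch copy (the "dirty_bitmap + n / sizeof(long)" trick in kvm_vm_ioctl_get_dirty_log_hv()). A small self-contained sketch of just the size arithmetic, with an illustrative dirty_bitmap_bytes() helper standing in for the kernel function:

```c
#include <stdint.h>
#include <stdio.h>

#define BITS_PER_LONG (8 * sizeof(long))

/* ALIGN(npages, BITS_PER_LONG) / 8, as in kvm_dirty_bitmap_bytes(). */
static unsigned long dirty_bitmap_bytes(unsigned long npages)
{
	unsigned long aligned = (npages + BITS_PER_LONG - 1) &
				~(BITS_PER_LONG - 1);
	return aligned / 8;
}

int main(void)
{
	unsigned long npages = 130;	/* example slot: 130 guest pages */
	unsigned long len = dirty_bitmap_bytes(npages);

	/* kvm_create_dirty_bitmap() allocates twice this size; the second
	 * bitmap starts len / sizeof(long) longs into the buffer, which is
	 * exactly where kvm_second_dirty_bitmap() points. */
	printf("primary bitmap: %lu bytes, allocation: %lu bytes, "
	       "second bitmap offset: %lu longs\n",
	       len, 2 * len, (unsigned long)(len / sizeof(long)));
	return 0;
}
```
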
/Linux-v4.19/arch/x86/kvm/
mmu.c
1826 struct kvm_memory_slot *memslot; in kvm_handle_hva_range() local
1833 kvm_for_each_memslot(memslot, slots) { in kvm_handle_hva_range()
1837 hva_start = max(start, memslot->userspace_addr); in kvm_handle_hva_range()
1838 hva_end = min(end, memslot->userspace_addr + in kvm_handle_hva_range()
1839 (memslot->npages << PAGE_SHIFT)); in kvm_handle_hva_range()
1846 gfn_start = hva_to_gfn_memslot(hva_start, memslot); in kvm_handle_hva_range()
1847 gfn_end = hva_to_gfn_memslot(hva_end + PAGE_SIZE - 1, memslot); in kvm_handle_hva_range()
1849 for_each_slot_rmap_range(memslot, PT_PAGE_TABLE_LEVEL, in kvm_handle_hva_range()
1853 ret |= handler(kvm, iterator.rmap, memslot, in kvm_handle_hva_range()
5472 slot_handle_level_range(struct kvm *kvm, struct kvm_memory_slot *memslot, in slot_handle_level_range() argument
[all …]
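
kvm_handle_hva_range() above, like do_kvm_unmap_hva() and handle_hva_to_gpa() earlier in these results, clamps the notifier's hva range to each memslot's userspace mapping before converting both ends to gfns with hva_to_gfn_memslot(). A minimal sketch of that clamping on toy types (the struct and hva_to_gfn() helper here are illustrative, not the kernel's):

```c
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1ull << PAGE_SHIFT)

struct toy_memslot {
	uint64_t base_gfn;
	uint64_t npages;
	uint64_t userspace_addr;
};

/* Mirrors hva_to_gfn_memslot(): map an hva inside the slot to a gfn. */
static uint64_t hva_to_gfn(uint64_t hva, const struct toy_memslot *slot)
{
	return slot->base_gfn + ((hva - slot->userspace_addr) >> PAGE_SHIFT);
}

int main(void)
{
	struct toy_memslot slot = {
		.base_gfn = 0x1000, .npages = 512,
		.userspace_addr = 0x7f0000200000ull,
	};
	/* Range handed in by the caller, possibly wider than the slot. */
	uint64_t start = 0x7f0000100000ull, end = 0x7f0000300000ull;

	/* Clamp to the slot, as the three handlers above do. */
	uint64_t hva_start = start > slot.userspace_addr ?
			     start : slot.userspace_addr;
	uint64_t slot_end = slot.userspace_addr + (slot.npages << PAGE_SHIFT);
	uint64_t hva_end = end < slot_end ? end : slot_end;

	if (hva_start >= hva_end)
		return 0;	/* no overlap with this slot */

	/* Round up so the end gfn covers a partially overlapped last page. */
	uint64_t gfn_start = hva_to_gfn(hva_start, &slot);
	uint64_t gfn_end = hva_to_gfn(hva_end + PAGE_SIZE - 1, &slot);

	printf("gfn range: 0x%llx .. 0x%llx\n",
	       (unsigned long long)gfn_start, (unsigned long long)gfn_end);
	return 0;
}
```
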
/Linux-v4.19/arch/ia64/include/asm/sn/
geo.h
62 char memslot; /* The memory slot on the bus */ member
/Linux-v4.19/tools/testing/selftests/kvm/lib/
kvm_util.c
695 uint32_t memslot) in memslot2region() argument
701 if (region->region.slot == memslot) in memslot2region()
706 " requested slot: %u\n", memslot); in memslot2region()
1623 vm_paddr_t paddr_min, uint32_t memslot) in vm_phy_page_alloc() argument
1634 region = memslot2region(vm, memslot); in vm_phy_page_alloc()
1644 paddr_min, vm->page_size, memslot); in vm_phy_page_alloc()
/Linux-v4.19/arch/x86/include/asm/
kvm_host.h
1182 struct kvm_memory_slot *memslot);
1184 const struct kvm_memory_slot *memslot);
1186 struct kvm_memory_slot *memslot);
1188 struct kvm_memory_slot *memslot);
1190 struct kvm_memory_slot *memslot);
/Linux-v4.19/tools/testing/selftests/kvm/include/
kvm_util.h
122 vm_paddr_t paddr_min, uint32_t memslot);
/Linux-v4.19/arch/s390/kvm/
kvm-s390.c
523 struct kvm_memory_slot *memslot) in kvm_s390_sync_dirty_log() argument
532 cur_gfn = memslot->base_gfn; in kvm_s390_sync_dirty_log()
533 last_gfn = memslot->base_gfn + memslot->npages; in kvm_s390_sync_dirty_log()
536 vmaddr = gfn_to_hva_memslot(memslot, cur_gfn); in kvm_s390_sync_dirty_log()
565 struct kvm_memory_slot *memslot; in kvm_vm_ioctl_get_dirty_log() local
578 memslot = id_to_memslot(slots, log->slot); in kvm_vm_ioctl_get_dirty_log()
580 if (!memslot->dirty_bitmap) in kvm_vm_ioctl_get_dirty_log()
583 kvm_s390_sync_dirty_log(kvm, memslot); in kvm_vm_ioctl_get_dirty_log()
590 n = kvm_dirty_bitmap_bytes(memslot); in kvm_vm_ioctl_get_dirty_log()
591 memset(memslot->dirty_bitmap, 0, n); in kvm_vm_ioctl_get_dirty_log()
[all …]
/Linux-v4.19/Documentation/virtual/kvm/
mmu.txt
180 to a memslot, through the kvm_memslots_for_spte_role macro and
430 information in leaf sptes. When a new memslot is added or an existing
431 memslot is changed, this information may become stale and needs to be
456 memslot update, while some SRCU readers might be using the old copy. We do not
