Lines matching refs:memslot
444 static inline unsigned long kvm_dirty_bitmap_bytes(struct kvm_memory_slot *memslot)
446         return ALIGN(memslot->npages, BITS_PER_LONG) / 8;
449 static inline unsigned long *kvm_second_dirty_bitmap(struct kvm_memory_slot *memslot)
451         unsigned long len = kvm_dirty_bitmap_bytes(memslot);
453         return memslot->dirty_bitmap + len / sizeof(*memslot->dirty_bitmap);
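The two helpers above size a memslot's dirty bitmap and locate the second bitmap that sits directly behind it; the generic KVM code allocates the buffer at twice kvm_dirty_bitmap_bytes() so the manual dirty-log-protect path has a second half to work with. Below is a minimal user-space sketch of that layout, not kernel code: the struct is cut down to the fields these helpers touch, and the double-size allocation is an assumption based on that scheme.

/*
 * User-space sketch (not kernel code): illustrates the layout assumed by
 * kvm_dirty_bitmap_bytes()/kvm_second_dirty_bitmap() above -- a single
 * allocation holding two equally sized bitmaps back to back.
 */
#include <stdio.h>
#include <stdlib.h>

#define BITS_PER_LONG (8 * sizeof(unsigned long))
#define ALIGN(x, a)   (((x) + (a) - 1) & ~((a) - 1))

struct kvm_memory_slot {
	unsigned long npages;
	unsigned long *dirty_bitmap;
};

static unsigned long kvm_dirty_bitmap_bytes(struct kvm_memory_slot *memslot)
{
	return ALIGN(memslot->npages, BITS_PER_LONG) / 8;
}

static unsigned long *kvm_second_dirty_bitmap(struct kvm_memory_slot *memslot)
{
	unsigned long len = kvm_dirty_bitmap_bytes(memslot);

	/* the second bitmap starts immediately after the first one */
	return memslot->dirty_bitmap + len / sizeof(*memslot->dirty_bitmap);
}

int main(void)
{
	struct kvm_memory_slot slot = { .npages = 1000 };
	unsigned long bytes = kvm_dirty_bitmap_bytes(&slot);

	/* assumed here: the real allocation is twice this size */
	slot.dirty_bitmap = calloc(2, bytes);

	printf("bytes per bitmap: %lu\n", bytes);
	printf("second bitmap starts %td longs into the buffer\n",
	       kvm_second_dirty_bitmap(&slot) - slot.dirty_bitmap);

	free(slot.dirty_bitmap);
	return 0;
}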
723 #define kvm_for_each_memslot(memslot, slots) \
724         for (memslot = &slots->memslots[0]; \
725              memslot < slots->memslots + slots->used_slots; memslot++) \
726                 if (WARN_ON_ONCE(!memslot->npages)) { \
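kvm_for_each_memslot() walks the first used_slots entries of the memslots array; the guard shown at line 726 (whose continuation does not contain "memslot" and so is not listed) warns on and skips slots with zero npages. The following is a hedged user-space sketch of a typical caller, with the structures reduced to the fields used here and the WARN/skip logic dropped; in the kernel the caller would also hold the appropriate SRCU read lock.

/*
 * User-space sketch of how kvm_for_each_memslot() is typically used.
 * Simplified: the kernel macro also warns on and skips empty slots,
 * and KVM_MEM_SLOTS_NUM here is an illustrative stand-in.
 */
#include <stdio.h>

#define KVM_MEM_SLOTS_NUM 512

struct kvm_memory_slot {
	unsigned long base_gfn;
	unsigned long npages;
};

struct kvm_memslots {
	int used_slots;
	struct kvm_memory_slot memslots[KVM_MEM_SLOTS_NUM];
};

#define kvm_for_each_memslot(memslot, slots)				\
	for (memslot = &(slots)->memslots[0];				\
	     memslot < (slots)->memslots + (slots)->used_slots; memslot++)

static unsigned long total_guest_pages(struct kvm_memslots *slots)
{
	struct kvm_memory_slot *memslot;
	unsigned long pages = 0;

	/* sum npages across every slot currently in use */
	kvm_for_each_memslot(memslot, slots)
		pages += memslot->npages;

	return pages;
}

int main(void)
{
	struct kvm_memslots slots = {
		.used_slots = 2,
		.memslots = { { .base_gfn = 0,    .npages = 256 },
			      { .base_gfn = 1024, .npages = 64 } },
	};

	printf("total pages: %lu\n", total_guest_pages(&slots));
	return 0;
}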
829 struct kvm_memory_slot *memslot,
938 void mark_page_dirty_in_slot(struct kvm *kvm, struct kvm_memory_slot *memslot, gfn_t gfn);
1004 void kvm_arch_sync_dirty_log(struct kvm *kvm, struct kvm_memory_slot *memslot);
1008 const struct kvm_memory_slot *memslot);
1012 int *is_dirty, struct kvm_memory_slot **memslot);
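The prototypes above (some shown only as their continuation lines, since only lines containing "memslot" are listed) belong to KVM's dirty-logging interface. As a rough illustration of what mark_page_dirty_in_slot() boils down to, the sketch below sets the slot-relative bit in the dirty bitmap. It is user-space code under assumptions, not the kernel implementation: the struct kvm argument from the declaration at line 938 is dropped, and the atomicity and dirty-ring handling in virt/kvm/kvm_main.c are omitted.

/*
 * User-space sketch (not the kernel implementation): translate the gfn
 * to a slot-relative page index and set that bit in the dirty bitmap.
 */
#include <stdio.h>
#include <stdlib.h>

#define BITS_PER_LONG (8 * sizeof(unsigned long))
#define ALIGN(x, a)   (((x) + (a) - 1) & ~((a) - 1))

typedef unsigned long long gfn_t;

struct kvm_memory_slot {
	gfn_t base_gfn;
	unsigned long npages;
	unsigned long *dirty_bitmap;
};

static void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot, gfn_t gfn)
{
	if (memslot && memslot->dirty_bitmap) {
		unsigned long rel_gfn = gfn - memslot->base_gfn;

		/* the kernel uses an atomic bit operation here */
		memslot->dirty_bitmap[rel_gfn / BITS_PER_LONG] |=
			1UL << (rel_gfn % BITS_PER_LONG);
	}
}

int main(void)
{
	struct kvm_memory_slot slot = { .base_gfn = 0x100, .npages = 128 };

	slot.dirty_bitmap = calloc(ALIGN(slot.npages, BITS_PER_LONG) / 8, 1);
	mark_page_dirty_in_slot(&slot, 0x105);

	printf("first bitmap word: %#lx\n", slot.dirty_bitmap[0]);
	free(slot.dirty_bitmap);
	return 0;
}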
1749 static inline bool kvm_is_visible_memslot(struct kvm_memory_slot *memslot)
1751         return (memslot && memslot->id < KVM_USER_MEM_SLOTS &&
1752                 !(memslot->flags & KVM_MEMSLOT_INVALID));
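kvm_is_visible_memslot() is the predicate behind callers such as kvm_is_visible_gfn(): a slot counts as visible only if it exists, has an id in the user-controlled range (below KVM_USER_MEM_SLOTS, i.e. not an internal slot), and is not flagged KVM_MEMSLOT_INVALID, as it is while being deleted or moved. The sketch below mirrors that check in user space; the constant values and field widths are illustrative stand-ins, not the kernel's.

/*
 * User-space sketch of the check performed by kvm_is_visible_memslot().
 * Constants are illustrative stand-ins for the kernel definitions.
 */
#include <stdbool.h>
#include <stdio.h>

#define KVM_USER_MEM_SLOTS	512		/* arch-dependent in the kernel */
#define KVM_MEMSLOT_INVALID	(1UL << 16)	/* internal flag bit */

struct kvm_memory_slot {
	unsigned short id;
	unsigned long flags;
};

static bool kvm_is_visible_memslot(struct kvm_memory_slot *memslot)
{
	return memslot && memslot->id < KVM_USER_MEM_SLOTS &&
	       !(memslot->flags & KVM_MEMSLOT_INVALID);
}

int main(void)
{
	struct kvm_memory_slot user_slot  = { .id = 3, .flags = 0 };
	struct kvm_memory_slot dying_slot = { .id = 3, .flags = KVM_MEMSLOT_INVALID };

	printf("user slot visible:    %d\n", kvm_is_visible_memslot(&user_slot));
	printf("dying slot visible:   %d\n", kvm_is_visible_memslot(&dying_slot));
	printf("missing slot visible: %d\n", kvm_is_visible_memslot(NULL));
	return 0;
}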