Lines matching refs: memslot
static inline unsigned long kvm_dirty_bitmap_bytes(struct kvm_memory_slot *memslot)
{
	/* One dirty bit per guest page, rounded up to whole unsigned longs. */
	return ALIGN(memslot->npages, BITS_PER_LONG) / 8;
}
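In other words, each slot keeps one dirty bit per guest page, rounded up to a whole number of unsigned longs. As a worked example (not taken from the file): on a 64-bit host, a slot with npages = 1000 needs ALIGN(1000, 64) / 8 = 1024 / 8 = 128 bytes of bitmap, i.e. 16 longs.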
static inline unsigned long *kvm_second_dirty_bitmap(struct kvm_memory_slot *memslot)
{
	/* The second bitmap is laid out immediately after the first one. */
	unsigned long len = kvm_dirty_bitmap_bytes(memslot);

	return memslot->dirty_bitmap + len / sizeof(*memslot->dirty_bitmap);
}
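Since the second bitmap sits directly behind the first one in the same allocation, whoever creates the slot's dirty log has to reserve twice kvm_dirty_bitmap_bytes(). A minimal allocation sketch under that assumption; the helper name kvm_alloc_dirty_bitmap and the GFP flags are illustrative, not taken from the listing above:

/* Sketch: allocate room for both halves of a slot's dirty bitmap. */
static int kvm_alloc_dirty_bitmap(struct kvm_memory_slot *memslot)
{
	unsigned long dirty_bytes = 2 * kvm_dirty_bitmap_bytes(memslot);

	memslot->dirty_bitmap = kvzalloc(dirty_bytes, GFP_KERNEL_ACCOUNT);
	if (!memslot->dirty_bitmap)
		return -ENOMEM;

	/* kvm_second_dirty_bitmap(memslot) now points at the upper half. */
	return 0;
}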
/* Iterate over all used slots; a used slot with no pages is warned about and skipped. */
#define kvm_for_each_memslot(memslot, slots)				\
	for (memslot = &slots->memslots[0];				\
	     memslot < slots->memslots + slots->used_slots; memslot++)	\
		if (WARN_ON_ONCE(!memslot->npages)) {			\
		} else
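A sketch of typical iterator usage: walk every used slot of a kvm_memslots and aggregate something per slot. The helper name count_guest_pages is invented for illustration, and a real caller would hold the slots lock or the SRCU read side while iterating:

/* Sketch: total number of guest pages covered by the used slots. */
static unsigned long count_guest_pages(struct kvm_memslots *slots)
{
	struct kvm_memory_slot *memslot;
	unsigned long pages = 0;

	kvm_for_each_memslot(memslot, slots)
		pages += memslot->npages;

	return pages;
}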
Declarations that take a memslot (only the matching lines are listed, so some prototypes appear truncated):

struct kvm_memory_slot *memslot,                        /* parameter line of a longer prototype */
void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot, gfn_t gfn);
void kvm_arch_sync_dirty_log(struct kvm *kvm, struct kvm_memory_slot *memslot);
struct kvm_memory_slot *memslot);                       /* closing parameter of a longer prototype */
int *is_dirty, struct kvm_memory_slot **memslot);       /* tail of a longer prototype */
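Of these, mark_page_dirty_in_slot() records that a single guest frame in the given slot was written. A small illustrative wrapper that dirties a whole range; mark_range_dirty_in_slot is invented for this sketch:

/* Sketch: mark every gfn in [gfn, gfn + npages) dirty within one slot. */
static void mark_range_dirty_in_slot(struct kvm_memory_slot *memslot,
				     gfn_t gfn, unsigned long npages)
{
	unsigned long i;

	for (i = 0; i < npages; i++)
		mark_page_dirty_in_slot(memslot, gfn + i);
}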
static inline bool kvm_is_visible_memslot(struct kvm_memory_slot *memslot)
{
	/*
	 * Visible: the slot exists, is a userspace slot, and is not marked
	 * invalid (KVM_MEMSLOT_INVALID is set while a slot is deleted or moved).
	 */
	return (memslot && memslot->id < KVM_USER_MEM_SLOTS &&
		!(memslot->flags & KVM_MEMSLOT_INVALID));
}
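A caller that only has a guest frame number can pair this check with a slot lookup. The wrapper below is a sketch: gfn_to_memslot() is KVM's usual gfn-to-slot lookup, and the wrapper name gfn_is_visible is illustrative:

/* Sketch: is this guest frame backed by a valid, userspace-visible slot? */
static bool gfn_is_visible(struct kvm *kvm, gfn_t gfn)
{
	struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn);

	return kvm_is_visible_memslot(memslot);
}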