Lines Matching refs:memslot
353 static inline unsigned long kvm_dirty_bitmap_bytes(struct kvm_memory_slot *memslot)
355 	return ALIGN(memslot->npages, BITS_PER_LONG) / 8;
358 static inline unsigned long *kvm_second_dirty_bitmap(struct kvm_memory_slot *memslot)
360 	unsigned long len = kvm_dirty_bitmap_bytes(memslot);
362 	return memslot->dirty_bitmap + len / sizeof(*memslot->dirty_bitmap);
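Taken together, these two helpers describe KVM's double dirty-bitmap layout: kvm_dirty_bitmap_bytes() rounds the slot's page count up to a whole number of longs and converts bits to bytes, and kvm_second_dirty_bitmap() points into the second half of a buffer allocated at twice that size. Below is a minimal user-space sketch of the arithmetic only; the ALIGN stand-in, the reduced struct and the sample slot are illustrative, not the kernel definitions.

#include <stdio.h>
#include <stdlib.h>

/* Stand-ins for the kernel definitions, for illustration only. */
#define BITS_PER_LONG	(8 * sizeof(unsigned long))
#define ALIGN(x, a)	(((x) + (a) - 1) / (a) * (a))

struct kvm_memory_slot {	/* reduced to the fields used here */
	unsigned long npages;
	unsigned long *dirty_bitmap;
};

static unsigned long kvm_dirty_bitmap_bytes(struct kvm_memory_slot *memslot)
{
	/* one bit per guest page, rounded up to whole longs */
	return ALIGN(memslot->npages, BITS_PER_LONG) / 8;
}

static unsigned long *kvm_second_dirty_bitmap(struct kvm_memory_slot *memslot)
{
	unsigned long len = kvm_dirty_bitmap_bytes(memslot);

	/* the allocation is 2 * len bytes; step over the first bitmap */
	return memslot->dirty_bitmap + len / sizeof(*memslot->dirty_bitmap);
}

int main(void)
{
	struct kvm_memory_slot slot = { .npages = 1000 };

	slot.dirty_bitmap = calloc(2, kvm_dirty_bitmap_bytes(&slot));
	printf("bitmap bytes: %lu, second bitmap starts %ld longs in\n",
	       kvm_dirty_bitmap_bytes(&slot),
	       (long)(kvm_second_dirty_bitmap(&slot) - slot.dirty_bitmap));
	free(slot.dirty_bitmap);
	return 0;
}

For a 1000-page slot on a 64-bit host this prints 128 bytes per bitmap, with the second bitmap starting 16 longs into the shared allocation.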
613 #define kvm_for_each_memslot(memslot, slots)				\
614 	for (memslot = &slots->memslots[0];				\
615 	     memslot < slots->memslots + slots->used_slots; memslot++)	\
616 		if (WARN_ON_ONCE(!memslot->npages)) {			\
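The macro at lines 613-616 walks the used entries of a struct kvm_memslots and warns once if it ever meets a zero-sized slot; the macro's closing "} else" arm does not mention memslot and so is not listed here. A hedged usage sketch follows: the helper name and the page-counting purpose are invented for illustration, and the caller is assumed to hold kvm->slots_lock or the memslots SRCU read lock, as kvm_memslots() requires.

#include <linux/kvm_host.h>

/* Illustrative only: count the guest pages covered by all used memslots.
 * Assumes kernel context with kvm->slots_lock or the memslots SRCU read
 * lock held by the caller.
 */
static unsigned long count_guest_pages(struct kvm *kvm)
{
	struct kvm_memslots *slots = kvm_memslots(kvm);
	struct kvm_memory_slot *memslot;
	unsigned long total = 0;

	kvm_for_each_memslot(memslot, slots)
		total += memslot->npages;

	return total;
}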
717 		struct kvm_memory_slot *memslot,
828 void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot, gfn_t gfn);
897 void kvm_arch_sync_dirty_log(struct kvm *kvm, struct kvm_memory_slot *memslot);
901 		struct kvm_memory_slot *memslot);
905 		int *is_dirty, struct kvm_memory_slot **memslot);
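The prototypes around lines 897-905 belong to the dirty-logging path: kvm_arch_sync_dirty_log() lets the architecture pull hardware dirty state into the software bitmap, and kvm_get_dirty_log(), whose parameter list ends on line 905, looks up the slot named by a struct kvm_dirty_log and reports whether anything in it was dirtied. The sketch below shows how an architecture ioctl handler might sit on top of it; the function name is invented, and it assumes (as in the kernel series this listing appears to come from) that kvm_get_dirty_log() itself copies the bitmap to user space, so the handler only clears it afterwards. If that assumption does not hold for your tree, the copy step has to be added here.

#include <linux/kvm_host.h>

/* Illustrative sketch of an arch-level KVM_GET_DIRTY_LOG handler.  It
 * assumes kvm_get_dirty_log() fills *memslot and *is_dirty and copies
 * the dirty bitmap to user space itself; the clear-after-read mirrors
 * the common pattern, and real handlers add their own sanity checks.
 */
static int example_vm_ioctl_get_dirty_log(struct kvm *kvm,
					  struct kvm_dirty_log *log)
{
	struct kvm_memory_slot *memslot;
	int is_dirty, r;

	mutex_lock(&kvm->slots_lock);

	r = kvm_get_dirty_log(kvm, log, &is_dirty, &memslot);
	if (!r && is_dirty)
		/* start the next tracking round with a clean bitmap */
		memset(memslot->dirty_bitmap, 0,
		       kvm_dirty_bitmap_bytes(memslot));

	mutex_unlock(&kvm->slots_lock);
	return r;
}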
1434 static inline bool kvm_is_visible_memslot(struct kvm_memory_slot *memslot)
1436 	return (memslot && memslot->id < KVM_USER_MEM_SLOTS &&
1437 		!(memslot->flags & KVM_MEMSLOT_INVALID));
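kvm_is_visible_memslot() counts a slot as guest-visible only if it exists, has an ID in the user-visible range below KVM_USER_MEM_SLOTS (internal slots use higher IDs), and is not flagged KVM_MEMSLOT_INVALID while being deleted or moved. A small hedged sketch of the kind of check built on it, modeled on the shape of kvm_is_visible_gfn(); the function name is illustrative.

#include <linux/kvm_host.h>

/* Illustrative: is this guest frame number backed by a slot the guest
 * may see?  gfn_to_memslot() requires the memslots SRCU read lock or
 * kvm->slots_lock to be held by the caller.
 */
static bool example_gfn_is_visible(struct kvm *kvm, gfn_t gfn)
{
	struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn);

	return kvm_is_visible_memslot(memslot);
}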