Lines matching refs:ghc (references to the struct gfn_to_hva_cache pointer ghc in virt/kvm/kvm_main.c)
2573 struct gfn_to_hva_cache *ghc, in __kvm_gfn_to_hva_cache_init() argument
2583 ghc->generation = slots->generation; in __kvm_gfn_to_hva_cache_init()
2586 ghc->hva = KVM_HVA_ERR_BAD; in __kvm_gfn_to_hva_cache_init()
2595 ghc->memslot = __gfn_to_memslot(slots, start_gfn); in __kvm_gfn_to_hva_cache_init()
2596 ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn, in __kvm_gfn_to_hva_cache_init()
2598 if (kvm_is_error_hva(ghc->hva)) in __kvm_gfn_to_hva_cache_init()
2604 ghc->hva += offset; in __kvm_gfn_to_hva_cache_init()
2606 ghc->memslot = NULL; in __kvm_gfn_to_hva_cache_init()
2608 ghc->gpa = gpa; in __kvm_gfn_to_hva_cache_init()
2609 ghc->len = len; in __kvm_gfn_to_hva_cache_init()
2613 int kvm_gfn_to_hva_cache_init(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_gfn_to_hva_cache_init() argument
2617 return __kvm_gfn_to_hva_cache_init(slots, ghc, gpa, len); in kvm_gfn_to_hva_cache_init()
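The hits above show the cache being filled in: __kvm_gfn_to_hva_cache_init() resolves a guest physical address to a memslot and host virtual address once and records gpa, len, hva, memslot and slots->generation in the struct gfn_to_hva_cache, with kvm_gfn_to_hva_cache_init() as the public wrapper. A minimal sketch of how a caller typically sets up such a cache follows; struct my_shared_state, my_shared_state_setup() and its field names are illustrative assumptions, only struct gfn_to_hva_cache and kvm_gfn_to_hva_cache_init() come from the code listed here.

#include <linux/kvm_host.h>	/* struct kvm, gpa_t, struct gfn_to_hva_cache */

/* Hypothetical per-VM state fronting a guest-visible memory region. */
struct my_shared_state {
	struct gfn_to_hva_cache ghc;	/* cached gpa -> hva translation */
	gpa_t shared_gpa;		/* guest-physical base of the region */
};

static int my_shared_state_setup(struct kvm *kvm, struct my_shared_state *s,
				 gpa_t gpa, unsigned long len)
{
	s->shared_gpa = gpa;
	/*
	 * Looks up the memslot and host virtual address for [gpa, gpa + len)
	 * and records slots->generation, so the cached read/write helpers
	 * below can detect and refresh a stale translation.
	 */
	return kvm_gfn_to_hva_cache_init(kvm, &s->ghc, gpa, len);
}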
2621 int kvm_write_guest_offset_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_write_guest_offset_cached() argument
2627 gpa_t gpa = ghc->gpa + offset; in kvm_write_guest_offset_cached()
2629 if (WARN_ON_ONCE(len + offset > ghc->len)) in kvm_write_guest_offset_cached()
2632 if (slots->generation != ghc->generation) { in kvm_write_guest_offset_cached()
2633 if (__kvm_gfn_to_hva_cache_init(slots, ghc, ghc->gpa, ghc->len)) in kvm_write_guest_offset_cached()
2637 if (kvm_is_error_hva(ghc->hva)) in kvm_write_guest_offset_cached()
2640 if (unlikely(!ghc->memslot)) in kvm_write_guest_offset_cached()
2643 r = __copy_to_user((void __user *)ghc->hva + offset, data, len); in kvm_write_guest_offset_cached()
2646 mark_page_dirty_in_slot(ghc->memslot, gpa >> PAGE_SHIFT); in kvm_write_guest_offset_cached()
2652 int kvm_write_guest_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_write_guest_cached() argument
2655 return kvm_write_guest_offset_cached(kvm, ghc, data, 0, len); in kvm_write_guest_cached()
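On the write side, kvm_write_guest_offset_cached() first checks that offset + len stays within the cached region, revalidates the cache against slots->generation (re-initializing it if the memslots changed), then copies with __copy_to_user() and marks the page dirty; kvm_write_guest_cached() is the offset-0 wrapper. A hedged usage sketch follows, reusing a cache initialized as above; struct my_guest_data and my_publish() are illustrative names, not part of the KVM API.

/* Illustrative guest-visible layout; not a real KVM structure. */
struct my_guest_data {
	u64 seq;
	u64 payload;
};

static void my_publish(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
		       struct my_guest_data *d)
{
	/* Push the whole structure through the cached translation... */
	if (kvm_write_guest_cached(kvm, ghc, d, sizeof(*d)))
		return;		/* the guest page could not be written */

	/* ...or update a single field; offset + len must stay within ghc->len. */
	kvm_write_guest_offset_cached(kvm, ghc, &d->seq,
				      offsetof(struct my_guest_data, seq),
				      sizeof(d->seq));
}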
2659 int kvm_read_guest_offset_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_read_guest_offset_cached() argument
2665 gpa_t gpa = ghc->gpa + offset; in kvm_read_guest_offset_cached()
2667 if (WARN_ON_ONCE(len + offset > ghc->len)) in kvm_read_guest_offset_cached()
2670 if (slots->generation != ghc->generation) { in kvm_read_guest_offset_cached()
2671 if (__kvm_gfn_to_hva_cache_init(slots, ghc, ghc->gpa, ghc->len)) in kvm_read_guest_offset_cached()
2675 if (kvm_is_error_hva(ghc->hva)) in kvm_read_guest_offset_cached()
2678 if (unlikely(!ghc->memslot)) in kvm_read_guest_offset_cached()
2681 r = __copy_from_user(data, (void __user *)ghc->hva + offset, len); in kvm_read_guest_offset_cached()
2689 int kvm_read_guest_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_read_guest_cached() argument
2692 return kvm_read_guest_offset_cached(kvm, ghc, data, 0, len); in kvm_read_guest_cached()
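The read side mirrors the write side: the same length and generation checks, but __copy_from_user() instead of __copy_to_user() and no dirty-page marking. A short sketch reading back the hypothetical structure from the write example; my_fetch() is an assumed name.

static int my_fetch(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
		    struct my_guest_data *d)
{
	/* Returns 0 on success, -EFAULT if the guest page could not be read. */
	return kvm_read_guest_cached(kvm, ghc, d, sizeof(*d));
}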