Lines matching refs:ghc in virt/kvm/kvm_main.c, grouped by function:

In __kvm_gfn_to_hva_cache_init() (ghc is an argument, line 1917):

1917  struct gfn_to_hva_cache *ghc,
1926  ghc->gpa = gpa;
1927  ghc->generation = slots->generation;
1928  ghc->len = len;
1929  ghc->memslot = __gfn_to_memslot(slots, start_gfn);
1930  ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn, NULL);
1931  if (!kvm_is_error_hva(ghc->hva) && nr_pages_needed <= 1) {
1932          ghc->hva += offset;
1940          ghc->memslot = __gfn_to_memslot(slots, start_gfn);
1941          ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn,
1943          if (kvm_is_error_hva(ghc->hva))
1948          ghc->memslot = NULL;
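
Only the matching lines appear above. The elided lines between 1917 and 1926 compute the page span of the request (the offset and nr_pages_needed used at lines 1931-1932), and the elided else branch revalidates a multi-page range (the repeated lookups at 1940-1943) before line 1948 clears ghc->memslot so later accesses take the slow path. A minimal, runnable sketch of the single-page test that gates the fast path at line 1931; PAGE_SHIFT and the helper name are assumptions for illustration, not from the listing:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12   /* assumption: 4 KiB pages */

    typedef uint64_t gpa_t;
    typedef uint64_t gfn_t;

    /* Hypothetical stand-in for the nr_pages_needed <= 1 check: the
     * cache keeps a direct hva only if [gpa, gpa + len) sits inside
     * one guest page. */
    static int fits_one_page(gpa_t gpa, unsigned long len)
    {
            gfn_t start_gfn = gpa >> PAGE_SHIFT;
            gfn_t end_gfn = (gpa + len - 1) >> PAGE_SHIFT;

            return end_gfn - start_gfn + 1 <= 1;
    }

    int main(void)
    {
            printf("%d\n", fits_one_page(0x1ff8, 8));   /* 1: ends at the page boundary */
            printf("%d\n", fits_one_page(0x1ff8, 16));  /* 0: spills into the next page */
            return 0;
    }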
In kvm_gfn_to_hva_cache_init() (ghc is an argument, line 1953):

1953  int kvm_gfn_to_hva_cache_init(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
1957          return __kvm_gfn_to_hva_cache_init(slots, ghc, gpa, len);
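
kvm_gfn_to_hva_cache_init() is the public entry point: it fetches the current memslots (elided between 1953 and 1957) and delegates to the helper above. A hypothetical caller sketch in kernel context; struct shared_info and the gpa variable are illustrative names, not taken from this listing:

    struct gfn_to_hva_cache ghc;

    /* Translate once, outside the hot path; the cache records the
     * memslot generation so later accesses can detect staleness. */
    if (kvm_gfn_to_hva_cache_init(kvm, &ghc, gpa, sizeof(struct shared_info)))
            return -EFAULT;     /* gpa is not backed by a valid memslot */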
In kvm_write_guest_offset_cached() (ghc is an argument, line 1961):

1961  int kvm_write_guest_offset_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
1966          gpa_t gpa = ghc->gpa + offset;
1968          BUG_ON(len + offset > ghc->len);
1970          if (slots->generation != ghc->generation)
1971                  __kvm_gfn_to_hva_cache_init(slots, ghc, ghc->gpa, ghc->len);
1973          if (unlikely(!ghc->memslot))
1976          if (kvm_is_error_hva(ghc->hva))
1979          r = __copy_to_user((void __user *)ghc->hva + offset, data, len);
1982          mark_page_dirty_in_slot(ghc->memslot, gpa >> PAGE_SHIFT);
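
Before dereferencing ghc->hva, the write path revalidates: a changed memslot generation (line 1970) forces re-initialization, a NULL ghc->memslot (the cross-page case from line 1948) drops to an uncached slow path (the return elided after line 1973, mirroring kvm_read_guest() at line 2007 below), and a bad hva yields -EFAULT; after a successful __copy_to_user() the page is marked dirty (line 1982) for dirty logging. The offset parameter lets a caller update one field of a cached structure in place; a sketch reusing the hypothetical names from above:

    u32 flags = 1;

    /* offset + len must stay within the length given at init time,
     * or the BUG_ON at line 1968 trips. */
    kvm_write_guest_offset_cached(kvm, &ghc, &flags,
                                  offsetof(struct shared_info, flags),
                                  sizeof(flags));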
In kvm_write_guest_cached() (ghc is an argument, line 1988):

1988  int kvm_write_guest_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
1991          return kvm_write_guest_offset_cached(kvm, ghc, data, 0, len);
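
kvm_write_guest_cached() is simply the offset == 0 case of the function above; with the same hypothetical names:

    struct shared_info info = { 0 };

    /* Full-range write: equivalent to the offset variant with offset 0. */
    kvm_write_guest_cached(kvm, &ghc, &info, sizeof(info));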
In kvm_read_guest_cached() (ghc is an argument, line 1995):

1995  int kvm_read_guest_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
2001          BUG_ON(len > ghc->len);
2003          if (slots->generation != ghc->generation)
2004                  __kvm_gfn_to_hva_cache_init(slots, ghc, ghc->gpa, ghc->len);
2006          if (unlikely(!ghc->memslot))
2007                  return kvm_read_guest(kvm, ghc->gpa, data, len);
2009          if (kvm_is_error_hva(ghc->hva))
2012          r = __copy_from_user(data, (void __user *)ghc->hva, len);
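
The read path mirrors the write path: the same generation check (line 2003), an explicit kvm_read_guest() fallback when the cache spans multiple pages (line 2007), and -EFAULT on a bad hva, but no dirty marking since guest memory is not modified. A closing sketch, again with the hypothetical names from above:

    struct shared_info snap;

    /* Snapshot the cached guest structure; nonzero means the copy
     * (fast or slow path) failed. */
    if (kvm_read_guest_cached(kvm, &ghc, &snap, sizeof(snap)))
            return -EFAULT;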