Lines matching refs: gpa (arm64 KVM stage-2 MMU code)
520 gpa_t gpa = addr + (vm_start - memslot->userspace_addr); in stage2_unmap_memslot() local
521 unmap_stage2_range(&kvm->arch.mmu, gpa, vm_end - vm_start); in stage2_unmap_memslot()
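The entries at 520-521 are from stage2_unmap_memslot(): a host-virtual chunk [vm_start, vm_end) of the memslot's userspace mapping is translated to a guest physical address by adding its offset within that mapping to the slot's base guest physical address (addr, presumably memslot->base_gfn << PAGE_SHIFT in the surrounding code), and the resulting range is unmapped from stage 2. Below is a minimal standalone sketch of that offset arithmetic only; it is not the kernel code, and struct memslot here is a simplified stand-in for the few kvm_memory_slot fields the calculation needs.

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    /* Simplified stand-in for the struct kvm_memory_slot fields used here. */
    struct memslot {
        uint64_t base_gfn;        /* first guest frame number of the slot  */
        uint64_t userspace_addr;  /* host virtual address backing the slot */
        uint64_t npages;          /* slot size in pages                    */
    };

    /*
     * Guest physical address of the first byte of the host-virtual chunk
     * starting at vm_start, mirroring
     * "gpa = addr + (vm_start - memslot->userspace_addr)".
     */
    static uint64_t hva_chunk_to_gpa(const struct memslot *slot, uint64_t vm_start)
    {
        uint64_t addr = slot->base_gfn << PAGE_SHIFT;   /* slot base GPA */
        return addr + (vm_start - slot->userspace_addr);
    }

    int main(void)
    {
        struct memslot slot = {
            .base_gfn = 0x80000,                 /* guest RAM at 0x80000000 */
            .userspace_addr = 0x7f0000000000ULL,
            .npages = 256,
        };
        uint64_t vm_start = slot.userspace_addr + 16 * PAGE_SIZE;
        uint64_t vm_end   = vm_start + 4 * PAGE_SIZE;

        /* The stage-2 unmap then covers [gpa, gpa + (vm_end - vm_start)). */
        printf("gpa = 0x%llx, size = 0x%llx\n",
               (unsigned long long)hva_chunk_to_gpa(&slot, vm_start),
               (unsigned long long)(vm_end - vm_start));
        return 0;
    }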
1154 gpa_t gpa, u64 size, in handle_hva_to_gpa() argument
1167 gfn_t gpa; in handle_hva_to_gpa() local
1175 gpa = hva_to_gfn_memslot(hva_start, memslot) << PAGE_SHIFT; in handle_hva_to_gpa()
1176 ret |= handler(kvm, gpa, (u64)(hva_end - hva_start), data); in handle_hva_to_gpa()
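The entries at 1154-1176 are the generic dispatcher handle_hva_to_gpa(): judging by the hva_start/hva_end names, the requested host-virtual range is clamped to each memslot's userspace span, the clamped start is turned into a guest physical address via hva_to_gfn_memslot() shifted by PAGE_SHIFT, and the handler callback receives that gpa together with the clamped length. The sketch below is a standalone model of that walk under those assumptions; the slot array, the hva_to_gfn() helper and the print_range() handler are illustrative stand-ins, not kernel APIs.

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    /* Simplified stand-in for the memslot fields the dispatcher needs. */
    struct memslot {
        uint64_t base_gfn;
        uint64_t userspace_addr;
        uint64_t npages;
    };

    typedef int (*hva_handler_t)(uint64_t gpa, uint64_t size, void *data);

    /* Model of hva_to_gfn_memslot(): page offset into the slot plus base_gfn. */
    static uint64_t hva_to_gfn(uint64_t hva, const struct memslot *slot)
    {
        return slot->base_gfn + ((hva - slot->userspace_addr) >> PAGE_SHIFT);
    }

    /*
     * Clamp [start, end) to each slot's host-virtual span and hand the overlap
     * to the handler as a (gpa, size) pair -- the same shape as the dispatcher.
     */
    static int for_each_overlap(const struct memslot *slots, int nr_slots,
                                uint64_t start, uint64_t end,
                                hva_handler_t handler, void *data)
    {
        int ret = 0;

        for (int i = 0; i < nr_slots; i++) {
            const struct memslot *slot = &slots[i];
            uint64_t slot_end  = slot->userspace_addr + (slot->npages << PAGE_SHIFT);
            uint64_t hva_start = start > slot->userspace_addr ? start
                                                              : slot->userspace_addr;
            uint64_t hva_end   = end < slot_end ? end : slot_end;

            if (hva_start >= hva_end)
                continue;               /* no overlap with this slot */

            ret |= handler(hva_to_gfn(hva_start, slot) << PAGE_SHIFT,
                           hva_end - hva_start, data);
        }
        return ret;
    }

    /* Toy handler: just report the guest physical range it was handed. */
    static int print_range(uint64_t gpa, uint64_t size, void *data)
    {
        (void)data;
        printf("handler: gpa=0x%llx size=0x%llx\n",
               (unsigned long long)gpa, (unsigned long long)size);
        return 0;
    }

    int main(void)
    {
        struct memslot slots[] = {
            { .base_gfn = 0x80000, .userspace_addr = 0x7f0000000000ULL, .npages = 256 },
            { .base_gfn = 0x90000, .userspace_addr = 0x7f0000200000ULL, .npages = 128 },
        };

        /* An MMU-notifier-style invalidation of 8 pages that runs past the
         * end of the first slot and therefore gets clamped. */
        uint64_t start = slots[0].userspace_addr + 250 * PAGE_SIZE;
        return for_each_overlap(slots, 2, start, start + 8 * PAGE_SIZE,
                                print_range, NULL);
    }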
1182 static int kvm_unmap_hva_handler(struct kvm *kvm, gpa_t gpa, u64 size, void *data) in kvm_unmap_hva_handler() argument
1187 __unmap_stage2_range(&kvm->arch.mmu, gpa, size, may_block); in kvm_unmap_hva_handler()
1202 static int kvm_set_spte_handler(struct kvm *kvm, gpa_t gpa, u64 size, void *data) in kvm_set_spte_handler() argument
1214 kvm_pgtable_stage2_map(kvm->arch.mmu.pgt, gpa, PAGE_SIZE, in kvm_set_spte_handler()
1238 static int kvm_age_hva_handler(struct kvm *kvm, gpa_t gpa, u64 size, void *data) in kvm_age_hva_handler() argument
1244 kpte = kvm_pgtable_stage2_mkold(kvm->arch.mmu.pgt, gpa); in kvm_age_hva_handler()
1249 static int kvm_test_age_hva_handler(struct kvm *kvm, gpa_t gpa, u64 size, void *data) in kvm_test_age_hva_handler() argument
1252 return kvm_pgtable_stage2_is_young(kvm->arch.mmu.pgt, gpa); in kvm_test_age_hva_handler()
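The handlers at 1182-1252 all share the handler(kvm, gpa, size, data) shape expected by handle_hva_to_gpa(), but consume the arguments differently: kvm_unmap_hva_handler() tears down the whole [gpa, gpa + size) range, kvm_set_spte_handler() maps a single PAGE_SIZE page at gpa, and the age/test-age handlers query or clear the access state of the entry at gpa. The sketch below only illustrates that calling convention; stage2_unmap() and stage2_mkold() are placeholder stubs standing in for the unmap_stage2_range() and kvm_pgtable_stage2_mkold() calls, not real implementations.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    /* Placeholder stubs standing in for the stage-2 page-table operations. */
    static void stage2_unmap(uint64_t gpa, uint64_t size)
    {
        printf("unmap [0x%llx, 0x%llx)\n",
               (unsigned long long)gpa, (unsigned long long)(gpa + size));
    }

    static bool stage2_mkold(uint64_t gpa)
    {
        printf("mkold  0x%llx\n", (unsigned long long)gpa);
        return true;                       /* pretend the page had been accessed */
    }

    /* Range-style handler: the whole clamped range matters, as in the unmap case. */
    static int unmap_handler(uint64_t gpa, uint64_t size, void *data)
    {
        (void)data;
        stage2_unmap(gpa, size);
        return 0;
    }

    /*
     * Single-entry style handler: only the address in gpa is consumed, as in
     * the age/test-age case; this toy version handles 4K granules only.
     */
    static int age_handler(uint64_t gpa, uint64_t size, void *data)
    {
        (void)data;
        if (size != PAGE_SIZE)
            return 0;                      /* larger granules not modelled here */
        return stage2_mkold(gpa) ? 1 : 0;  /* non-zero: the page was young */
    }

    int main(void)
    {
        uint64_t gpa = 0x80000000ULL;

        unmap_handler(gpa, 8 * PAGE_SIZE, NULL);
        printf("young = %d\n", age_handler(gpa, PAGE_SIZE, NULL));
        return 0;
    }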
1440 gpa_t gpa = mem->guest_phys_addr + in kvm_arch_prepare_memory_region() local
1453 ret = kvm_phys_addr_ioremap(kvm, gpa, pa, in kvm_arch_prepare_memory_region()
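The entries at 1440-1453 are the VM_PFNMAP branch of kvm_arch_prepare_memory_region(): device-like VMAs inside the new slot are mapped into stage 2 up front via kvm_phys_addr_ioremap(). The gpa is the region's guest_phys_addr plus, presumably, the chunk's offset into the region's userspace mapping (the continuation of line 1440 is not listed because it does not contain the identifier). The standalone sketch below shows that address arithmetic under those assumptions; the structs and the vm_pgoff-based host physical address derivation are simplified illustrations, not the kernel's struct kvm_userspace_memory_region or vm_area_struct.

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    /* Simplified stand-ins for the fields used by the PFNMAP path. */
    struct mem_region {            /* the memory region being registered   */
        uint64_t guest_phys_addr;
        uint64_t userspace_addr;
        uint64_t memory_size;
    };

    struct vma {                   /* the host VMA backing part of it      */
        uint64_t vm_start;
        uint64_t vm_pgoff;         /* physical page frame for VM_PFNMAP    */
    };

    /*
     * Guest physical address of the chunk starting at vm_start: same offset
     * into guest physical space as into the region's userspace mapping.
     */
    static uint64_t chunk_gpa(const struct mem_region *mem, uint64_t vm_start)
    {
        return mem->guest_phys_addr + (vm_start - mem->userspace_addr);
    }

    /* Host physical address the VMA chunk points at (PFNMAP case, assumed). */
    static uint64_t chunk_pa(const struct vma *vma, uint64_t vm_start)
    {
        return ((uint64_t)vma->vm_pgoff << PAGE_SHIFT) + (vm_start - vma->vm_start);
    }

    int main(void)
    {
        struct mem_region mem = {
            .guest_phys_addr = 0x0a000000,          /* where the guest sees the device */
            .userspace_addr  = 0x7f2000000000ULL,   /* where the VMM mapped it         */
            .memory_size     = 16 * PAGE_SIZE,
        };
        struct vma vma = {
            .vm_start = 0x7f2000000000ULL,
            .vm_pgoff = 0xe0000,                    /* device at host PA 0xe0000000    */
        };
        uint64_t vm_start = mem.userspace_addr;
        uint64_t vm_end   = vm_start + mem.memory_size;

        /* These values would feed the stage-2 ioremap-style mapping call. */
        printf("gpa = 0x%llx, pa = 0x%llx, size = 0x%llx\n",
               (unsigned long long)chunk_gpa(&mem, vm_start),
               (unsigned long long)chunk_pa(&vma, vm_start),
               (unsigned long long)(vm_end - vm_start));
        return 0;
    }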
1492 gpa_t gpa = slot->base_gfn << PAGE_SHIFT; in kvm_arch_flush_shadow_memslot() local
1496 unmap_stage2_range(&kvm->arch.mmu, gpa, size); in kvm_arch_flush_shadow_memslot()
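The last two entries, in kvm_arch_flush_shadow_memslot(), convert a whole memslot into a byte-granular stage-2 range: the start is base_gfn << PAGE_SHIFT, and the size passed to unmap_stage2_range() is presumably the slot's page count shifted the same way. A tiny sketch of that conversion, reusing the simplified memslot stand-in from the earlier sketches:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12

    /* Same simplified memslot stand-in as in the earlier sketches. */
    struct memslot {
        uint64_t base_gfn;
        uint64_t npages;
    };

    int main(void)
    {
        struct memslot slot = { .base_gfn = 0x80000, .npages = 256 };

        /* Whole-slot stage-2 range: gpa = base_gfn << PAGE_SHIFT, size in bytes. */
        uint64_t gpa  = slot.base_gfn << PAGE_SHIFT;
        uint64_t size = slot.npages   << PAGE_SHIFT;

        printf("unmap [0x%llx, 0x%llx)\n",
               (unsigned long long)gpa, (unsigned long long)(gpa + size));
        return 0;
    }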