| /OK3568_Linux_fs/kernel/arch/x86/kvm/ |
| H A D | mtrr.c | 336 kvm_zap_gfn_range(vcpu->kvm, gpa_to_gfn(start), gpa_to_gfn(end)); in update_mtrr()
|
| H A D | x86.c | 749 real_gfn = gpa_to_gfn(real_gfn); in kvm_read_guest_page_mmu()
|       |       | 6543 if (kvm_vcpu_map(vcpu, gpa_to_gfn(gpa), &map)) in emulator_cmpxchg_emulated()
|       |       | 7240 pfn = gfn_to_pfn(vcpu->kvm, gpa_to_gfn(gpa)); in reexecute_instruction()
|       |       | 7260 kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(gpa)); in reexecute_instruction()
|       |       | 7270 kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(gpa)); in reexecute_instruction()
|       |       | 7323 kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(gpa)); in retry_instruction()
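The x86.c hits show two recurring idioms: temporarily mapping the guest page behind a GPA with kvm_vcpu_map()/kvm_vcpu_unmap() (emulator_cmpxchg_emulated), and dropping write protection on the faulting frame with kvm_mmu_unprotect_page() before retrying (reexecute_instruction/retry_instruction). A minimal sketch of the map/modify/unmap idiom, assuming a 5.10-era kvm_vcpu_map() that takes a GFN; the function name example_write_guest_u32() is illustrative, not from x86.c:

    #include <linux/kvm_host.h>

    /*
     * Hedged sketch: map the guest page containing @gpa, write a u32 at the
     * in-page offset, then unmap and flag the page as dirtied.
     */
    static int example_write_guest_u32(struct kvm_vcpu *vcpu, gpa_t gpa, u32 val)
    {
            struct kvm_host_map map;
            u32 *kaddr;

            if (kvm_vcpu_map(vcpu, gpa_to_gfn(gpa), &map))
                    return -EFAULT;         /* no memslot backs this GPA */

            kaddr = map.hva + offset_in_page(gpa);
            *kaddr = val;

            kvm_vcpu_unmap(vcpu, &map, true);       /* true: page was dirtied */
            return 0;
    }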
|
| /OK3568_Linux_fs/kernel/arch/s390/kvm/ |
| H A D | priv.c | 280 vmaddr = gfn_to_hva(vcpu->kvm, gpa_to_gfn(gaddr)); in handle_iske()
|       |        | 327 vmaddr = gfn_to_hva(vcpu->kvm, gpa_to_gfn(gaddr)); in handle_rrbe()
|       |        | 394 unsigned long vmaddr = gfn_to_hva(vcpu->kvm, gpa_to_gfn(start)); in handle_sske()
|       |        | 1094 vmaddr = gfn_to_hva(vcpu->kvm, gpa_to_gfn(start)); in handle_pfmf()
|       |        | 1494 hva = gfn_to_hva_prot(vcpu->kvm, gpa_to_gfn(gpa), &writable); in handle_tprot()
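The s390 priv.c handlers all resolve a guest address to a host virtual address before operating on the backing page: gpa_to_gfn() selects the frame and gfn_to_hva() (or gfn_to_hva_prot() when the handler also needs writability, as in handle_tprot) walks the memslots. A minimal sketch of that lookup; example_gpa_to_hva() is an illustrative name:

    #include <linux/kvm_host.h>

    /*
     * Hedged sketch: translate a guest physical address to a host virtual
     * address via the memslots, preserving the in-page offset.
     */
    static int example_gpa_to_hva(struct kvm *kvm, gpa_t gpa, unsigned long *hvap)
    {
            unsigned long hva = gfn_to_hva(kvm, gpa_to_gfn(gpa));

            if (kvm_is_error_hva(hva))
                    return -EFAULT;         /* address not covered by any memslot */

            *hvap = hva + offset_in_page(gpa);
            return 0;
    }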
|
| H A D | vsie.c | 645 page = gfn_to_page(kvm, gpa_to_gfn(gpa)); in pin_guest_page()
|       |        | 657 mark_page_dirty(kvm, gpa_to_gfn(gpa)); in unpin_guest_page()
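pin_guest_page() and unpin_guest_page() in vsie.c bracket access to guest-provided control blocks: the page behind the GPA is pinned with gfn_to_page(), and on release the frame is marked dirty so dirty logging and migration see the host-side writes. A rough sketch of that pairing; example_pin()/example_unpin() are illustrative names, not the vsie.c functions themselves:

    #include <linux/kvm_host.h>

    /* Hedged sketch of a pin/unpin pair in the spirit of vsie.c. */
    static int example_pin(struct kvm *kvm, gpa_t gpa, struct page **pagep)
    {
            struct page *page = gfn_to_page(kvm, gpa_to_gfn(gpa));

            if (is_error_page(page))
                    return -EINVAL;         /* GPA not backed by a memslot */
            *pagep = page;
            return 0;
    }

    static void example_unpin(struct kvm *kvm, gpa_t gpa, struct page *page)
    {
            kvm_release_page_dirty(page);           /* drop the pin */
            mark_page_dirty(kvm, gpa_to_gfn(gpa));  /* record it in the dirty log */
    }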
|
| H A D | kvm-s390.c | 4000 hva = gfn_to_hva(vcpu->kvm, gpa_to_gfn(current->thread.gmap_addr)); in kvm_arch_setup_async_pf()
|
| /OK3568_Linux_fs/kernel/include/linux/ |
| H A D | kvm_host.h | 1164 static inline gfn_t gpa_to_gfn(gpa_t gpa) in gpa_to_gfn() function
|       |            | 1177 return kvm_vcpu_gfn_to_page(vcpu, gpa_to_gfn(gpa)); in kvm_vcpu_gpa_to_page()
|       |            | 1182 unsigned long hva = gfn_to_hva(kvm, gpa_to_gfn(gpa)); in kvm_is_error_gpa()
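kvm_host.h is where the helper itself lives: line 1164 is the definition of gpa_to_gfn(), and the neighbouring kvm_vcpu_gpa_to_page() and kvm_is_error_gpa() are thin wrappers around it. The conversion is just the removal of the in-page offset. A minimal sketch of both directions, assuming the usual right shift by PAGE_SHIFT; the example_* names are illustrative stand-ins for the real inline helpers:

    #include <linux/kvm_host.h>

    /* Hedged sketch of the GPA <-> GFN conversion around kvm_host.h:1164. */
    static inline gfn_t example_gpa_to_gfn(gpa_t gpa)
    {
            return (gfn_t)(gpa >> PAGE_SHIFT);      /* drop the page offset */
    }

    static inline gpa_t example_gfn_to_gpa(gfn_t gfn)
    {
            return (gpa_t)gfn << PAGE_SHIFT;        /* back to a page-aligned GPA */
    }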
|
| /OK3568_Linux_fs/kernel/arch/x86/kvm/svm/ |
| H A D | nested.c | 78 ret = kvm_vcpu_read_guest_page(vcpu, gpa_to_gfn(cr3), &pdpte, in nested_svm_get_tdp_pdptr()
|       |          | 507 ret = kvm_vcpu_map(&svm->vcpu, gpa_to_gfn(vmcb12_gpa), &map); in nested_svm_vmrun()
|       |          | 622 rc = kvm_vcpu_map(&svm->vcpu, gpa_to_gfn(svm->nested.vmcb12_gpa), &map); in nested_svm_vmexit()
|
| H A D | svm.c | 2053 ret = kvm_vcpu_map(&svm->vcpu, gpa_to_gfn(svm->vmcb->save.rax), &map); in vmload_interception()
|       |       | 2079 ret = kvm_vcpu_map(&svm->vcpu, gpa_to_gfn(svm->vmcb->save.rax), &map); in vmsave_interception()
|       |       | 4059 gpa_to_gfn(vmcb12_gpa), &map) == -EINVAL) in svm_pre_leave_smm()
|
| /OK3568_Linux_fs/kernel/arch/powerpc/kvm/ |
| H A D | book3s_xive_native.c | 645 gfn = gpa_to_gfn(kvm_eq.qaddr); in kvmppc_xive_native_set_queue_config()
|       |                      | 906 mark_page_dirty(vcpu->kvm, gpa_to_gfn(q->guest_qaddr)); in kvmppc_xive_native_vcpu_eq_sync()
|
| /OK3568_Linux_fs/kernel/arch/x86/kvm/vmx/ |
| H A D | nested.c | 577 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->msr_bitmap), map)) in nested_vmx_prepare_msr_bitmap()
|       |          | 675 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map)) in nested_cache_shadow_vmcs12()
|       |          | 2019 if (kvm_vcpu_map(vcpu, gpa_to_gfn(evmcs_gpa), in nested_vmx_handle_enlightened_vmptrld()
|       |          | 2972 if (CC(kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map))) in nested_vmx_check_vmcs_link_ptr()
|       |          | 3194 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->virtual_apic_page_addr), map)) { in nested_get_vmcs12_pages()
|       |          | 3220 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->posted_intr_desc_addr), map)) { in nested_get_vmcs12_pages()
|       |          | 3283 if (kvm_write_guest_page(vcpu->kvm, gpa_to_gfn(dst), &gpa, in nested_vmx_write_pml_buffer()
|       |          | 5292 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmptr), &map)) { in handle_vmptrld()
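Most of the VMX nested.c hits are the same kvm_vcpu_map() idiom seen in x86.c and svm/, applied to guest structures named by vmcs12 fields. The exception is line 3283, which uses kvm_write_guest_page(): the destination GPA is split into a frame number (gpa_to_gfn) and an in-page byte offset. A minimal sketch of that split; example_write_u64_at_gpa() is an illustrative name:

    #include <linux/kvm_host.h>

    /*
     * Hedged sketch: write a u64 at an arbitrary guest physical address by
     * passing the frame number and in-page offset separately, as the PML
     * writer in nested.c does.
     */
    static int example_write_u64_at_gpa(struct kvm *kvm, gpa_t dst, u64 value)
    {
            return kvm_write_guest_page(kvm, gpa_to_gfn(dst), &value,
                                        offset_in_page(dst), sizeof(value));
    }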
|
| /OK3568_Linux_fs/kernel/arch/x86/kvm/mmu/ |
| H A D | paging_tmpl.h | 391 host_addr = kvm_vcpu_gfn_to_hva_prot(vcpu, gpa_to_gfn(real_gpa), in FNAME()
|
| H A D | mmu.c | 5103 kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(cr2_or_gpa)); in kvm_mmu_page_fault()
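Like the reexecute_instruction()/retry_instruction() hits in x86.c, kvm_mmu_page_fault() converts the faulting GPA to a GFN when asking the MMU to drop write protection on the shadow page covering it, so the guest can retry the instruction instead of staying stuck in emulation. A one-line sketch of that call; kvm_mmu_unprotect_page() is declared in the x86 KVM mmu header, and example_unprotect_faulting_gfn() is an illustrative wrapper:

    #include <linux/kvm_host.h>
    #include "mmu.h"        /* arch/x86/kvm/mmu.h: kvm_mmu_unprotect_page() */

    /* Hedged sketch: zap any shadow page write-protecting the faulting frame. */
    static void example_unprotect_faulting_gfn(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa)
    {
            kvm_mmu_unprotect_page(vcpu->kvm, gpa_to_gfn(cr2_or_gpa));
    }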
|
| /OK3568_Linux_fs/kernel/drivers/gpu/drm/i915/gvt/ |
| H A D | kvmgt.c | 1761 if (kvmgt_gfn_is_write_protected(info, gpa_to_gfn(gpa))) in kvmgt_page_track_write()
|