Lines matching refs: efer (uses of the guest EFER MSR in arch/x86/kvm/x86.c)

850 if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) && in kvm_set_cr0()
861 if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) && in kvm_set_cr0()
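These two hits are the mode-transition checks in kvm_set_cr0(). A minimal sketch of the surrounding logic, assuming the v5.x-era layout of this file; the continuation lines after each matched `&&`, and the locals old_cr0 and pdptr_bits, are reconstructed rather than taken from the listing:

#ifdef CONFIG_X86_64
	/* Entering long mode: EFER.LME already set, guest turns on CR0.PG.
	 * Refuse unless CR4.PAE is set and CS is not a 64-bit segment. */
	if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) &&
	    (cr0 & X86_CR0_PG)) {
		int cs_db, cs_l;

		if (!is_pae(vcpu))
			return 1;
		kvm_x86_ops.get_cs_db_l_bits(vcpu, &cs_db, &cs_l);
		if (cs_l)
			return 1;
	}
#endif
	/* 32-bit PAE paging (LME clear): reload the guest PDPTRs when the
	 * paging-related CR0 bits change. */
	if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) &&
	    is_pae(vcpu) && ((cr0 ^ old_cr0) & pdptr_bits) &&
	    !load_pdptrs(vcpu, vcpu->arch.walk_mmu, kvm_read_cr3(vcpu)))
		return 1;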
1488 static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in __kvm_valid_efer()
1490 if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT)) in __kvm_valid_efer()
1493 if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM)) in __kvm_valid_efer()
1496 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1500 if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX)) in __kvm_valid_efer()
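The four hits above are the whole body of __kvm_valid_efer(): each rejects an EFER feature bit that the guest's CPUID does not advertise. A sketch of the full function; the return statements do not mention efer and so do not appear in the listing, but the pattern leaves little doubt:

static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)
{
	/* Each EFER feature bit must be backed by a guest CPUID bit. */
	if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))
		return false;

	if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))
		return false;

	if (efer & (EFER_LME | EFER_LMA) &&
	    !guest_cpuid_has(vcpu, X86_FEATURE_LM))
		return false;

	if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX))
		return false;

	return true;
}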
1506 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in kvm_valid_efer()
1508 if (efer & efer_reserved_bits) in kvm_valid_efer()
1511 return __kvm_valid_efer(vcpu, efer); in kvm_valid_efer()
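kvm_valid_efer() merely adds a reserved-bit test in front of the CPUID checks; a sketch with the return values assumed:

bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)
{
	/* Reserved bits may never be set, regardless of guest CPUID. */
	if (efer & efer_reserved_bits)
		return false;

	return __kvm_valid_efer(vcpu, efer);
}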
1517 u64 old_efer = vcpu->arch.efer; in set_efer()
1518 u64 efer = msr_info->data; in set_efer()
1521 if (efer & efer_reserved_bits) in set_efer()
1525 if (!__kvm_valid_efer(vcpu, efer)) in set_efer()
1529 (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME)) in set_efer()
1533 efer &= ~EFER_LMA; in set_efer()
1534 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
1536 r = kvm_x86_ops.set_efer(vcpu, efer); in set_efer()
1543 if ((efer ^ old_efer) & EFER_NX) in set_efer()
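Taken together, the set_efer() hits trace the WRMSR path: validate the value, preserve the live EFER.LMA bit (LMA is flipped by the CR0.PG transition, not by the MSR write), hand the result to vendor code, and rebuild the MMU when EFER.NX changes, since NX alters the page-table reserved/executable bits. A sketch; the host_initiated branch and the error path between lines 1536 and 1543 are assumptions based on the usual structure:

static int set_efer(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
{
	u64 old_efer = vcpu->arch.efer;
	u64 efer = msr_info->data;
	int r;

	if (efer & efer_reserved_bits)
		return 1;

	/* Guest writes must match guest CPUID; host-initiated writes
	 * (state restore) are allowed to toggle LME under paging. */
	if (!msr_info->host_initiated) {
		if (!__kvm_valid_efer(vcpu, efer))
			return 1;

		if (is_paging(vcpu) &&
		    (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME))
			return 1;
	}

	/* LMA is owned by the CR0.PG transition, not by this write. */
	efer &= ~EFER_LMA;
	efer |= vcpu->arch.efer & EFER_LMA;

	r = kvm_x86_ops.set_efer(vcpu, efer);
	if (r)
		return r;

	/* NX changes the page-table reserved bits: reset the MMU. */
	if ((efer ^ old_efer) & EFER_NX)
		kvm_mmu_reset_context(vcpu);

	return 0;
}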
3542 msr_info->data = vcpu->arch.efer; in kvm_get_msr_common()
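The RDMSR side is a plain read-out; the hit sits in kvm_get_msr_common()'s MSR switch, sketched here with the case framing assumed:

	case MSR_EFER:
		msr_info->data = vcpu->arch.efer;
		break;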
8732 put_smstate(u64, buf, 0x7ed0, vcpu->arch.efer); in enter_smm_save_state_64()
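The enter_smm_save_state_64() hit stores EFER into the 64-bit SMRAM state-save map at offset 0x7ed0. put_smstate() is a small helper macro defined in the same file; the definition below is reproduced from memory of this kernel era and should be treated as an assumption:

/* Offsets are relative to the architectural 0x7e00 base of the
 * state-save map, so EFER at 0x7ed0 lands 0xd0 bytes into buf. */
#define put_smstate(type, buf, offset, val) \
	*(type *)((buf) + (offset) - 0x7e00) = val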
9710 sregs->efer = vcpu->arch.efer; in __get_sregs()
9808 if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) { in kvm_valid_sregs()
9815 || !(sregs->efer & EFER_LMA)) in kvm_valid_sregs()
9824 if (sregs->efer & EFER_LMA || sregs->cs.l) in kvm_valid_sregs()
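kvm_valid_sregs() cross-checks the EFER that userspace supplies via KVM_SET_SREGS against CR0, CR4 and the CS descriptor before __set_sregs() will accept it. A sketch with the branch bodies and return values assumed:

static int kvm_valid_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs)
{
	if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) {
		/* 64-bit mode requested: CR4.PAE and EFER.LMA must
		 * both be set. */
		if (!(sregs->cr4 & X86_CR4_PAE)
		    || !(sregs->efer & EFER_LMA))
			return -EINVAL;
	} else {
		/* Not 64-bit mode: LMA must be clear and CS cannot be
		 * a long-mode (L=1) segment. */
		if (sregs->efer & EFER_LMA || sregs->cs.l)
			return -EINVAL;
	}

	return 0;
}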
9862 mmu_reset_needed |= vcpu->arch.efer != sregs->efer; in __set_sregs()
9863 kvm_x86_ops.set_efer(vcpu, sregs->efer); in __set_sregs()
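For the userspace-facing view of the __set_sregs() hits: struct kvm_sregs carries an efer field that is read and written with the standard KVM_GET_SREGS/KVM_SET_SREGS ioctls. A hypothetical helper (the function name and read-modify-write flow are illustrative, not from the source):

#include <linux/kvm.h>
#include <sys/ioctl.h>

/* Hypothetical helper: enable EFER.NX for a vCPU from userspace.
 * KVM routes the write through __set_sregs(), which schedules an
 * MMU reset when the new EFER differs from vcpu->arch.efer. */
static int vcpu_enable_nx(int vcpu_fd)
{
	struct kvm_sregs sregs;

	if (ioctl(vcpu_fd, KVM_GET_SREGS, &sregs) < 0)
		return -1;

	sregs.efer |= 1ULL << 11;	/* EFER.NXE is bit 11 */

	return ioctl(vcpu_fd, KVM_SET_SREGS, &sregs);
}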