Lines matching refs: rmode (KVM's VMX code; each entry gives the source line number, the matching line, and its enclosing function)
888 if (to_vmx(vcpu)->rmode.vm86_active) in update_exception_bitmap()
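Line 888 is what makes the real-mode emulation below workable: while rmode.vm86_active is set, the exception bitmap is opened completely, so every guest exception causes a VM-exit that KVM can handle or reflect. A sketch of the surrounding logic, reconstructed from upstream KVM (the exact set of always-trapped vectors varies by version):

	u32 eb;

	/* The usual bitmap traps only a handful of vectors. */
	eb = (1u << PF_VECTOR) | (1u << UD_VECTOR) | (1u << MC_VECTOR) |
	     (1u << DB_VECTOR) | (1u << AC_VECTOR);
	/* ... conditional tweaks for guest debugging, EPT, etc. ... */

	/* In emulated real mode, intercept every exception (line 888 above). */
	if (to_vmx(vcpu)->rmode.vm86_active)
		eb = ~0;

	vmcs_write32(EXCEPTION_BITMAP, eb);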
1506 if (vmx->rmode.vm86_active) { in vmx_get_rflags()
1508 save_rflags = vmx->rmode.save_rflags; in vmx_get_rflags()
1530 if (vmx->rmode.vm86_active) { in vmx_set_rflags()
1531 vmx->rmode.save_rflags = rflags; in vmx_set_rflags()
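Lines 1506-1531 implement RFLAGS shadowing for emulated real mode: the guest actually runs as a vm86 task, so the VM and IOPL bits of the hardware RFLAGS belong to KVM, and the guest's view of them lives in rmode.save_rflags. A hedged reconstruction of the two accessors, where RMODE_GUEST_OWNED_EFLAGS_BITS is ~(X86_EFLAGS_IOPL | X86_EFLAGS_VM) (details vary by kernel version):

static unsigned long vmx_get_rflags(struct kvm_vcpu *vcpu)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	unsigned long rflags, save_rflags;

	rflags = vmcs_readl(GUEST_RFLAGS);
	if (vmx->rmode.vm86_active) {
		/* Keep the bits the guest owns, take VM/IOPL from the shadow. */
		rflags &= RMODE_GUEST_OWNED_EFLAGS_BITS;
		save_rflags = vmx->rmode.save_rflags;
		rflags |= save_rflags & ~RMODE_GUEST_OWNED_EFLAGS_BITS;
	}
	return rflags;
}

static void vmx_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);

	if (vmx->rmode.vm86_active) {
		/* Remember the guest's view; force vm86 mode in hardware. */
		vmx->rmode.save_rflags = rflags;
		rflags |= X86_EFLAGS_IOPL | X86_EFLAGS_VM;
	}
	vmcs_writel(GUEST_RFLAGS, rflags);
}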
1754 if (vmx->rmode.vm86_active) { in vmx_queue_exception()
2817 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_ES], VCPU_SREG_ES); in enter_pmode()
2818 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_DS], VCPU_SREG_DS); in enter_pmode()
2819 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_FS], VCPU_SREG_FS); in enter_pmode()
2820 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_GS], VCPU_SREG_GS); in enter_pmode()
2821 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_SS], VCPU_SREG_SS); in enter_pmode()
2822 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_CS], VCPU_SREG_CS); in enter_pmode()
2824 vmx->rmode.vm86_active = 0; in enter_pmode()
2826 vmx_set_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR); in enter_pmode()
2830 flags |= vmx->rmode.save_rflags & ~RMODE_GUEST_OWNED_EFLAGS_BITS; in enter_pmode()
2838 fix_pmode_seg(vcpu, VCPU_SREG_CS, &vmx->rmode.segs[VCPU_SREG_CS]); in enter_pmode()
2839 fix_pmode_seg(vcpu, VCPU_SREG_SS, &vmx->rmode.segs[VCPU_SREG_SS]); in enter_pmode()
2840 fix_pmode_seg(vcpu, VCPU_SREG_ES, &vmx->rmode.segs[VCPU_SREG_ES]); in enter_pmode()
2841 fix_pmode_seg(vcpu, VCPU_SREG_DS, &vmx->rmode.segs[VCPU_SREG_DS]); in enter_pmode()
2842 fix_pmode_seg(vcpu, VCPU_SREG_FS, &vmx->rmode.segs[VCPU_SREG_FS]); in enter_pmode()
2843 fix_pmode_seg(vcpu, VCPU_SREG_GS, &vmx->rmode.segs[VCPU_SREG_GS]); in enter_pmode()
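enter_pmode() (lines 2817-2843) tears the emulation down when the guest enables protection: refresh the rmode.segs cache, clear vm86_active, restore TR, fold the stashed VM/IOPL view back into RFLAGS, then sanitize the cached segments for protected mode. Roughly, with the repeated per-segment calls elided (a sketch, not the verbatim source):

static void enter_pmode(struct kvm_vcpu *vcpu)
{
	unsigned long flags;
	struct vcpu_vmx *vmx = to_vmx(vcpu);

	/* Refresh the segment cache; it may be stale (lines 2817-2822). */
	vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_ES], VCPU_SREG_ES);
	/* ... DS, FS, GS, SS and CS likewise ... */

	vmx->rmode.vm86_active = 0;

	vmx_set_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR);

	/* Fold the guest's saved VM/IOPL view back in (line 2830). */
	flags = vmcs_readl(GUEST_RFLAGS);
	flags &= RMODE_GUEST_OWNED_EFLAGS_BITS;
	flags |= vmx->rmode.save_rflags & ~RMODE_GUEST_OWNED_EFLAGS_BITS;
	vmcs_writel(GUEST_RFLAGS, flags);

	update_exception_bitmap(vcpu);

	/* Make the cached segments legal again (lines 2838-2843). */
	fix_pmode_seg(vcpu, VCPU_SREG_CS, &vmx->rmode.segs[VCPU_SREG_CS]);
	/* ... SS, ES, DS, FS and GS likewise ... */
}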
2885 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR); in enter_rmode()
2886 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_ES], VCPU_SREG_ES); in enter_rmode()
2887 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_DS], VCPU_SREG_DS); in enter_rmode()
2888 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_FS], VCPU_SREG_FS); in enter_rmode()
2889 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_GS], VCPU_SREG_GS); in enter_rmode()
2890 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_SS], VCPU_SREG_SS); in enter_rmode()
2891 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_CS], VCPU_SREG_CS); in enter_rmode()
2893 vmx->rmode.vm86_active = 1; in enter_rmode()
2910 vmx->rmode.save_rflags = flags; in enter_rmode()
2918 fix_rmode_seg(VCPU_SREG_SS, &vmx->rmode.segs[VCPU_SREG_SS]); in enter_rmode()
2919 fix_rmode_seg(VCPU_SREG_CS, &vmx->rmode.segs[VCPU_SREG_CS]); in enter_rmode()
2920 fix_rmode_seg(VCPU_SREG_ES, &vmx->rmode.segs[VCPU_SREG_ES]); in enter_rmode()
2921 fix_rmode_seg(VCPU_SREG_DS, &vmx->rmode.segs[VCPU_SREG_DS]); in enter_rmode()
2922 fix_rmode_seg(VCPU_SREG_GS, &vmx->rmode.segs[VCPU_SREG_GS]); in enter_rmode()
2923 fix_rmode_seg(VCPU_SREG_FS, &vmx->rmode.segs[VCPU_SREG_FS]); in enter_rmode()
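enter_rmode() (lines 2885-2923) is the mirror image: snapshot the guest's segments into rmode.segs before they are clobbered, set vm86_active, stash RFLAGS while forcing VM and IOPL in hardware, and load vm86-legal replacements via fix_rmode_seg(). A sketch under the same caveats (the real function also repoints TR at the real-mode TSS set up via KVM_SET_TSS_ADDR, omitted here):

static void enter_rmode(struct kvm_vcpu *vcpu)
{
	unsigned long flags;
	struct vcpu_vmx *vmx = to_vmx(vcpu);

	/* Snapshot the guest's segments first (lines 2885-2891). */
	vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR);
	/* ... ES, DS, FS, GS, SS and CS likewise ... */

	vmx->rmode.vm86_active = 1;

	/* Keep the guest's view (line 2910), run the CPU in vm86 mode. */
	flags = vmcs_readl(GUEST_RFLAGS);
	vmx->rmode.save_rflags = flags;
	flags |= X86_EFLAGS_IOPL | X86_EFLAGS_VM;
	vmcs_writel(GUEST_RFLAGS, flags);

	update_exception_bitmap(vcpu);

	/* Load vm86-legal segment values (lines 2918-2923). */
	fix_rmode_seg(VCPU_SREG_SS, &vmx->rmode.segs[VCPU_SREG_SS]);
	/* ... CS, ES, DS, GS and FS likewise ... */
}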
3111 if (vmx->rmode.vm86_active && (cr0 & X86_CR0_PE)) in vmx_set_cr0()
3114 if (!vmx->rmode.vm86_active && !(cr0 & X86_CR0_PE)) in vmx_set_cr0()
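Lines 3111 and 3114 are the only triggers for the two transitions above: vmx_set_cr0() compares the new CR0.PE against the current emulation state and switches worlds when they disagree. In upstream KVM the two checks read approximately:

	/* Guest enabled protection while we were emulating real mode. */
	if (vmx->rmode.vm86_active && (cr0 & X86_CR0_PE))
		enter_pmode(vcpu);

	/* Guest dropped to real mode while we were not emulating it. */
	if (!vmx->rmode.vm86_active && !(cr0 & X86_CR0_PE))
		enter_rmode(vcpu);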
3225 else if (vmx->rmode.vm86_active) in vmx_set_cr4()
3277 if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) { in vmx_get_segment()
3278 *var = vmx->rmode.segs[seg]; in vmx_get_segment()
3312 if (to_vmx(vcpu)->rmode.vm86_active) { in vmx_get_segment_base()
3323 if (unlikely(vmx->rmode.vm86_active)) in vmx_get_cpl()
3358 if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) { in vmx_set_segment()
3359 vmx->rmode.segs[seg] = *var; in vmx_set_segment()
3363 fix_rmode_seg(seg, &vmx->rmode.segs[seg]); in vmx_set_segment()
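While vm86_active is set, the segment values in the VMCS are the fix_rmode_seg()-mangled ones, so the accessors (lines 3277-3363) serve the guest's view from the rmode.segs cache instead; LDTR is the one register exempt on both paths, and vmx_get_cpl() (line 3323) likewise reports CPL 0 even though the CPU really runs the guest at CPL 3 in vm86 mode. A reconstruction of the read side, from memory of upstream KVM:

static void vmx_get_segment(struct kvm_vcpu *vcpu,
			    struct kvm_segment *var, int seg)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);

	if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) {
		/* Report the guest's view, not the vm86-mangled VMCS value. */
		*var = vmx->rmode.segs[seg];
		if (seg == VCPU_SREG_TR ||
		    var->selector == vmx_read_guest_seg_selector(vmx, seg))
			return;
		/* The guest reloaded the register in vm86 mode; recompute. */
		var->base = vmx_read_guest_seg_base(vmx, seg);
		var->selector = vmx_read_guest_seg_selector(vmx, seg);
		return;
	}
	/* ... protected-mode path reads the VMCS fields directly ... */
}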
4488 vmx->rmode.vm86_active = 0; in vmx_vcpu_reset()
4604 if (vmx->rmode.vm86_active) { in vmx_inject_irq()
4643 if (vmx->rmode.vm86_active) { in vmx_inject_nmi()
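Event injection also needs the detour (lines 4604 and 4643): hardware event injection would build a protected-mode style frame, so while vm86_active is set, interrupts and NMIs are instead emulated with kvm_inject_realmode_interrupt(), which pushes a real-mode frame and vectors through the IVT; vmx_queue_exception() (line 1754) takes the same path. Approximately, though the exact signatures have shifted across kernel versions:

	int irq = vcpu->arch.interrupt.nr;

	if (vmx->rmode.vm86_active) {
		int inc_eip = 0;

		/* For a soft interrupt, resume past the INTn instruction. */
		if (vcpu->arch.interrupt.soft)
			inc_eip = vcpu->arch.event_exit_inst_len;
		kvm_inject_realmode_interrupt(vcpu, irq, inc_eip);
		return;
	}
	/* Otherwise inject through VM_ENTRY_INTR_INFO_FIELD as usual. */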
4884 if (!vmx->rmode.vm86_active && is_gp_fault(intr_info)) { in handle_exception_nmi()
4932 if (vmx->rmode.vm86_active && rmode_exception(vcpu, ex_no)) in handle_exception_nmi()
5531 if (vmx->emulation_required && !vmx->rmode.vm86_active && in handle_invalid_guest_state()