Lines matching refs: ctxt (KVM x86 instruction emulator, arch/x86/kvm/emulate.c)

195 		int (*execute)(struct x86_emulate_ctxt *ctxt);
204 int (*check_perm)(struct x86_emulate_ctxt *ctxt);
243 static ulong reg_read(struct x86_emulate_ctxt *ctxt, unsigned nr) in reg_read() argument
245 if (!(ctxt->regs_valid & (1 << nr))) { in reg_read()
246 ctxt->regs_valid |= 1 << nr; in reg_read()
247 ctxt->_regs[nr] = ctxt->ops->read_gpr(ctxt, nr); in reg_read()
249 return ctxt->_regs[nr]; in reg_read()
252 static ulong *reg_write(struct x86_emulate_ctxt *ctxt, unsigned nr) in reg_write() argument
254 ctxt->regs_valid |= 1 << nr; in reg_write()
255 ctxt->regs_dirty |= 1 << nr; in reg_write()
256 return &ctxt->_regs[nr]; in reg_write()
259 static ulong *reg_rmw(struct x86_emulate_ctxt *ctxt, unsigned nr) in reg_rmw() argument
261 reg_read(ctxt, nr); in reg_rmw()
262 return reg_write(ctxt, nr); in reg_rmw()
265 static void writeback_registers(struct x86_emulate_ctxt *ctxt) in writeback_registers() argument
269 for_each_set_bit(reg, (ulong *)&ctxt->regs_dirty, 16) in writeback_registers()
270 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]); in writeback_registers()
273 static void invalidate_registers(struct x86_emulate_ctxt *ctxt) in invalidate_registers() argument
275 ctxt->regs_dirty = 0; in invalidate_registers()
276 ctxt->regs_valid = 0; in invalidate_registers()
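
The five helpers above form a small lazy cache over the guest GPRs: a read pulls the register from the vcpu once and caches it, a write only marks a dirty bit, reg_rmw() does both, and writeback_registers()/invalidate_registers() flush or drop the cache. A minimal self-contained miniature of the same valid/dirty-bitmap pattern; backend_read()/backend_write() are illustrative stand-ins for the ctxt->ops->read_gpr()/write_gpr() callbacks:

	#include <stdio.h>

	#define NR_REGS 16

	struct regcache {
		unsigned long regs[NR_REGS];
		unsigned valid, dirty;			/* one bit per register */
	};

	static unsigned long backing[NR_REGS] = { [0] = 42 }; /* stand-in for vcpu state */

	static unsigned long backend_read(unsigned nr) { return backing[nr]; }
	static void backend_write(unsigned nr, unsigned long v) { backing[nr] = v; }

	static unsigned long reg_read(struct regcache *c, unsigned nr)
	{
		if (!(c->valid & (1u << nr))) {		/* first use: pull and cache */
			c->valid |= 1u << nr;
			c->regs[nr] = backend_read(nr);
		}
		return c->regs[nr];
	}

	static unsigned long *reg_write(struct regcache *c, unsigned nr)
	{
		c->valid |= 1u << nr;			/* cached copy is now authoritative */
		c->dirty |= 1u << nr;			/* remember to flush it */
		return &c->regs[nr];
	}

	static void writeback(struct regcache *c)
	{
		for (unsigned nr = 0; nr < NR_REGS; nr++)
			if (c->dirty & (1u << nr))
				backend_write(nr, c->regs[nr]);
		c->valid = c->dirty = 0;
	}

	int main(void)
	{
		struct regcache c = { 0 };
		unsigned long rax = reg_read(&c, 0);	/* pulled from backing */
		*reg_write(&c, 1) = rax + 1;		/* cached only, not yet flushed */
		writeback(&c);				/* now backing[1] == 43 */
		printf("%lu\n", backing[1]);
		return 0;
	}
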
309 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop);
503 static int emulator_check_intercept(struct x86_emulate_ctxt *ctxt, in emulator_check_intercept() argument
509 .rep_prefix = ctxt->rep_prefix, in emulator_check_intercept()
510 .modrm_mod = ctxt->modrm_mod, in emulator_check_intercept()
511 .modrm_reg = ctxt->modrm_reg, in emulator_check_intercept()
512 .modrm_rm = ctxt->modrm_rm, in emulator_check_intercept()
513 .src_val = ctxt->src.val64, in emulator_check_intercept()
514 .dst_val = ctxt->dst.val64, in emulator_check_intercept()
515 .src_bytes = ctxt->src.bytes, in emulator_check_intercept()
516 .dst_bytes = ctxt->dst.bytes, in emulator_check_intercept()
517 .ad_bytes = ctxt->ad_bytes, in emulator_check_intercept()
518 .next_rip = ctxt->eip, in emulator_check_intercept()
521 return ctxt->ops->intercept(ctxt, &info, stage); in emulator_check_intercept()
548 static inline unsigned long ad_mask(struct x86_emulate_ctxt *ctxt) in ad_mask() argument
550 return (1UL << (ctxt->ad_bytes << 3)) - 1; in ad_mask()
553 static ulong stack_mask(struct x86_emulate_ctxt *ctxt) in stack_mask() argument
558 if (ctxt->mode == X86EMUL_MODE_PROT64) in stack_mask()
560 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS); in stack_mask()
564 static int stack_size(struct x86_emulate_ctxt *ctxt) in stack_size() argument
566 return (__fls(stack_mask(ctxt)) + 1) >> 3; in stack_size()
571 address_mask(struct x86_emulate_ctxt *ctxt, unsigned long reg) in address_mask() argument
573 if (ctxt->ad_bytes == sizeof(unsigned long)) in address_mask()
576 return reg & ad_mask(ctxt); in address_mask()
580 register_address(struct x86_emulate_ctxt *ctxt, int reg) in register_address() argument
582 return address_mask(ctxt, reg_read(ctxt, reg)); in register_address()
591 register_address_increment(struct x86_emulate_ctxt *ctxt, int reg, int inc) in register_address_increment() argument
593 ulong *preg = reg_rmw(ctxt, reg); in register_address_increment()
595 assign_register(preg, *preg + inc, ctxt->ad_bytes); in register_address_increment()
598 static void rsp_increment(struct x86_emulate_ctxt *ctxt, int inc) in rsp_increment() argument
600 masked_increment(reg_rmw(ctxt, VCPU_REGS_RSP), stack_mask(ctxt), inc); in rsp_increment()
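
ad_mask(), address_mask() and rsp_increment() all reduce to masking a value to the current address or stack width. The sizeof special case in address_mask() matters because shifting 1UL by 64 is undefined in C; stack_size() then recovers the byte width from the mask ((__fls(0xffff) + 1) >> 3 == 2). The masking in isolation:

	static unsigned long ad_mask(int ad_bytes)
	{
		return (1UL << (ad_bytes << 3)) - 1;	/* 2 -> 0xffff, 4 -> 0xffffffff */
	}

	static unsigned long address_mask(int ad_bytes, unsigned long reg)
	{
		if (ad_bytes == sizeof(unsigned long))	/* 8: full width, no mask */
			return reg;
		return reg & ad_mask(ad_bytes);
	}
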
610 static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg) in seg_base() argument
612 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS) in seg_base()
615 return ctxt->ops->get_cached_segment_base(ctxt, seg); in seg_base()
618 static int emulate_exception(struct x86_emulate_ctxt *ctxt, int vec, in emulate_exception() argument
622 ctxt->exception.vector = vec; in emulate_exception()
623 ctxt->exception.error_code = error; in emulate_exception()
624 ctxt->exception.error_code_valid = valid; in emulate_exception()
628 static int emulate_db(struct x86_emulate_ctxt *ctxt) in emulate_db() argument
630 return emulate_exception(ctxt, DB_VECTOR, 0, false); in emulate_db()
633 static int emulate_gp(struct x86_emulate_ctxt *ctxt, int err) in emulate_gp() argument
635 return emulate_exception(ctxt, GP_VECTOR, err, true); in emulate_gp()
638 static int emulate_ss(struct x86_emulate_ctxt *ctxt, int err) in emulate_ss() argument
640 return emulate_exception(ctxt, SS_VECTOR, err, true); in emulate_ss()
643 static int emulate_ud(struct x86_emulate_ctxt *ctxt) in emulate_ud() argument
645 return emulate_exception(ctxt, UD_VECTOR, 0, false); in emulate_ud()
648 static int emulate_ts(struct x86_emulate_ctxt *ctxt, int err) in emulate_ts() argument
650 return emulate_exception(ctxt, TS_VECTOR, err, true); in emulate_ts()
653 static int emulate_de(struct x86_emulate_ctxt *ctxt) in emulate_de() argument
655 return emulate_exception(ctxt, DE_VECTOR, 0, false); in emulate_de()
658 static int emulate_nm(struct x86_emulate_ctxt *ctxt) in emulate_nm() argument
660 return emulate_exception(ctxt, NM_VECTOR, 0, false); in emulate_nm()
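
emulate_exception() and its seven wrappers share one pattern: record the vector and (optional) error code in ctxt->exception, then return a fault status that the caller propagates instead of continuing emulation (X86EMUL_PROPAGATE_FAULT in the real emulator). Condensed, with architectural vector numbers and illustrative names:

	#include <stdbool.h>

	enum { EMUL_CONTINUE, EMUL_PROPAGATE_FAULT };

	struct exc { int vector; int error_code; bool error_code_valid; };

	static int raise_exc(struct exc *e, int vec, int error, bool valid)
	{
		e->vector = vec;
		e->error_code = error;
		e->error_code_valid = valid;
		return EMUL_PROPAGATE_FAULT;
	}

	/* each wrapper fixes the vector and whether an error code exists */
	static int raise_gp(struct exc *e, int err) { return raise_exc(e, 13, err, true); } /* #GP */
	static int raise_ud(struct exc *e) { return raise_exc(e, 6, 0, false); }            /* #UD */
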
663 static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg) in get_segment_selector() argument
668 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg); in get_segment_selector()
672 static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector, in set_segment_selector() argument
679 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg); in set_segment_selector()
680 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg); in set_segment_selector()
683 static inline u8 ctxt_virt_addr_bits(struct x86_emulate_ctxt *ctxt) in ctxt_virt_addr_bits() argument
685 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48; in ctxt_virt_addr_bits()
689 struct x86_emulate_ctxt *ctxt) in emul_is_noncanonical_address() argument
691 return get_canonical(la, ctxt_virt_addr_bits(ctxt)) != la; in emul_is_noncanonical_address()
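
ctxt_virt_addr_bits() picks 57 or 48 implemented bits depending on CR4.LA57, and emul_is_noncanonical_address() tests whether sign-extending from that width reproduces the address. A sketch with get_canonical() written out as the usual sign-extension shift pair (an assumption: the listing only calls the helper, it does not show its body):

	#include <stdint.h>
	#include <stdbool.h>

	static uint64_t get_canonical(uint64_t la, uint8_t va_bits)
	{
		return (uint64_t)((int64_t)(la << (64 - va_bits)) >> (64 - va_bits));
	}

	static bool is_noncanonical(uint64_t la, uint8_t va_bits)
	{
		return get_canonical(la, va_bits) != la;
	}

	/* is_noncanonical(0x0000800000000000, 48) -> true;
	 * 0xffff800000000000 is canonical. */
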
703 static unsigned insn_alignment(struct x86_emulate_ctxt *ctxt, unsigned size) in insn_alignment() argument
705 u64 alignment = ctxt->d & AlignMask; in insn_alignment()
722 static __always_inline int __linearize(struct x86_emulate_ctxt *ctxt, in __linearize() argument
735 la = seg_base(ctxt, addr.seg) + addr.ea; in __linearize()
740 va_bits = ctxt_virt_addr_bits(ctxt); in __linearize()
750 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL, in __linearize()
755 if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8)) in __linearize()
779 if (la & (insn_alignment(ctxt, size) - 1)) in __linearize()
780 return emulate_gp(ctxt, 0); in __linearize()
784 return emulate_ss(ctxt, 0); in __linearize()
786 return emulate_gp(ctxt, 0); in __linearize()
789 static int linearize(struct x86_emulate_ctxt *ctxt, in linearize() argument
795 return __linearize(ctxt, addr, &max_size, size, write, false, in linearize()
796 ctxt->mode, linear); in linearize()
799 static inline int assign_eip(struct x86_emulate_ctxt *ctxt, ulong dst) in assign_eip() argument
807 if (ctxt->op_bytes != sizeof(unsigned long)) in assign_eip()
808 addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1); in assign_eip()
809 rc = __linearize(ctxt, addr, &max_size, 1, false, true, ctxt->mode, &linear); in assign_eip()
811 ctxt->_eip = addr.ea; in assign_eip()
815 static inline int emulator_recalc_and_set_mode(struct x86_emulate_ctxt *ctxt) in emulator_recalc_and_set_mode() argument
822 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in emulator_recalc_and_set_mode()
824 if (!(ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PE)) { in emulator_recalc_and_set_mode()
828 ctxt->mode = X86EMUL_MODE_REAL; in emulator_recalc_and_set_mode()
832 if (ctxt->eflags & X86_EFLAGS_VM) { in emulator_recalc_and_set_mode()
836 ctxt->mode = X86EMUL_MODE_VM86; in emulator_recalc_and_set_mode()
840 if (!ctxt->ops->get_segment(ctxt, &selector, &cs, &base3, VCPU_SREG_CS)) in emulator_recalc_and_set_mode()
846 ctxt->mode = X86EMUL_MODE_PROT64; in emulator_recalc_and_set_mode()
849 ctxt->mode = X86EMUL_MODE_PROT32; in emulator_recalc_and_set_mode()
851 ctxt->mode = X86EMUL_MODE_PROT16; in emulator_recalc_and_set_mode()
855 ctxt->mode = cs.d ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16; in emulator_recalc_and_set_mode()
861 static inline int assign_eip_near(struct x86_emulate_ctxt *ctxt, ulong dst) in assign_eip_near() argument
863 return assign_eip(ctxt, dst); in assign_eip_near()
866 static int assign_eip_far(struct x86_emulate_ctxt *ctxt, ulong dst) in assign_eip_far() argument
868 int rc = emulator_recalc_and_set_mode(ctxt); in assign_eip_far()
873 return assign_eip(ctxt, dst); in assign_eip_far()
876 static inline int jmp_rel(struct x86_emulate_ctxt *ctxt, int rel) in jmp_rel() argument
878 return assign_eip_near(ctxt, ctxt->_eip + rel); in jmp_rel()
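
assign_eip() truncates the destination to the operand size before checking it against the CS limit, assign_eip_far() first recomputes the CPU mode from the new CS, and jmp_rel() is simply assign_eip_near(ctxt, ctxt->_eip + rel), so relative jumps wrap at 16 or 32 bits outside long mode. The truncation in isolation (illustrative helper name):

	static unsigned long truncate_ip(unsigned long dst, int op_bytes)
	{
		if (op_bytes != sizeof(unsigned long))
			dst &= (1UL << (op_bytes << 3)) - 1;	/* wrap at 64K or 4G */
		return dst;
	}

	/* truncate_ip(0x10005, 2) == 0x0005: a near jump past 64K wraps. */
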
881 static int linear_read_system(struct x86_emulate_ctxt *ctxt, ulong linear, in linear_read_system() argument
884 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true); in linear_read_system()
887 static int linear_write_system(struct x86_emulate_ctxt *ctxt, in linear_write_system() argument
891 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true); in linear_write_system()
894 static int segmented_read_std(struct x86_emulate_ctxt *ctxt, in segmented_read_std() argument
902 rc = linearize(ctxt, addr, size, false, &linear); in segmented_read_std()
905 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false); in segmented_read_std()
908 static int segmented_write_std(struct x86_emulate_ctxt *ctxt, in segmented_write_std() argument
916 rc = linearize(ctxt, addr, size, true, &linear); in segmented_write_std()
919 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false); in segmented_write_std()
926 static int __do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, int op_size) in __do_insn_fetch_bytes() argument
931 int cur_size = ctxt->fetch.end - ctxt->fetch.data; in __do_insn_fetch_bytes()
933 .ea = ctxt->eip + cur_size }; in __do_insn_fetch_bytes()
945 rc = __linearize(ctxt, addr, &max_size, 0, false, true, ctxt->mode, in __do_insn_fetch_bytes()
960 return emulate_gp(ctxt, 0); in __do_insn_fetch_bytes()
962 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end, in __do_insn_fetch_bytes()
963 size, &ctxt->exception); in __do_insn_fetch_bytes()
966 ctxt->fetch.end += size; in __do_insn_fetch_bytes()
970 static __always_inline int do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, in do_insn_fetch_bytes() argument
973 unsigned done_size = ctxt->fetch.end - ctxt->fetch.ptr; in do_insn_fetch_bytes()
976 return __do_insn_fetch_bytes(ctxt, size - done_size); in do_insn_fetch_bytes()
988 ctxt->_eip += sizeof(_type); \
989 memcpy(&_x, ctxt->fetch.ptr, sizeof(_type)); \
990 ctxt->fetch.ptr += sizeof(_type); \
999 ctxt->_eip += (_size); \
1000 memcpy(_arr, ctxt->fetch.ptr, _size); \
1001 ctxt->fetch.ptr += (_size); \
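
The two macro fragments above are the typed immediate fetch: bytes are prefetched into ctxt->fetch by __do_insn_fetch_bytes(), and each fetch advances ctxt->_eip and the buffer pointer by sizeof(type) (or by _size for the array form). Stripped of the _eip bookkeeping and the refill-and-bail error handling the real macro carries, the core is a GNU C statement expression:

	#define insn_fetch(_type, _fetch)			\
	({							\
		_type _x;					\
		memcpy(&_x, (_fetch)->ptr, sizeof(_type));	\
		(_fetch)->ptr += sizeof(_type);			\
		_x;						\
	})
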
1009 static void *decode_register(struct x86_emulate_ctxt *ctxt, u8 modrm_reg, in decode_register() argument
1013 int highbyte_regs = (ctxt->rex_prefix == 0) && byteop; in decode_register()
1016 p = (unsigned char *)reg_rmw(ctxt, modrm_reg & 3) + 1; in decode_register()
1018 p = reg_rmw(ctxt, modrm_reg); in decode_register()
1022 static int read_descriptor(struct x86_emulate_ctxt *ctxt, in read_descriptor() argument
1031 rc = segmented_read_std(ctxt, addr, size, 2); in read_descriptor()
1035 rc = segmented_read_std(ctxt, addr, address, op_bytes); in read_descriptor()
1083 static int em_bsf_c(struct x86_emulate_ctxt *ctxt) in em_bsf_c() argument
1086 if (ctxt->src.val == 0) in em_bsf_c()
1087 ctxt->dst.type = OP_NONE; in em_bsf_c()
1088 return fastop(ctxt, em_bsf); in em_bsf_c()
1091 static int em_bsr_c(struct x86_emulate_ctxt *ctxt) in em_bsr_c() argument
1094 if (ctxt->src.val == 0) in em_bsr_c()
1095 ctxt->dst.type = OP_NONE; in em_bsr_c()
1096 return fastop(ctxt, em_bsr); in em_bsr_c()
1230 static int em_fninit(struct x86_emulate_ctxt *ctxt) in em_fninit() argument
1232 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fninit()
1233 return emulate_nm(ctxt); in em_fninit()
1241 static int em_fnstcw(struct x86_emulate_ctxt *ctxt) in em_fnstcw() argument
1245 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstcw()
1246 return emulate_nm(ctxt); in em_fnstcw()
1252 ctxt->dst.val = fcw; in em_fnstcw()
1257 static int em_fnstsw(struct x86_emulate_ctxt *ctxt) in em_fnstsw() argument
1261 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstsw()
1262 return emulate_nm(ctxt); in em_fnstsw()
1268 ctxt->dst.val = fsw; in em_fnstsw()
1273 static void decode_register_operand(struct x86_emulate_ctxt *ctxt, in decode_register_operand() argument
1276 unsigned reg = ctxt->modrm_reg; in decode_register_operand()
1278 if (!(ctxt->d & ModRM)) in decode_register_operand()
1279 reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3); in decode_register_operand()
1281 if (ctxt->d & Sse) { in decode_register_operand()
1288 if (ctxt->d & Mmx) { in decode_register_operand()
1297 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_register_operand()
1298 op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp); in decode_register_operand()
1304 static void adjust_modrm_seg(struct x86_emulate_ctxt *ctxt, int base_reg) in adjust_modrm_seg() argument
1307 ctxt->modrm_seg = VCPU_SREG_SS; in adjust_modrm_seg()
1310 static int decode_modrm(struct x86_emulate_ctxt *ctxt, in decode_modrm() argument
1318 ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */ in decode_modrm()
1319 index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */ in decode_modrm()
1320 base_reg = (ctxt->rex_prefix << 3) & 8; /* REX.B */ in decode_modrm()
1322 ctxt->modrm_mod = (ctxt->modrm & 0xc0) >> 6; in decode_modrm()
1323 ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3; in decode_modrm()
1324 ctxt->modrm_rm = base_reg | (ctxt->modrm & 0x07); in decode_modrm()
1325 ctxt->modrm_seg = VCPU_SREG_DS; in decode_modrm()
1327 if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) { in decode_modrm()
1329 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_modrm()
1330 op->addr.reg = decode_register(ctxt, ctxt->modrm_rm, in decode_modrm()
1331 ctxt->d & ByteOp); in decode_modrm()
1332 if (ctxt->d & Sse) { in decode_modrm()
1335 op->addr.xmm = ctxt->modrm_rm; in decode_modrm()
1336 read_sse_reg(&op->vec_val, ctxt->modrm_rm); in decode_modrm()
1339 if (ctxt->d & Mmx) { in decode_modrm()
1342 op->addr.mm = ctxt->modrm_rm & 7; in decode_modrm()
1351 if (ctxt->ad_bytes == 2) { in decode_modrm()
1352 unsigned bx = reg_read(ctxt, VCPU_REGS_RBX); in decode_modrm()
1353 unsigned bp = reg_read(ctxt, VCPU_REGS_RBP); in decode_modrm()
1354 unsigned si = reg_read(ctxt, VCPU_REGS_RSI); in decode_modrm()
1355 unsigned di = reg_read(ctxt, VCPU_REGS_RDI); in decode_modrm()
1358 switch (ctxt->modrm_mod) { in decode_modrm()
1360 if (ctxt->modrm_rm == 6) in decode_modrm()
1361 modrm_ea += insn_fetch(u16, ctxt); in decode_modrm()
1364 modrm_ea += insn_fetch(s8, ctxt); in decode_modrm()
1367 modrm_ea += insn_fetch(u16, ctxt); in decode_modrm()
1370 switch (ctxt->modrm_rm) { in decode_modrm()
1390 if (ctxt->modrm_mod != 0) in decode_modrm()
1397 if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 || in decode_modrm()
1398 (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0)) in decode_modrm()
1399 ctxt->modrm_seg = VCPU_SREG_SS; in decode_modrm()
1403 if ((ctxt->modrm_rm & 7) == 4) { in decode_modrm()
1404 sib = insn_fetch(u8, ctxt); in decode_modrm()
1409 if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0) in decode_modrm()
1410 modrm_ea += insn_fetch(s32, ctxt); in decode_modrm()
1412 modrm_ea += reg_read(ctxt, base_reg); in decode_modrm()
1413 adjust_modrm_seg(ctxt, base_reg); in decode_modrm()
1415 if ((ctxt->d & IncSP) && in decode_modrm()
1417 modrm_ea += ctxt->op_bytes; in decode_modrm()
1420 modrm_ea += reg_read(ctxt, index_reg) << scale; in decode_modrm()
1421 } else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) { in decode_modrm()
1422 modrm_ea += insn_fetch(s32, ctxt); in decode_modrm()
1423 if (ctxt->mode == X86EMUL_MODE_PROT64) in decode_modrm()
1424 ctxt->rip_relative = 1; in decode_modrm()
1426 base_reg = ctxt->modrm_rm; in decode_modrm()
1427 modrm_ea += reg_read(ctxt, base_reg); in decode_modrm()
1428 adjust_modrm_seg(ctxt, base_reg); in decode_modrm()
1430 switch (ctxt->modrm_mod) { in decode_modrm()
1432 modrm_ea += insn_fetch(s8, ctxt); in decode_modrm()
1435 modrm_ea += insn_fetch(s32, ctxt); in decode_modrm()
1440 if (ctxt->ad_bytes != 8) in decode_modrm()
1441 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea; in decode_modrm()
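
The 16-bit branch of decode_modrm() above walks the classic ModRM effective-address table, and the modrm_rm == 2/3/6 test applies the rule that BP-based forms default to SS rather than DS. The table written out (illustrative helper; disp is the fetched disp8/disp16, zero when mod == 0 except for the rm == 6 case):

	static unsigned short ea16(unsigned rm, unsigned mod, unsigned short disp,
				   unsigned short bx, unsigned short bp,
				   unsigned short si, unsigned short di)
	{
		unsigned short ea;

		switch (rm & 7) {
		case 0: ea = bx + si; break;
		case 1: ea = bx + di; break;
		case 2: ea = bp + si; break;		/* SS-relative */
		case 3: ea = bp + di; break;		/* SS-relative */
		case 4: ea = si; break;
		case 5: ea = di; break;
		case 6: ea = mod ? bp : 0; break;	/* mod 0: bare disp16 */
		default: ea = bx; break;
		}
		return ea + disp;			/* wraps at 16 bits */
	}
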
1447 static int decode_abs(struct x86_emulate_ctxt *ctxt, in decode_abs() argument
1453 switch (ctxt->ad_bytes) { in decode_abs()
1455 op->addr.mem.ea = insn_fetch(u16, ctxt); in decode_abs()
1458 op->addr.mem.ea = insn_fetch(u32, ctxt); in decode_abs()
1461 op->addr.mem.ea = insn_fetch(u64, ctxt); in decode_abs()
1468 static void fetch_bit_operand(struct x86_emulate_ctxt *ctxt) in fetch_bit_operand() argument
1472 if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) { in fetch_bit_operand()
1473 mask = ~((long)ctxt->dst.bytes * 8 - 1); in fetch_bit_operand()
1475 if (ctxt->src.bytes == 2) in fetch_bit_operand()
1476 sv = (s16)ctxt->src.val & (s16)mask; in fetch_bit_operand()
1477 else if (ctxt->src.bytes == 4) in fetch_bit_operand()
1478 sv = (s32)ctxt->src.val & (s32)mask; in fetch_bit_operand()
1480 sv = (s64)ctxt->src.val & (s64)mask; in fetch_bit_operand()
1482 ctxt->dst.addr.mem.ea = address_mask(ctxt, in fetch_bit_operand()
1483 ctxt->dst.addr.mem.ea + (sv >> 3)); in fetch_bit_operand()
1487 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1; in fetch_bit_operand()
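
fetch_bit_operand() splits the register bit offset used by BT/BTS/BTR/BTC on memory into a signed byte displacement (added to the EA) and a bit index inside the operand. In isolation, with a worked example:

	/* illustrative helper: offset 35 on a 32-bit operand lands on
	 * byte EA+4, bit 3; negative offsets work via arithmetic shift */
	static void split_bit_offset(long sv, int bytes, long *byte_disp, int *bit)
	{
		long mask = ~((long)bytes * 8 - 1);	/* ~31 for 32-bit */

		*byte_disp = (sv & mask) >> 3;		/* 35 -> +4 */
		*bit = sv & (bytes * 8 - 1);		/* 35 -> 3 */
	}
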
1490 static int read_emulated(struct x86_emulate_ctxt *ctxt, in read_emulated() argument
1494 struct read_cache *mc = &ctxt->mem_read; in read_emulated()
1501 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size, in read_emulated()
1502 &ctxt->exception); in read_emulated()
1514 static int segmented_read(struct x86_emulate_ctxt *ctxt, in segmented_read() argument
1522 rc = linearize(ctxt, addr, size, false, &linear); in segmented_read()
1525 return read_emulated(ctxt, linear, data, size); in segmented_read()
1528 static int segmented_write(struct x86_emulate_ctxt *ctxt, in segmented_write() argument
1536 rc = linearize(ctxt, addr, size, true, &linear); in segmented_write()
1539 return ctxt->ops->write_emulated(ctxt, linear, data, size, in segmented_write()
1540 &ctxt->exception); in segmented_write()
1543 static int segmented_cmpxchg(struct x86_emulate_ctxt *ctxt, in segmented_cmpxchg() argument
1551 rc = linearize(ctxt, addr, size, true, &linear); in segmented_cmpxchg()
1554 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data, in segmented_cmpxchg()
1555 size, &ctxt->exception); in segmented_cmpxchg()
1558 static int pio_in_emulated(struct x86_emulate_ctxt *ctxt, in pio_in_emulated() argument
1562 struct read_cache *rc = &ctxt->io_read; in pio_in_emulated()
1566 unsigned int count = ctxt->rep_prefix ? in pio_in_emulated()
1567 address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) : 1; in pio_in_emulated()
1568 in_page = (ctxt->eflags & X86_EFLAGS_DF) ? in pio_in_emulated()
1569 offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)) : in pio_in_emulated()
1570 PAGE_SIZE - offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)); in pio_in_emulated()
1575 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n)) in pio_in_emulated()
1580 if (ctxt->rep_prefix && (ctxt->d & String) && in pio_in_emulated()
1581 !(ctxt->eflags & X86_EFLAGS_DF)) { in pio_in_emulated()
1582 ctxt->dst.data = rc->data + rc->pos; in pio_in_emulated()
1583 ctxt->dst.type = OP_MEM_STR; in pio_in_emulated()
1584 ctxt->dst.count = (rc->end - rc->pos) / size; in pio_in_emulated()
1593 static int read_interrupt_descriptor(struct x86_emulate_ctxt *ctxt, in read_interrupt_descriptor() argument
1599 ctxt->ops->get_idt(ctxt, &dt); in read_interrupt_descriptor()
1602 return emulate_gp(ctxt, index << 3 | 0x2); in read_interrupt_descriptor()
1605 return linear_read_system(ctxt, addr, desc, sizeof(*desc)); in read_interrupt_descriptor()
1608 static void get_descriptor_table_ptr(struct x86_emulate_ctxt *ctxt, in get_descriptor_table_ptr() argument
1611 const struct x86_emulate_ops *ops = ctxt->ops; in get_descriptor_table_ptr()
1619 if (!ops->get_segment(ctxt, &sel, &desc, &base3, in get_descriptor_table_ptr()
1626 ops->get_gdt(ctxt, dt); in get_descriptor_table_ptr()
1629 static int get_descriptor_ptr(struct x86_emulate_ctxt *ctxt, in get_descriptor_ptr() argument
1636 get_descriptor_table_ptr(ctxt, selector, &dt); in get_descriptor_ptr()
1639 return emulate_gp(ctxt, selector & 0xfffc); in get_descriptor_ptr()
1647 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
1658 static int read_segment_descriptor(struct x86_emulate_ctxt *ctxt, in read_segment_descriptor() argument
1664 rc = get_descriptor_ptr(ctxt, selector, desc_addr_p); in read_segment_descriptor()
1668 return linear_read_system(ctxt, *desc_addr_p, desc, sizeof(*desc)); in read_segment_descriptor()
1672 static int write_segment_descriptor(struct x86_emulate_ctxt *ctxt, in write_segment_descriptor() argument
1678 rc = get_descriptor_ptr(ctxt, selector, &addr); in write_segment_descriptor()
1682 return linear_write_system(ctxt, addr, desc, sizeof(*desc)); in write_segment_descriptor()
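
get_descriptor_ptr(), shared by read_segment_descriptor() and write_segment_descriptor(), turns a selector into the linear address of its 8-byte GDT/LDT entry, raising #GP(selector & 0xfffc) when the index runs past the table limit (the EFER read in the real function plausibly decides how the address is truncated; that detail is an assumption, since the listing elides it). The lookup in isolation:

	static int descriptor_addr(unsigned long table_base, unsigned table_limit,
				   unsigned short selector, unsigned long *addr)
	{
		unsigned index = selector >> 3;		/* selector bits 15:3 */

		if (index * 8 + 7 > table_limit)
			return -1;			/* #GP(selector & 0xfffc) */
		*addr = table_base + index * 8;
		return 0;
	}
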
1685 static int __load_segment_descriptor(struct x86_emulate_ctxt *ctxt, in __load_segment_descriptor() argument
1702 if (ctxt->mode == X86EMUL_MODE_REAL) { in __load_segment_descriptor()
1705 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg); in __load_segment_descriptor()
1708 } else if (seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86) { in __load_segment_descriptor()
1731 if (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl) in __load_segment_descriptor()
1750 ret = read_segment_descriptor(ctxt, selector, &seg_desc, &desc_addr); in __load_segment_descriptor()
1793 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
1831 ret = write_segment_descriptor(ctxt, selector, in __load_segment_descriptor()
1836 } else if (ctxt->mode == X86EMUL_MODE_PROT64) { in __load_segment_descriptor()
1837 ret = linear_read_system(ctxt, desc_addr+8, &base3, sizeof(base3)); in __load_segment_descriptor()
1841 ((u64)base3 << 32), ctxt)) in __load_segment_descriptor()
1842 return emulate_gp(ctxt, err_code); in __load_segment_descriptor()
1848 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc, in __load_segment_descriptor()
1849 sizeof(seg_desc), &ctxt->exception); in __load_segment_descriptor()
1854 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg); in __load_segment_descriptor()
1859 return emulate_exception(ctxt, err_vec, err_code, true); in __load_segment_descriptor()
1862 static int load_segment_descriptor(struct x86_emulate_ctxt *ctxt, in load_segment_descriptor() argument
1865 u8 cpl = ctxt->ops->cpl(ctxt); in load_segment_descriptor()
1878 ctxt->mode == X86EMUL_MODE_PROT64) in load_segment_descriptor()
1879 return emulate_exception(ctxt, GP_VECTOR, 0, true); in load_segment_descriptor()
1881 return __load_segment_descriptor(ctxt, selector, seg, cpl, in load_segment_descriptor()
1890 static int writeback(struct x86_emulate_ctxt *ctxt, struct operand *op) in writeback() argument
1897 if (ctxt->lock_prefix) in writeback()
1898 return segmented_cmpxchg(ctxt, in writeback()
1904 return segmented_write(ctxt, in writeback()
1910 return segmented_write(ctxt, in writeback()
1930 static int push(struct x86_emulate_ctxt *ctxt, void *data, int bytes) in push() argument
1934 rsp_increment(ctxt, -bytes); in push()
1935 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt); in push()
1938 return segmented_write(ctxt, addr, data, bytes); in push()
1941 static int em_push(struct x86_emulate_ctxt *ctxt) in em_push() argument
1944 ctxt->dst.type = OP_NONE; in em_push()
1945 return push(ctxt, &ctxt->src.val, ctxt->op_bytes); in em_push()
1948 static int emulate_pop(struct x86_emulate_ctxt *ctxt, in emulate_pop() argument
1954 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt); in emulate_pop()
1956 rc = segmented_read(ctxt, addr, dest, len); in emulate_pop()
1960 rsp_increment(ctxt, len); in emulate_pop()
1964 static int em_pop(struct x86_emulate_ctxt *ctxt) in em_pop() argument
1966 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_pop()
1969 static int emulate_popf(struct x86_emulate_ctxt *ctxt, in emulate_popf() argument
1974 int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulate_popf()
1975 int cpl = ctxt->ops->cpl(ctxt); in emulate_popf()
1977 rc = emulate_pop(ctxt, &val, len); in emulate_popf()
1986 switch(ctxt->mode) { in emulate_popf()
1997 return emulate_gp(ctxt, 0); in emulate_popf()
2006 (ctxt->eflags & ~change_mask) | (val & change_mask); in emulate_popf()
2011 static int em_popf(struct x86_emulate_ctxt *ctxt) in em_popf() argument
2013 ctxt->dst.type = OP_REG; in em_popf()
2014 ctxt->dst.addr.reg = &ctxt->eflags; in em_popf()
2015 ctxt->dst.bytes = ctxt->op_bytes; in em_popf()
2016 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_popf()
2019 static int em_enter(struct x86_emulate_ctxt *ctxt) in em_enter() argument
2022 unsigned frame_size = ctxt->src.val; in em_enter()
2023 unsigned nesting_level = ctxt->src2.val & 31; in em_enter()
2029 rbp = reg_read(ctxt, VCPU_REGS_RBP); in em_enter()
2030 rc = push(ctxt, &rbp, stack_size(ctxt)); in em_enter()
2033 assign_masked(reg_rmw(ctxt, VCPU_REGS_RBP), reg_read(ctxt, VCPU_REGS_RSP), in em_enter()
2034 stack_mask(ctxt)); in em_enter()
2035 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP), in em_enter()
2036 reg_read(ctxt, VCPU_REGS_RSP) - frame_size, in em_enter()
2037 stack_mask(ctxt)); in em_enter()
2041 static int em_leave(struct x86_emulate_ctxt *ctxt) in em_leave() argument
2043 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP), reg_read(ctxt, VCPU_REGS_RBP), in em_leave()
2044 stack_mask(ctxt)); in em_leave()
2045 return emulate_pop(ctxt, reg_rmw(ctxt, VCPU_REGS_RBP), ctxt->op_bytes); in em_leave()
2048 static int em_push_sreg(struct x86_emulate_ctxt *ctxt) in em_push_sreg() argument
2050 int seg = ctxt->src2.val; in em_push_sreg()
2052 ctxt->src.val = get_segment_selector(ctxt, seg); in em_push_sreg()
2053 if (ctxt->op_bytes == 4) { in em_push_sreg()
2054 rsp_increment(ctxt, -2); in em_push_sreg()
2055 ctxt->op_bytes = 2; in em_push_sreg()
2058 return em_push(ctxt); in em_push_sreg()
2061 static int em_pop_sreg(struct x86_emulate_ctxt *ctxt) in em_pop_sreg() argument
2063 int seg = ctxt->src2.val; in em_pop_sreg()
2067 rc = emulate_pop(ctxt, &selector, 2); in em_pop_sreg()
2072 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_pop_sreg()
2073 if (ctxt->op_bytes > 2) in em_pop_sreg()
2074 rsp_increment(ctxt, ctxt->op_bytes - 2); in em_pop_sreg()
2076 rc = load_segment_descriptor(ctxt, (u16)selector, seg); in em_pop_sreg()
2080 static int em_pusha(struct x86_emulate_ctxt *ctxt) in em_pusha() argument
2082 unsigned long old_esp = reg_read(ctxt, VCPU_REGS_RSP); in em_pusha()
2088 (ctxt->src.val = old_esp) : (ctxt->src.val = reg_read(ctxt, reg)); in em_pusha()
2090 rc = em_push(ctxt); in em_pusha()
2100 static int em_pushf(struct x86_emulate_ctxt *ctxt) in em_pushf() argument
2102 ctxt->src.val = (unsigned long)ctxt->eflags & ~X86_EFLAGS_VM; in em_pushf()
2103 return em_push(ctxt); in em_pushf()
2106 static int em_popa(struct x86_emulate_ctxt *ctxt) in em_popa() argument
2114 rsp_increment(ctxt, ctxt->op_bytes); in em_popa()
2118 rc = emulate_pop(ctxt, &val, ctxt->op_bytes); in em_popa()
2121 assign_register(reg_rmw(ctxt, reg), val, ctxt->op_bytes); in em_popa()
2127 static int __emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq) in __emulate_int_real() argument
2129 const struct x86_emulate_ops *ops = ctxt->ops; in __emulate_int_real()
2137 ctxt->src.val = ctxt->eflags; in __emulate_int_real()
2138 rc = em_push(ctxt); in __emulate_int_real()
2142 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC); in __emulate_int_real()
2144 ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS); in __emulate_int_real()
2145 rc = em_push(ctxt); in __emulate_int_real()
2149 ctxt->src.val = ctxt->_eip; in __emulate_int_real()
2150 rc = em_push(ctxt); in __emulate_int_real()
2154 ops->get_idt(ctxt, &dt); in __emulate_int_real()
2159 rc = linear_read_system(ctxt, cs_addr, &cs, 2); in __emulate_int_real()
2163 rc = linear_read_system(ctxt, eip_addr, &eip, 2); in __emulate_int_real()
2167 rc = load_segment_descriptor(ctxt, cs, VCPU_SREG_CS); in __emulate_int_real()
2171 ctxt->_eip = eip; in __emulate_int_real()
2176 int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq) in emulate_int_real() argument
2180 invalidate_registers(ctxt); in emulate_int_real()
2181 rc = __emulate_int_real(ctxt, irq); in emulate_int_real()
2183 writeback_registers(ctxt); in emulate_int_real()
2187 static int emulate_int(struct x86_emulate_ctxt *ctxt, int irq) in emulate_int() argument
2189 switch(ctxt->mode) { in emulate_int()
2191 return __emulate_int_real(ctxt, irq); in emulate_int()
2202 static int emulate_iret_real(struct x86_emulate_ctxt *ctxt) in emulate_iret_real() argument
2219 rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes); in emulate_iret_real()
2225 return emulate_gp(ctxt, 0); in emulate_iret_real()
2227 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in emulate_iret_real()
2232 rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes); in emulate_iret_real()
2237 rc = load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS); in emulate_iret_real()
2242 ctxt->_eip = temp_eip; in emulate_iret_real()
2244 if (ctxt->op_bytes == 4) in emulate_iret_real()
2245 ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask)); in emulate_iret_real()
2246 else if (ctxt->op_bytes == 2) { in emulate_iret_real()
2247 ctxt->eflags &= ~0xffff; in emulate_iret_real()
2248 ctxt->eflags |= temp_eflags; in emulate_iret_real()
2251 ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */ in emulate_iret_real()
2252 ctxt->eflags |= X86_EFLAGS_FIXED; in emulate_iret_real()
2253 ctxt->ops->set_nmi_mask(ctxt, false); in emulate_iret_real()
2258 static int em_iret(struct x86_emulate_ctxt *ctxt) in em_iret() argument
2260 switch(ctxt->mode) { in em_iret()
2262 return emulate_iret_real(ctxt); in em_iret()
2273 static int em_jmp_far(struct x86_emulate_ctxt *ctxt) in em_jmp_far() argument
2278 u8 cpl = ctxt->ops->cpl(ctxt); in em_jmp_far()
2280 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_jmp_far()
2282 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl, in em_jmp_far()
2288 rc = assign_eip_far(ctxt, ctxt->src.val); in em_jmp_far()
2296 static int em_jmp_abs(struct x86_emulate_ctxt *ctxt) in em_jmp_abs() argument
2298 return assign_eip_near(ctxt, ctxt->src.val); in em_jmp_abs()
2301 static int em_call_near_abs(struct x86_emulate_ctxt *ctxt) in em_call_near_abs() argument
2306 old_eip = ctxt->_eip; in em_call_near_abs()
2307 rc = assign_eip_near(ctxt, ctxt->src.val); in em_call_near_abs()
2310 ctxt->src.val = old_eip; in em_call_near_abs()
2311 rc = em_push(ctxt); in em_call_near_abs()
2315 static int em_cmpxchg8b(struct x86_emulate_ctxt *ctxt) in em_cmpxchg8b() argument
2317 u64 old = ctxt->dst.orig_val64; in em_cmpxchg8b()
2319 if (ctxt->dst.bytes == 16) in em_cmpxchg8b()
2322 if (((u32) (old >> 0) != (u32) reg_read(ctxt, VCPU_REGS_RAX)) || in em_cmpxchg8b()
2323 ((u32) (old >> 32) != (u32) reg_read(ctxt, VCPU_REGS_RDX))) { in em_cmpxchg8b()
2324 *reg_write(ctxt, VCPU_REGS_RAX) = (u32) (old >> 0); in em_cmpxchg8b()
2325 *reg_write(ctxt, VCPU_REGS_RDX) = (u32) (old >> 32); in em_cmpxchg8b()
2326 ctxt->eflags &= ~X86_EFLAGS_ZF; in em_cmpxchg8b()
2328 ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) | in em_cmpxchg8b()
2329 (u32) reg_read(ctxt, VCPU_REGS_RBX); in em_cmpxchg8b()
2331 ctxt->eflags |= X86_EFLAGS_ZF; in em_cmpxchg8b()
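
em_cmpxchg8b() implements the architectural compare-exchange against the 64-bit memory operand (dst.orig_val64): if it equals EDX:EAX, ECX:EBX is stored and ZF is set; otherwise the old value is loaded into EDX:EAX and ZF is cleared. Condensed, with rax/rbx/rcx/rdx as shorthand for the reg_read()/reg_write() calls:

	if (old == (((u64)rdx << 32) | (u32)rax)) {
		dst = ((u64)rcx << 32) | (u32)rbx;	/* store ECX:EBX */
		eflags |= X86_EFLAGS_ZF;
	} else {
		rax = (u32)old;				/* load old value back */
		rdx = (u32)(old >> 32);
		eflags &= ~X86_EFLAGS_ZF;
	}
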
2336 static int em_ret(struct x86_emulate_ctxt *ctxt) in em_ret() argument
2341 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret()
2345 return assign_eip_near(ctxt, eip); in em_ret()
2348 static int em_ret_far(struct x86_emulate_ctxt *ctxt) in em_ret_far() argument
2352 int cpl = ctxt->ops->cpl(ctxt); in em_ret_far()
2355 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_far()
2358 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in em_ret_far()
2362 if (ctxt->mode >= X86EMUL_MODE_PROT16 && (cs & 3) > cpl) in em_ret_far()
2364 rc = __load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS, cpl, in em_ret_far()
2369 rc = assign_eip_far(ctxt, eip); in em_ret_far()
2377 static int em_ret_far_imm(struct x86_emulate_ctxt *ctxt) in em_ret_far_imm() argument
2381 rc = em_ret_far(ctxt); in em_ret_far_imm()
2384 rsp_increment(ctxt, ctxt->src.val); in em_ret_far_imm()
2388 static int em_cmpxchg(struct x86_emulate_ctxt *ctxt) in em_cmpxchg() argument
2391 ctxt->dst.orig_val = ctxt->dst.val; in em_cmpxchg()
2392 ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2393 ctxt->src.orig_val = ctxt->src.val; in em_cmpxchg()
2394 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2395 fastop(ctxt, em_cmp); in em_cmpxchg()
2397 if (ctxt->eflags & X86_EFLAGS_ZF) { in em_cmpxchg()
2399 ctxt->src.type = OP_NONE; in em_cmpxchg()
2400 ctxt->dst.val = ctxt->src.orig_val; in em_cmpxchg()
2403 ctxt->src.type = OP_REG; in em_cmpxchg()
2404 ctxt->src.addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2405 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2407 ctxt->dst.val = ctxt->dst.orig_val; in em_cmpxchg()
2412 static int em_lseg(struct x86_emulate_ctxt *ctxt) in em_lseg() argument
2414 int seg = ctxt->src2.val; in em_lseg()
2418 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_lseg()
2420 rc = load_segment_descriptor(ctxt, sel, seg); in em_lseg()
2424 ctxt->dst.val = ctxt->src.val; in em_lseg()
2428 static int emulator_has_longmode(struct x86_emulate_ctxt *ctxt) in emulator_has_longmode() argument
2431 return ctxt->ops->guest_has_long_mode(ctxt); in emulator_has_longmode()
2449 static int rsm_load_seg_32(struct x86_emulate_ctxt *ctxt, const char *smstate, in rsm_load_seg_32() argument
2466 ctxt->ops->set_segment(ctxt, selector, &desc, 0, n); in rsm_load_seg_32()
2471 static int rsm_load_seg_64(struct x86_emulate_ctxt *ctxt, const char *smstate, in rsm_load_seg_64() argument
2487 ctxt->ops->set_segment(ctxt, selector, &desc, base3, n); in rsm_load_seg_64()
2492 static int rsm_enter_protected_mode(struct x86_emulate_ctxt *ctxt, in rsm_enter_protected_mode() argument
2505 bad = ctxt->ops->set_cr(ctxt, 3, cr3); in rsm_enter_protected_mode()
2514 bad = ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE); in rsm_enter_protected_mode()
2518 bad = ctxt->ops->set_cr(ctxt, 0, cr0); in rsm_enter_protected_mode()
2523 bad = ctxt->ops->set_cr(ctxt, 4, cr4); in rsm_enter_protected_mode()
2527 bad = ctxt->ops->set_cr(ctxt, 3, cr3 | pcid); in rsm_enter_protected_mode()
2537 static int rsm_load_state_32(struct x86_emulate_ctxt *ctxt, in rsm_load_state_32() argument
2548 ctxt->eflags = GET_SMSTATE(u32, smstate, 0x7ff4) | X86_EFLAGS_FIXED; in rsm_load_state_32()
2549 ctxt->_eip = GET_SMSTATE(u32, smstate, 0x7ff0); in rsm_load_state_32()
2552 *reg_write(ctxt, i) = GET_SMSTATE(u32, smstate, 0x7fd0 + i * 4); in rsm_load_state_32()
2556 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1)) in rsm_load_state_32()
2561 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1)) in rsm_load_state_32()
2568 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_TR); in rsm_load_state_32()
2574 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_LDTR); in rsm_load_state_32()
2578 ctxt->ops->set_gdt(ctxt, &dt); in rsm_load_state_32()
2582 ctxt->ops->set_idt(ctxt, &dt); in rsm_load_state_32()
2585 int r = rsm_load_seg_32(ctxt, smstate, i); in rsm_load_state_32()
2592 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7ef8)); in rsm_load_state_32()
2594 return rsm_enter_protected_mode(ctxt, cr0, cr3, cr4); in rsm_load_state_32()
2598 static int rsm_load_state_64(struct x86_emulate_ctxt *ctxt, in rsm_load_state_64() argument
2609 *reg_write(ctxt, i) = GET_SMSTATE(u64, smstate, 0x7ff8 - i * 8); in rsm_load_state_64()
2611 ctxt->_eip = GET_SMSTATE(u64, smstate, 0x7f78); in rsm_load_state_64()
2612 ctxt->eflags = GET_SMSTATE(u32, smstate, 0x7f70) | X86_EFLAGS_FIXED; in rsm_load_state_64()
2616 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1)) in rsm_load_state_64()
2621 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1)) in rsm_load_state_64()
2627 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7f00)); in rsm_load_state_64()
2630 if (ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA)) in rsm_load_state_64()
2638 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_TR); in rsm_load_state_64()
2642 ctxt->ops->set_idt(ctxt, &dt); in rsm_load_state_64()
2649 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_LDTR); in rsm_load_state_64()
2653 ctxt->ops->set_gdt(ctxt, &dt); in rsm_load_state_64()
2655 r = rsm_enter_protected_mode(ctxt, cr0, cr3, cr4); in rsm_load_state_64()
2660 r = rsm_load_seg_64(ctxt, smstate, i); in rsm_load_state_64()
2669 static int em_rsm(struct x86_emulate_ctxt *ctxt) in em_rsm() argument
2676 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_MASK) == 0) in em_rsm()
2677 return emulate_ud(ctxt); in em_rsm()
2679 smbase = ctxt->ops->get_smbase(ctxt); in em_rsm()
2681 ret = ctxt->ops->read_phys(ctxt, smbase + 0xfe00, buf, sizeof(buf)); in em_rsm()
2685 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_INSIDE_NMI_MASK) == 0) in em_rsm()
2686 ctxt->ops->set_nmi_mask(ctxt, false); in em_rsm()
2688 ctxt->ops->set_hflags(ctxt, ctxt->ops->get_hflags(ctxt) & in em_rsm()
2696 if (emulator_has_longmode(ctxt)) { in em_rsm()
2700 cr4 = ctxt->ops->get_cr(ctxt, 4); in em_rsm()
2702 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE); in em_rsm()
2708 ctxt->ops->set_segment(ctxt, 0, &cs_desc, 0, VCPU_SREG_CS); in em_rsm()
2712 cr0 = ctxt->ops->get_cr(ctxt, 0); in em_rsm()
2714 ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE)); in em_rsm()
2716 if (emulator_has_longmode(ctxt)) { in em_rsm()
2718 cr4 = ctxt->ops->get_cr(ctxt, 4); in em_rsm()
2720 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PAE); in em_rsm()
2724 ctxt->ops->set_msr(ctxt, MSR_EFER, efer); in em_rsm()
2732 if (ctxt->ops->pre_leave_smm(ctxt, buf)) in em_rsm()
2736 if (emulator_has_longmode(ctxt)) in em_rsm()
2737 ret = rsm_load_state_64(ctxt, buf); in em_rsm()
2740 ret = rsm_load_state_32(ctxt, buf); in em_rsm()
2747 ctxt->ops->post_leave_smm(ctxt); in em_rsm()
2753 setup_syscalls_segments(struct x86_emulate_ctxt *ctxt, in setup_syscalls_segments() argument
2779 static bool vendor_intel(struct x86_emulate_ctxt *ctxt) in vendor_intel() argument
2784 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true); in vendor_intel()
2788 static bool em_syscall_is_enabled(struct x86_emulate_ctxt *ctxt) in em_syscall_is_enabled() argument
2790 const struct x86_emulate_ops *ops = ctxt->ops; in em_syscall_is_enabled()
2797 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_syscall_is_enabled()
2802 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true); in em_syscall_is_enabled()
2823 static int em_syscall(struct x86_emulate_ctxt *ctxt) in em_syscall() argument
2825 const struct x86_emulate_ops *ops = ctxt->ops; in em_syscall()
2832 if (ctxt->mode == X86EMUL_MODE_REAL || in em_syscall()
2833 ctxt->mode == X86EMUL_MODE_VM86) in em_syscall()
2834 return emulate_ud(ctxt); in em_syscall()
2836 if (!(em_syscall_is_enabled(ctxt))) in em_syscall()
2837 return emulate_ud(ctxt); in em_syscall()
2839 ops->get_msr(ctxt, MSR_EFER, &efer); in em_syscall()
2841 return emulate_ud(ctxt); in em_syscall()
2843 setup_syscalls_segments(ctxt, &cs, &ss); in em_syscall()
2844 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2853 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_syscall()
2854 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_syscall()
2856 *reg_write(ctxt, VCPU_REGS_RCX) = ctxt->_eip; in em_syscall()
2859 *reg_write(ctxt, VCPU_REGS_R11) = ctxt->eflags; in em_syscall()
2861 ops->get_msr(ctxt, in em_syscall()
2862 ctxt->mode == X86EMUL_MODE_PROT64 ? in em_syscall()
2864 ctxt->_eip = msr_data; in em_syscall()
2866 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data); in em_syscall()
2867 ctxt->eflags &= ~msr_data; in em_syscall()
2868 ctxt->eflags |= X86_EFLAGS_FIXED; in em_syscall()
2872 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2873 ctxt->_eip = (u32)msr_data; in em_syscall()
2875 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_syscall()
2878 ctxt->tf = (ctxt->eflags & X86_EFLAGS_TF) != 0; in em_syscall()
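
On the 64-bit path, em_syscall() performs the architectural SYSCALL sequence visible above, after loading flat CS/SS from MSR_STAR. Condensed, with the register names standing in for the reg_write()/get_msr() calls:

	rcx = next_rip;			/* return address for SYSRET */
	r11 = rflags;			/* saved flags for SYSRET */
	rip = lstar;			/* entry point from MSR_LSTAR */
	rflags = (rflags & ~syscall_mask) | X86_EFLAGS_FIXED;	/* MSR_SYSCALL_MASK */

	/* legacy 32-bit path instead: eip = MSR_STAR[31:0],
	 * eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF) */
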
2882 static int em_sysenter(struct x86_emulate_ctxt *ctxt) in em_sysenter() argument
2884 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysenter()
2890 ops->get_msr(ctxt, MSR_EFER, &efer); in em_sysenter()
2892 if (ctxt->mode == X86EMUL_MODE_REAL) in em_sysenter()
2893 return emulate_gp(ctxt, 0); in em_sysenter()
2899 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) in em_sysenter()
2900 && !vendor_intel(ctxt)) in em_sysenter()
2901 return emulate_ud(ctxt); in em_sysenter()
2904 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_sysenter()
2907 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysenter()
2909 return emulate_gp(ctxt, 0); in em_sysenter()
2911 setup_syscalls_segments(ctxt, &cs, &ss); in em_sysenter()
2912 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_sysenter()
2920 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysenter()
2921 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysenter()
2923 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data); in em_sysenter()
2924 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data; in em_sysenter()
2926 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data); in em_sysenter()
2927 *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data : in em_sysenter()
2930 ctxt->mode = X86EMUL_MODE_PROT64; in em_sysenter()
2935 static int em_sysexit(struct x86_emulate_ctxt *ctxt) in em_sysexit() argument
2937 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysexit()
2944 if (ctxt->mode == X86EMUL_MODE_REAL || in em_sysexit()
2945 ctxt->mode == X86EMUL_MODE_VM86) in em_sysexit()
2946 return emulate_gp(ctxt, 0); in em_sysexit()
2948 setup_syscalls_segments(ctxt, &cs, &ss); in em_sysexit()
2950 if ((ctxt->rex_prefix & 0x8) != 0x0) in em_sysexit()
2955 rcx = reg_read(ctxt, VCPU_REGS_RCX); in em_sysexit()
2956 rdx = reg_read(ctxt, VCPU_REGS_RDX); in em_sysexit()
2960 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysexit()
2965 return emulate_gp(ctxt, 0); in em_sysexit()
2973 return emulate_gp(ctxt, 0); in em_sysexit()
2977 if (emul_is_noncanonical_address(rcx, ctxt) || in em_sysexit()
2978 emul_is_noncanonical_address(rdx, ctxt)) in em_sysexit()
2979 return emulate_gp(ctxt, 0); in em_sysexit()
2985 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysexit()
2986 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysexit()
2988 ctxt->_eip = rdx; in em_sysexit()
2989 ctxt->mode = usermode; in em_sysexit()
2990 *reg_write(ctxt, VCPU_REGS_RSP) = rcx; in em_sysexit()
2995 static bool emulator_bad_iopl(struct x86_emulate_ctxt *ctxt) in emulator_bad_iopl() argument
2998 if (ctxt->mode == X86EMUL_MODE_REAL) in emulator_bad_iopl()
3000 if (ctxt->mode == X86EMUL_MODE_VM86) in emulator_bad_iopl()
3002 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulator_bad_iopl()
3003 return ctxt->ops->cpl(ctxt) > iopl; in emulator_bad_iopl()
3009 static bool emulator_io_port_access_allowed(struct x86_emulate_ctxt *ctxt, in emulator_io_port_access_allowed() argument
3012 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_io_port_access_allowed()
3028 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR); in emulator_io_port_access_allowed()
3037 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true); in emulator_io_port_access_allowed()
3042 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true); in emulator_io_port_access_allowed()
3050 static bool emulator_io_permited(struct x86_emulate_ctxt *ctxt, in emulator_io_permited() argument
3053 if (ctxt->perm_ok) in emulator_io_permited()
3056 if (emulator_bad_iopl(ctxt)) in emulator_io_permited()
3057 if (!emulator_io_port_access_allowed(ctxt, port, len)) in emulator_io_permited()
3060 ctxt->perm_ok = true; in emulator_io_permited()
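
When CPL exceeds IOPL, emulator_io_port_access_allowed() consults the TSS I/O permission bitmap: the 16-bit map base is read from TSS offset 102, two bytes covering the port are fetched so a span crossing a byte boundary still works, and the access is permitted only if every bit for port..port+len-1 is clear; perm_ok caches a positive answer for the rest of the instruction. The bit test in isolation (illustrative helper):

	#include <stdbool.h>

	static bool io_allowed(const unsigned char *bitmap, unsigned port, int len)
	{
		unsigned mask = ((1u << len) - 1) << (port & 7);	/* len = 1, 2 or 4 */
		unsigned perm = bitmap[port / 8] | (bitmap[port / 8 + 1] << 8);

		return (perm & mask) == 0;	/* any set bit denies access */
	}
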
3065 static void string_registers_quirk(struct x86_emulate_ctxt *ctxt) in string_registers_quirk() argument
3072 if (ctxt->ad_bytes != 4 || !vendor_intel(ctxt)) in string_registers_quirk()
3075 *reg_write(ctxt, VCPU_REGS_RCX) = 0; in string_registers_quirk()
3077 switch (ctxt->b) { in string_registers_quirk()
3080 *reg_rmw(ctxt, VCPU_REGS_RSI) &= (u32)-1; in string_registers_quirk()
3084 *reg_rmw(ctxt, VCPU_REGS_RDI) &= (u32)-1; in string_registers_quirk()
3089 static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt, in save_state_to_tss16() argument
3092 tss->ip = ctxt->_eip; in save_state_to_tss16()
3093 tss->flag = ctxt->eflags; in save_state_to_tss16()
3094 tss->ax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss16()
3095 tss->cx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss16()
3096 tss->dx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss16()
3097 tss->bx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss16()
3098 tss->sp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss16()
3099 tss->bp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss16()
3100 tss->si = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss16()
3101 tss->di = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss16()
3103 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss16()
3104 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss16()
3105 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss16()
3106 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss16()
3107 tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR); in save_state_to_tss16()
3110 static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt, in load_state_from_tss16() argument
3116 ctxt->_eip = tss->ip; in load_state_from_tss16()
3117 ctxt->eflags = tss->flag | 2; in load_state_from_tss16()
3118 *reg_write(ctxt, VCPU_REGS_RAX) = tss->ax; in load_state_from_tss16()
3119 *reg_write(ctxt, VCPU_REGS_RCX) = tss->cx; in load_state_from_tss16()
3120 *reg_write(ctxt, VCPU_REGS_RDX) = tss->dx; in load_state_from_tss16()
3121 *reg_write(ctxt, VCPU_REGS_RBX) = tss->bx; in load_state_from_tss16()
3122 *reg_write(ctxt, VCPU_REGS_RSP) = tss->sp; in load_state_from_tss16()
3123 *reg_write(ctxt, VCPU_REGS_RBP) = tss->bp; in load_state_from_tss16()
3124 *reg_write(ctxt, VCPU_REGS_RSI) = tss->si; in load_state_from_tss16()
3125 *reg_write(ctxt, VCPU_REGS_RDI) = tss->di; in load_state_from_tss16()
3131 set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR); in load_state_from_tss16()
3132 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss16()
3133 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss16()
3134 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss16()
3135 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss16()
3143 ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl, in load_state_from_tss16()
3147 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss16()
3151 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss16()
3155 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss16()
3159 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss16()
3167 static int task_switch_16(struct x86_emulate_ctxt *ctxt, in task_switch_16() argument
3175 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg)); in task_switch_16()
3179 save_state_to_tss16(ctxt, &tss_seg); in task_switch_16()
3181 ret = linear_write_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg)); in task_switch_16()
3185 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg)); in task_switch_16()
3192 ret = linear_write_system(ctxt, new_tss_base, in task_switch_16()
3199 return load_state_from_tss16(ctxt, &tss_seg); in task_switch_16()
3202 static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt, in save_state_to_tss32() argument
3206 tss->eip = ctxt->_eip; in save_state_to_tss32()
3207 tss->eflags = ctxt->eflags; in save_state_to_tss32()
3208 tss->eax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss32()
3209 tss->ecx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss32()
3210 tss->edx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss32()
3211 tss->ebx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss32()
3212 tss->esp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss32()
3213 tss->ebp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss32()
3214 tss->esi = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss32()
3215 tss->edi = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss32()
3217 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss32()
3218 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss32()
3219 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss32()
3220 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss32()
3221 tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS); in save_state_to_tss32()
3222 tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS); in save_state_to_tss32()
3225 static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt, in load_state_from_tss32() argument
3231 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3)) in load_state_from_tss32()
3232 return emulate_gp(ctxt, 0); in load_state_from_tss32()
3233 ctxt->_eip = tss->eip; in load_state_from_tss32()
3234 ctxt->eflags = tss->eflags | 2; in load_state_from_tss32()
3237 *reg_write(ctxt, VCPU_REGS_RAX) = tss->eax; in load_state_from_tss32()
3238 *reg_write(ctxt, VCPU_REGS_RCX) = tss->ecx; in load_state_from_tss32()
3239 *reg_write(ctxt, VCPU_REGS_RDX) = tss->edx; in load_state_from_tss32()
3240 *reg_write(ctxt, VCPU_REGS_RBX) = tss->ebx; in load_state_from_tss32()
3241 *reg_write(ctxt, VCPU_REGS_RSP) = tss->esp; in load_state_from_tss32()
3242 *reg_write(ctxt, VCPU_REGS_RBP) = tss->ebp; in load_state_from_tss32()
3243 *reg_write(ctxt, VCPU_REGS_RSI) = tss->esi; in load_state_from_tss32()
3244 *reg_write(ctxt, VCPU_REGS_RDI) = tss->edi; in load_state_from_tss32()
3251 set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR); in load_state_from_tss32()
3252 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss32()
3253 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss32()
3254 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss32()
3255 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss32()
3256 set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS); in load_state_from_tss32()
3257 set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS); in load_state_from_tss32()
3264 if (ctxt->eflags & X86_EFLAGS_VM) { in load_state_from_tss32()
3265 ctxt->mode = X86EMUL_MODE_VM86; in load_state_from_tss32()
3268 ctxt->mode = X86EMUL_MODE_PROT32; in load_state_from_tss32()
3276 ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR, in load_state_from_tss32()
3280 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss32()
3284 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss32()
3288 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss32()
3292 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss32()
3296 ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl, in load_state_from_tss32()
3300 ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl, in load_state_from_tss32()
3306 static int task_switch_32(struct x86_emulate_ctxt *ctxt, in task_switch_32() argument
3316 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg)); in task_switch_32()
3320 save_state_to_tss32(ctxt, &tss_seg); in task_switch_32()
3323 ret = linear_write_system(ctxt, old_tss_base + eip_offset, &tss_seg.eip, in task_switch_32()
3328 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg)); in task_switch_32()
3335 ret = linear_write_system(ctxt, new_tss_base, in task_switch_32()
3342 return load_state_from_tss32(ctxt, &tss_seg); in task_switch_32()
3345 static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt, in emulator_do_task_switch() argument
3349 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_do_task_switch()
3352 u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR); in emulator_do_task_switch()
3354 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR); in emulator_do_task_switch()
3360 ret = read_segment_descriptor(ctxt, tss_selector, &next_tss_desc, &desc_addr); in emulator_do_task_switch()
3363 ret = read_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc, &desc_addr); in emulator_do_task_switch()
3383 ret = read_interrupt_descriptor(ctxt, idt_index, in emulator_do_task_switch()
3389 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl) in emulator_do_task_switch()
3390 return emulate_gp(ctxt, (idt_index << 3) | 0x2); in emulator_do_task_switch()
3398 return emulate_ts(ctxt, tss_selector & 0xfffc); in emulator_do_task_switch()
3403 write_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc); in emulator_do_task_switch()
3407 ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT; in emulator_do_task_switch()
3415 ret = task_switch_32(ctxt, tss_selector, old_tss_sel, in emulator_do_task_switch()
3418 ret = task_switch_16(ctxt, tss_selector, old_tss_sel, in emulator_do_task_switch()
3424 ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT; in emulator_do_task_switch()
3428 write_segment_descriptor(ctxt, tss_selector, &next_tss_desc); in emulator_do_task_switch()
3431 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS); in emulator_do_task_switch()
3432 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR); in emulator_do_task_switch()
3435 ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2; in emulator_do_task_switch()
3436 ctxt->lock_prefix = 0; in emulator_do_task_switch()
3437 ctxt->src.val = (unsigned long) error_code; in emulator_do_task_switch()
3438 ret = em_push(ctxt); in emulator_do_task_switch()
3441 ops->get_dr(ctxt, 7, &dr7); in emulator_do_task_switch()
3442 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN)); in emulator_do_task_switch()
3447 int emulator_task_switch(struct x86_emulate_ctxt *ctxt, in emulator_task_switch() argument
3453 invalidate_registers(ctxt); in emulator_task_switch()
3454 ctxt->_eip = ctxt->eip; in emulator_task_switch()
3455 ctxt->dst.type = OP_NONE; in emulator_task_switch()
3457 rc = emulator_do_task_switch(ctxt, tss_selector, idt_index, reason, in emulator_task_switch()
3461 ctxt->eip = ctxt->_eip; in emulator_task_switch()
3462 writeback_registers(ctxt); in emulator_task_switch()
3468 static void string_addr_inc(struct x86_emulate_ctxt *ctxt, int reg, in string_addr_inc() argument
3471 int df = (ctxt->eflags & X86_EFLAGS_DF) ? -op->count : op->count; in string_addr_inc()
3473 register_address_increment(ctxt, reg, df * op->bytes); in string_addr_inc()
3474 op->addr.mem.ea = register_address(ctxt, reg); in string_addr_inc()
3477 static int em_das(struct x86_emulate_ctxt *ctxt) in em_das() argument
3482 cf = ctxt->eflags & X86_EFLAGS_CF; in em_das()
3483 al = ctxt->dst.val; in em_das()
3488 af = ctxt->eflags & X86_EFLAGS_AF; in em_das()
3501 ctxt->dst.val = al; in em_das()
3503 ctxt->src.type = OP_IMM; in em_das()
3504 ctxt->src.val = 0; in em_das()
3505 ctxt->src.bytes = 1; in em_das()
3506 fastop(ctxt, em_or); in em_das()
3507 ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF); in em_das()
3509 ctxt->eflags |= X86_EFLAGS_CF; in em_das()
3511 ctxt->eflags |= X86_EFLAGS_AF; in em_das()
3515 static int em_aam(struct x86_emulate_ctxt *ctxt) in em_aam() argument
3519 if (ctxt->src.val == 0) in em_aam()
3520 return emulate_de(ctxt); in em_aam()
3522 al = ctxt->dst.val & 0xff; in em_aam()
3523 ah = al / ctxt->src.val; in em_aam()
3524 al %= ctxt->src.val; in em_aam()
3526 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8); in em_aam()
3529 ctxt->src.type = OP_IMM; in em_aam()
3530 ctxt->src.val = 0; in em_aam()
3531 ctxt->src.bytes = 1; in em_aam()
3532 fastop(ctxt, em_or); in em_aam()
3537 static int em_aad(struct x86_emulate_ctxt *ctxt) in em_aad() argument
3539 u8 al = ctxt->dst.val & 0xff; in em_aad()
3540 u8 ah = (ctxt->dst.val >> 8) & 0xff; in em_aad()
3542 al = (al + (ah * ctxt->src.val)) & 0xff; in em_aad()
3544 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al; in em_aad()
3547 ctxt->src.type = OP_IMM; in em_aad()
3548 ctxt->src.val = 0; in em_aad()
3549 ctxt->src.bytes = 1; in em_aad()
3550 fastop(ctxt, em_or); in em_aad()
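/*
 * Editor's note: em_aam() and em_aad() are inverses for any imm8 base
 * (0x0a in the plain AAM/AAD encodings); both then derive SF/ZF/PF through
 * the fastop(ctxt, em_or) trick with a zero OR. A quick round-trip check:
 */
#include <assert.h>

int main(void)
{
	unsigned char base = 10, al = 79, ah, out;

	ah = al / base;			/* AAM: 79 -> AH = 7 */
	al = al % base;			/*      79 -> AL = 9 */

	out = (al + ah * base) & 0xff;	/* AAD: 7 * 10 + 9 = 79 again */
	assert(out == 79);
	return 0;
}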
3555 static int em_call(struct x86_emulate_ctxt *ctxt) in em_call() argument
3558 long rel = ctxt->src.val; in em_call()
3560 ctxt->src.val = (unsigned long)ctxt->_eip; in em_call()
3561 rc = jmp_rel(ctxt, rel); in em_call()
3564 return em_push(ctxt); in em_call()
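/*
 * Editor's note: ordering in em_call() is deliberate: ctxt->_eip already
 * points past the CALL when it is stashed in ctxt->src.val, jmp_rel() then
 * moves _eip to the target, and em_push() pushes the saved return address;
 * if jmp_rel() faults, nothing has been pushed yet.
 */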
3567 static int em_call_far(struct x86_emulate_ctxt *ctxt) in em_call_far() argument
3573 const struct x86_emulate_ops *ops = ctxt->ops; in em_call_far()
3574 int cpl = ctxt->ops->cpl(ctxt); in em_call_far()
3575 enum x86emul_mode prev_mode = ctxt->mode; in em_call_far()
3577 old_eip = ctxt->_eip; in em_call_far()
3578 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS); in em_call_far()
3580 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_call_far()
3581 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl, in em_call_far()
3586 rc = assign_eip_far(ctxt, ctxt->src.val); in em_call_far()
3590 ctxt->src.val = old_cs; in em_call_far()
3591 rc = em_push(ctxt); in em_call_far()
3595 ctxt->src.val = old_eip; in em_call_far()
3596 rc = em_push(ctxt); in em_call_far()
3605 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS); in em_call_far()
3606 ctxt->mode = prev_mode; in em_call_far()
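/*
 * Editor's note: em_call_far() loads the new CS before pushing old CS:EIP,
 * so the two lines above are its unwind path: if either push faults, the
 * saved descriptor is restored with set_segment() and ctxt->mode is rolled
 * back to prev_mode, leaving the guest's CS as it was before the fault.
 */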
3611 static int em_ret_near_imm(struct x86_emulate_ctxt *ctxt) in em_ret_near_imm() argument
3616 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_near_imm()
3619 rc = assign_eip_near(ctxt, eip); in em_ret_near_imm()
3622 rsp_increment(ctxt, ctxt->src.val); in em_ret_near_imm()
3626 static int em_xchg(struct x86_emulate_ctxt *ctxt) in em_xchg() argument
3629 ctxt->src.val = ctxt->dst.val; in em_xchg()
3630 write_register_operand(&ctxt->src); in em_xchg()
3633 ctxt->dst.val = ctxt->src.orig_val; in em_xchg()
3634 ctxt->lock_prefix = 1; in em_xchg()
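/*
 * Editor's note: em_xchg() forces ctxt->lock_prefix on because XCHG with a
 * memory operand is architecturally locked even without an explicit LOCK
 * prefix, so the destination writeback goes through the atomic path.
 */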
3638 static int em_imul_3op(struct x86_emulate_ctxt *ctxt) in em_imul_3op() argument
3640 ctxt->dst.val = ctxt->src2.val; in em_imul_3op()
3641 return fastop(ctxt, em_imul); in em_imul_3op()
3644 static int em_cwd(struct x86_emulate_ctxt *ctxt) in em_cwd() argument
3646 ctxt->dst.type = OP_REG; in em_cwd()
3647 ctxt->dst.bytes = ctxt->src.bytes; in em_cwd()
3648 ctxt->dst.addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in em_cwd()
3649 ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1); in em_cwd()
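/*
 * Editor's note: em_cwd() fills RDX with copies of the source's sign bit
 * using ~((val >> (bits - 1)) - 1): the shift leaves just the sign bit, so
 * the expression evaluates to all ones for negative inputs and zero
 * otherwise. Checking the identity in isolation (sign_fill() is a
 * hypothetical name):
 */
#include <assert.h>

static unsigned long sign_fill(unsigned long val, unsigned int bytes)
{
	return ~((val >> (bytes * 8 - 1)) - 1);
}

int main(void)
{
	assert(sign_fill(0x8000, 2) == ~0UL);	/* CWD of a negative word */
	assert(sign_fill(0x7fff, 2) == 0);	/* CWD of a positive word */
	return 0;
}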
3654 static int em_rdpid(struct x86_emulate_ctxt *ctxt) in em_rdpid() argument
3658 if (!ctxt->ops->guest_has_rdpid(ctxt)) in em_rdpid()
3659 return emulate_ud(ctxt); in em_rdpid()
3661 ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux); in em_rdpid()
3662 ctxt->dst.val = tsc_aux; in em_rdpid()
3666 static int em_rdtsc(struct x86_emulate_ctxt *ctxt) in em_rdtsc() argument
3670 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc); in em_rdtsc()
3671 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)tsc; in em_rdtsc()
3672 *reg_write(ctxt, VCPU_REGS_RDX) = tsc >> 32; in em_rdtsc()
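/*
 * Editor's note: RDTSC, RDPMC and RDMSR all return 64 bits through the
 * EDX:EAX pair, hence the identical (u32)val / val >> 32 split in these
 * handlers; em_wrmsr() reassembles the pair in the opposite direction.
 * The split and its inverse, in isolation:
 */
#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint64_t val = 0x0123456789abcdefULL;
	uint32_t eax = (uint32_t)val, edx = val >> 32;

	assert((((uint64_t)edx << 32) | eax) == val);
	return 0;
}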
3676 static int em_rdpmc(struct x86_emulate_ctxt *ctxt) in em_rdpmc() argument
3680 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc)) in em_rdpmc()
3681 return emulate_gp(ctxt, 0); in em_rdpmc()
3682 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)pmc; in em_rdpmc()
3683 *reg_write(ctxt, VCPU_REGS_RDX) = pmc >> 32; in em_rdpmc()
3687 static int em_mov(struct x86_emulate_ctxt *ctxt) in em_mov() argument
3689 memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr)); in em_mov()
3693 static int em_movbe(struct x86_emulate_ctxt *ctxt) in em_movbe() argument
3697 if (!ctxt->ops->guest_has_movbe(ctxt)) in em_movbe()
3698 return emulate_ud(ctxt); in em_movbe()
3700 switch (ctxt->op_bytes) { in em_movbe()
3710 tmp = (u16)ctxt->src.val; in em_movbe()
3711 ctxt->dst.val &= ~0xffffUL; in em_movbe()
3712 ctxt->dst.val |= (unsigned long)swab16(tmp); in em_movbe()
3715 ctxt->dst.val = swab32((u32)ctxt->src.val); in em_movbe()
3718 ctxt->dst.val = swab64(ctxt->src.val); in em_movbe()
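/*
 * Editor's note: for the 2-byte form of MOVBE only the low word of the
 * destination is replaced (hence the ~0xffff mask above), while the 4- and
 * 8-byte forms overwrite the whole value. A userspace equivalent of the
 * 16-bit case, using __builtin_bswap16() in place of the kernel's swab16():
 */
#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint64_t dst = 0xaaaabbbbccccddddULL;
	uint16_t src = 0x1234;

	dst = (dst & ~0xffffULL) | __builtin_bswap16(src);
	assert(dst == 0xaaaabbbbcccc3412ULL);
	return 0;
}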
3726 static int em_cr_write(struct x86_emulate_ctxt *ctxt) in em_cr_write() argument
3728 int cr_num = ctxt->modrm_reg; in em_cr_write()
3731 if (ctxt->ops->set_cr(ctxt, cr_num, ctxt->src.val)) in em_cr_write()
3732 return emulate_gp(ctxt, 0); in em_cr_write()
3735 ctxt->dst.type = OP_NONE; in em_cr_write()
3742 r = emulator_recalc_and_set_mode(ctxt); in em_cr_write()
3750 static int em_dr_write(struct x86_emulate_ctxt *ctxt) in em_dr_write() argument
3754 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_dr_write()
3755 val = ctxt->src.val & ~0ULL; in em_dr_write()
3757 val = ctxt->src.val & ~0U; in em_dr_write()
3760 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0) in em_dr_write()
3761 return emulate_gp(ctxt, 0); in em_dr_write()
3764 ctxt->dst.type = OP_NONE; in em_dr_write()
3768 static int em_wrmsr(struct x86_emulate_ctxt *ctxt) in em_wrmsr() argument
3770 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX); in em_wrmsr()
3774 msr_data = (u32)reg_read(ctxt, VCPU_REGS_RAX) in em_wrmsr()
3775 | ((u64)reg_read(ctxt, VCPU_REGS_RDX) << 32); in em_wrmsr()
3776 r = ctxt->ops->set_msr(ctxt, msr_index, msr_data); in em_wrmsr()
3782 return emulate_gp(ctxt, 0); in em_wrmsr()
3787 static int em_rdmsr(struct x86_emulate_ctxt *ctxt) in em_rdmsr() argument
3789 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX); in em_rdmsr()
3793 r = ctxt->ops->get_msr(ctxt, msr_index, &msr_data); in em_rdmsr()
3799 return emulate_gp(ctxt, 0); in em_rdmsr()
3801 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)msr_data; in em_rdmsr()
3802 *reg_write(ctxt, VCPU_REGS_RDX) = msr_data >> 32; in em_rdmsr()
3806 static int em_store_sreg(struct x86_emulate_ctxt *ctxt, int segment) in em_store_sreg() argument
3809 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in em_store_sreg()
3810 ctxt->ops->cpl(ctxt) > 0) in em_store_sreg()
3811 return emulate_gp(ctxt, 0); in em_store_sreg()
3813 ctxt->dst.val = get_segment_selector(ctxt, segment); in em_store_sreg()
3814 if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM) in em_store_sreg()
3815 ctxt->dst.bytes = 2; in em_store_sreg()
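/*
 * Editor's note: em_store_sreg() narrows a 4-byte memory destination to 2
 * bytes because storing a segment register to memory writes only the 16-bit
 * selector regardless of operand size; zero-extension applies to register
 * destinations only. The UMIP test above (its first clause is elided in
 * this listing) covers the LDTR/TR users of the helper, em_sldt() and
 * em_str().
 */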
3819 static int em_mov_rm_sreg(struct x86_emulate_ctxt *ctxt) in em_mov_rm_sreg() argument
3821 if (ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_rm_sreg()
3822 return emulate_ud(ctxt); in em_mov_rm_sreg()
3824 return em_store_sreg(ctxt, ctxt->modrm_reg); in em_mov_rm_sreg()
3827 static int em_mov_sreg_rm(struct x86_emulate_ctxt *ctxt) in em_mov_sreg_rm() argument
3829 u16 sel = ctxt->src.val; in em_mov_sreg_rm()
3831 if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_sreg_rm()
3832 return emulate_ud(ctxt); in em_mov_sreg_rm()
3834 if (ctxt->modrm_reg == VCPU_SREG_SS) in em_mov_sreg_rm()
3835 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_mov_sreg_rm()
3838 ctxt->dst.type = OP_NONE; in em_mov_sreg_rm()
3839 return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg); in em_mov_sreg_rm()
3842 static int em_sldt(struct x86_emulate_ctxt *ctxt) in em_sldt() argument
3844 return em_store_sreg(ctxt, VCPU_SREG_LDTR); in em_sldt()
3847 static int em_lldt(struct x86_emulate_ctxt *ctxt) in em_lldt() argument
3849 u16 sel = ctxt->src.val; in em_lldt()
3852 ctxt->dst.type = OP_NONE; in em_lldt()
3853 return load_segment_descriptor(ctxt, sel, VCPU_SREG_LDTR); in em_lldt()
3856 static int em_str(struct x86_emulate_ctxt *ctxt) in em_str() argument
3858 return em_store_sreg(ctxt, VCPU_SREG_TR); in em_str()
3861 static int em_ltr(struct x86_emulate_ctxt *ctxt) in em_ltr() argument
3863 u16 sel = ctxt->src.val; in em_ltr()
3866 ctxt->dst.type = OP_NONE; in em_ltr()
3867 return load_segment_descriptor(ctxt, sel, VCPU_SREG_TR); in em_ltr()
3870 static int em_invlpg(struct x86_emulate_ctxt *ctxt) in em_invlpg() argument
3875 rc = linearize(ctxt, ctxt->src.addr.mem, 1, false, &linear); in em_invlpg()
3877 ctxt->ops->invlpg(ctxt, linear); in em_invlpg()
3879 ctxt->dst.type = OP_NONE; in em_invlpg()
3883 static int em_clts(struct x86_emulate_ctxt *ctxt) in em_clts() argument
3887 cr0 = ctxt->ops->get_cr(ctxt, 0); in em_clts()
3889 ctxt->ops->set_cr(ctxt, 0, cr0); in em_clts()
3893 static int em_hypercall(struct x86_emulate_ctxt *ctxt) in em_hypercall() argument
3895 int rc = ctxt->ops->fix_hypercall(ctxt); in em_hypercall()
3901 ctxt->_eip = ctxt->eip; in em_hypercall()
3903 ctxt->dst.type = OP_NONE; in em_hypercall()
3907 static int emulate_store_desc_ptr(struct x86_emulate_ctxt *ctxt, in emulate_store_desc_ptr() argument
3908 void (*get)(struct x86_emulate_ctxt *ctxt, in emulate_store_desc_ptr() argument
3913 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in emulate_store_desc_ptr()
3914 ctxt->ops->cpl(ctxt) > 0) in emulate_store_desc_ptr()
3915 return emulate_gp(ctxt, 0); in emulate_store_desc_ptr()
3917 if (ctxt->mode == X86EMUL_MODE_PROT64) in emulate_store_desc_ptr()
3918 ctxt->op_bytes = 8; in emulate_store_desc_ptr()
3919 get(ctxt, &desc_ptr); in emulate_store_desc_ptr()
3920 if (ctxt->op_bytes == 2) { in emulate_store_desc_ptr()
3921 ctxt->op_bytes = 4; in emulate_store_desc_ptr()
3925 ctxt->dst.type = OP_NONE; in emulate_store_desc_ptr()
3926 return segmented_write_std(ctxt, ctxt->dst.addr.mem, in emulate_store_desc_ptr()
3927 &desc_ptr, 2 + ctxt->op_bytes); in emulate_store_desc_ptr()
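/*
 * Editor's note: the stored image is a 2-byte limit followed by the table
 * base, hence the 2 + ctxt->op_bytes write size. For the 16-bit operand
 * form, the elided line inside the op_bytes == 2 branch masks the base down
 * to 24 significant bits, matching how hardware SGDT/SIDT behave there.
 */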
3930 static int em_sgdt(struct x86_emulate_ctxt *ctxt) in em_sgdt() argument
3932 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt); in em_sgdt()
3935 static int em_sidt(struct x86_emulate_ctxt *ctxt) in em_sidt() argument
3937 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt); in em_sidt()
3940 static int em_lgdt_lidt(struct x86_emulate_ctxt *ctxt, bool lgdt) in em_lgdt_lidt() argument
3945 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_lgdt_lidt()
3946 ctxt->op_bytes = 8; in em_lgdt_lidt()
3947 rc = read_descriptor(ctxt, ctxt->src.addr.mem, in em_lgdt_lidt()
3949 ctxt->op_bytes); in em_lgdt_lidt()
3952 if (ctxt->mode == X86EMUL_MODE_PROT64 && in em_lgdt_lidt()
3953 emul_is_noncanonical_address(desc_ptr.address, ctxt)) in em_lgdt_lidt()
3954 return emulate_gp(ctxt, 0); in em_lgdt_lidt()
3956 ctxt->ops->set_gdt(ctxt, &desc_ptr); in em_lgdt_lidt()
3958 ctxt->ops->set_idt(ctxt, &desc_ptr); in em_lgdt_lidt()
3960 ctxt->dst.type = OP_NONE; in em_lgdt_lidt()
3964 static int em_lgdt(struct x86_emulate_ctxt *ctxt) in em_lgdt() argument
3966 return em_lgdt_lidt(ctxt, true); in em_lgdt()
3969 static int em_lidt(struct x86_emulate_ctxt *ctxt) in em_lidt() argument
3971 return em_lgdt_lidt(ctxt, false); in em_lidt()
3974 static int em_smsw(struct x86_emulate_ctxt *ctxt) in em_smsw() argument
3976 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in em_smsw()
3977 ctxt->ops->cpl(ctxt) > 0) in em_smsw()
3978 return emulate_gp(ctxt, 0); in em_smsw()
3980 if (ctxt->dst.type == OP_MEM) in em_smsw()
3981 ctxt->dst.bytes = 2; in em_smsw()
3982 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0); in em_smsw()
3986 static int em_lmsw(struct x86_emulate_ctxt *ctxt) in em_lmsw() argument
3988 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul) in em_lmsw()
3989 | (ctxt->src.val & 0x0f)); in em_lmsw()
3990 ctxt->dst.type = OP_NONE; in em_lmsw()
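/*
 * Editor's note: em_lmsw() touches only CR0's low four bits. The ~0x0eul
 * mask clears MP/EM/TS but leaves PE alone before OR-ing in the new low
 * nibble, so LMSW can set protected mode but can never clear it, which is
 * the architectural LMSW behaviour.
 */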
3994 static int em_loop(struct x86_emulate_ctxt *ctxt) in em_loop() argument
3998 register_address_increment(ctxt, VCPU_REGS_RCX, -1); in em_loop()
3999 if ((address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) != 0) && in em_loop()
4000 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags))) in em_loop()
4001 rc = jmp_rel(ctxt, ctxt->src.val); in em_loop()
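/*
 * Editor's note: the ctxt->b ^ 0x5 trick maps the LOOP-family opcodes onto
 * test_cc() condition codes: 0xe0 (LOOPNE) ^ 0x5 = 0xe5, condition "ne"
 * (ZF clear), and 0xe1 (LOOPE) ^ 0x5 = 0xe4, condition "e" (ZF set); plain
 * LOOP (0xe2) skips the flags test via the explicit ctxt->b == 0xe2 check.
 */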
4006 static int em_jcxz(struct x86_emulate_ctxt *ctxt) in em_jcxz() argument
4010 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0) in em_jcxz()
4011 rc = jmp_rel(ctxt, ctxt->src.val); in em_jcxz()
4016 static int em_in(struct x86_emulate_ctxt *ctxt) in em_in() argument
4018 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val, in em_in()
4019 &ctxt->dst.val)) in em_in()
4025 static int em_out(struct x86_emulate_ctxt *ctxt) in em_out() argument
4027 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val, in em_out()
4028 &ctxt->src.val, 1); in em_out()
4030 ctxt->dst.type = OP_NONE; in em_out()
4034 static int em_cli(struct x86_emulate_ctxt *ctxt) in em_cli() argument
4036 if (emulator_bad_iopl(ctxt)) in em_cli()
4037 return emulate_gp(ctxt, 0); in em_cli()
4039 ctxt->eflags &= ~X86_EFLAGS_IF; in em_cli()
4043 static int em_sti(struct x86_emulate_ctxt *ctxt) in em_sti() argument
4045 if (emulator_bad_iopl(ctxt)) in em_sti()
4046 return emulate_gp(ctxt, 0); in em_sti()
4048 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI; in em_sti()
4049 ctxt->eflags |= X86_EFLAGS_IF; in em_sti()
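/*
 * Editor's note: em_sti() also records the STI interrupt shadow
 * (KVM_X86_SHADOW_INT_STI), which keeps interrupts blocked for one more
 * instruction; that delay is what makes guest sequences like STI; HLT
 * behave atomically, as they do on hardware.
 */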
4053 static int em_cpuid(struct x86_emulate_ctxt *ctxt) in em_cpuid() argument
4058 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr); in em_cpuid()
4060 ctxt->ops->cpl(ctxt)) { in em_cpuid()
4061 return emulate_gp(ctxt, 0); in em_cpuid()
4064 eax = reg_read(ctxt, VCPU_REGS_RAX); in em_cpuid()
4065 ecx = reg_read(ctxt, VCPU_REGS_RCX); in em_cpuid()
4066 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false); in em_cpuid()
4067 *reg_write(ctxt, VCPU_REGS_RAX) = eax; in em_cpuid()
4068 *reg_write(ctxt, VCPU_REGS_RBX) = ebx; in em_cpuid()
4069 *reg_write(ctxt, VCPU_REGS_RCX) = ecx; in em_cpuid()
4070 *reg_write(ctxt, VCPU_REGS_RDX) = edx; in em_cpuid()
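/*
 * Editor's note: the MSR test at the top of em_cpuid() implements CPUID
 * faulting: with the CPUID-fault bit of MSR_MISC_FEATURES_ENABLES set,
 * CPUID executed at CPL > 0 raises #GP(0). Otherwise the leaf and subleaf
 * come from EAX/ECX and all four output registers are written back.
 */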
4074 static int em_sahf(struct x86_emulate_ctxt *ctxt) in em_sahf() argument
4080 flags &= *reg_rmw(ctxt, VCPU_REGS_RAX) >> 8; in em_sahf()
4082 ctxt->eflags &= ~0xffUL; in em_sahf()
4083 ctxt->eflags |= flags | X86_EFLAGS_FIXED; in em_sahf()
4087 static int em_lahf(struct x86_emulate_ctxt *ctxt) in em_lahf() argument
4089 *reg_rmw(ctxt, VCPU_REGS_RAX) &= ~0xff00UL; in em_lahf()
4090 *reg_rmw(ctxt, VCPU_REGS_RAX) |= (ctxt->eflags & 0xff) << 8; in em_lahf()
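/*
 * Editor's note: em_sahf() copies SF/ZF/AF/PF/CF from AH into EFLAGS, with
 * X86_EFLAGS_FIXED keeping the always-one bit set, and em_lahf() is its
 * inverse, loading AH from the low FLAGS byte.
 */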
4094 static int em_bswap(struct x86_emulate_ctxt *ctxt) in em_bswap() argument
4096 switch (ctxt->op_bytes) { in em_bswap()
4099 asm("bswap %0" : "+r"(ctxt->dst.val)); in em_bswap()
4103 asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val)); in em_bswap()
4109 static int em_clflush(struct x86_emulate_ctxt *ctxt) in em_clflush() argument
4115 static int em_clflushopt(struct x86_emulate_ctxt *ctxt) in em_clflushopt() argument
4121 static int em_movsxd(struct x86_emulate_ctxt *ctxt) in em_movsxd() argument
4123 ctxt->dst.val = (s32) ctxt->src.val; in em_movsxd()
4127 static int check_fxsr(struct x86_emulate_ctxt *ctxt) in check_fxsr() argument
4129 if (!ctxt->ops->guest_has_fxsr(ctxt)) in check_fxsr()
4130 return emulate_ud(ctxt); in check_fxsr()
4132 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in check_fxsr()
4133 return emulate_nm(ctxt); in check_fxsr()
4139 if (ctxt->mode >= X86EMUL_MODE_PROT64) in check_fxsr()
4154 static inline size_t fxstate_size(struct x86_emulate_ctxt *ctxt) in fxstate_size() argument
4157 if (ctxt->mode == X86EMUL_MODE_PROT64) in fxstate_size()
4160 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR; in fxstate_size()
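/*
 * Editor's note: fxstate_size() sizes the FXSAVE image by how many XMM
 * registers the guest can architecturally see: all sixteen in 64-bit mode,
 * eight in 32-bit mode with CR4.OSFXSR set, and none when OSFXSR is clear,
 * so em_fxsave()/em_fxrstor() never move state the guest could not access.
 */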
4182 static int em_fxsave(struct x86_emulate_ctxt *ctxt) in em_fxsave() argument
4187 rc = check_fxsr(ctxt); in em_fxsave()
4200 return segmented_write_std(ctxt, ctxt->memop.addr.mem, &fx_state, in em_fxsave()
4201 fxstate_size(ctxt)); in em_fxsave()
4224 static int em_fxrstor(struct x86_emulate_ctxt *ctxt) in em_fxrstor() argument
4230 rc = check_fxsr(ctxt); in em_fxrstor()
4234 size = fxstate_size(ctxt); in em_fxrstor()
4235 rc = segmented_read_std(ctxt, ctxt->memop.addr.mem, &fx_state, size); in em_fxrstor()
4248 rc = emulate_gp(ctxt, 0); in em_fxrstor()
4261 static int em_xsetbv(struct x86_emulate_ctxt *ctxt) in em_xsetbv() argument
4265 eax = reg_read(ctxt, VCPU_REGS_RAX); in em_xsetbv()
4266 edx = reg_read(ctxt, VCPU_REGS_RDX); in em_xsetbv()
4267 ecx = reg_read(ctxt, VCPU_REGS_RCX); in em_xsetbv()
4269 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax)) in em_xsetbv()
4270 return emulate_gp(ctxt, 0); in em_xsetbv()
4287 static int check_cr_access(struct x86_emulate_ctxt *ctxt) in check_cr_access() argument
4289 if (!valid_cr(ctxt->modrm_reg)) in check_cr_access()
4290 return emulate_ud(ctxt); in check_cr_access()
4295 static int check_dr7_gd(struct x86_emulate_ctxt *ctxt) in check_dr7_gd() argument
4299 ctxt->ops->get_dr(ctxt, 7, &dr7); in check_dr7_gd()
4305 static int check_dr_read(struct x86_emulate_ctxt *ctxt) in check_dr_read() argument
4307 int dr = ctxt->modrm_reg; in check_dr_read()
4311 return emulate_ud(ctxt); in check_dr_read()
4313 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_dr_read()
4315 return emulate_ud(ctxt); in check_dr_read()
4317 if (check_dr7_gd(ctxt)) { in check_dr_read()
4320 ctxt->ops->get_dr(ctxt, 6, &dr6); in check_dr_read()
4323 ctxt->ops->set_dr(ctxt, 6, dr6); in check_dr_read()
4324 return emulate_db(ctxt); in check_dr_read()
4330 static int check_dr_write(struct x86_emulate_ctxt *ctxt) in check_dr_write() argument
4332 u64 new_val = ctxt->src.val64; in check_dr_write()
4333 int dr = ctxt->modrm_reg; in check_dr_write()
4336 return emulate_gp(ctxt, 0); in check_dr_write()
4338 return check_dr_read(ctxt); in check_dr_write()
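/*
 * Editor's note: together these encode the MOV DR legality rules: DR4/DR5
 * alias DR6/DR7 and #UD when CR4.DE is set, DR7.GD turns any debug-register
 * access into #DB with DR6.BD reported (the dr6 read-modify-write above),
 * and a 64-bit write to DR6/DR7 with any upper-32 bit set takes #GP(0)
 * rather than silently truncating.
 */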
4341 static int check_svme(struct x86_emulate_ctxt *ctxt) in check_svme() argument
4345 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_svme()
4348 return emulate_ud(ctxt); in check_svme()
4353 static int check_svme_pa(struct x86_emulate_ctxt *ctxt) in check_svme_pa() argument
4355 u64 rax = reg_read(ctxt, VCPU_REGS_RAX); in check_svme_pa()
4359 return emulate_gp(ctxt, 0); in check_svme_pa()
4361 return check_svme(ctxt); in check_svme_pa()
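/*
 * Editor's note: check_svme() gates the SVM instruction group on EFER.SVME
 * (#UD otherwise); check_svme_pa() layers a sanity test of RAX on top (the
 * test itself is elided in this listing), since VMRUN/VMLOAD/VMSAVE take
 * the VMCB physical address in RAX and an invalid one must raise #GP(0).
 */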
4364 static int check_rdtsc(struct x86_emulate_ctxt *ctxt) in check_rdtsc() argument
4366 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdtsc()
4368 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt)) in check_rdtsc()
4369 return emulate_ud(ctxt); in check_rdtsc()
4374 static int check_rdpmc(struct x86_emulate_ctxt *ctxt) in check_rdpmc() argument
4376 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdpmc()
4377 u64 rcx = reg_read(ctxt, VCPU_REGS_RCX); in check_rdpmc()
4386 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) || in check_rdpmc()
4387 ctxt->ops->check_pmc(ctxt, rcx)) in check_rdpmc()
4388 return emulate_gp(ctxt, 0); in check_rdpmc()
4393 static int check_perm_in(struct x86_emulate_ctxt *ctxt) in check_perm_in() argument
4395 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u); in check_perm_in()
4396 if (!emulator_io_permited(ctxt, ctxt->src.val, ctxt->dst.bytes)) in check_perm_in()
4397 return emulate_gp(ctxt, 0); in check_perm_in()
4402 static int check_perm_out(struct x86_emulate_ctxt *ctxt) in check_perm_out() argument
4404 ctxt->src.bytes = min(ctxt->src.bytes, 4u); in check_perm_out()
4405 if (!emulator_io_permited(ctxt, ctxt->dst.val, ctxt->src.bytes)) in check_perm_out()
4406 return emulate_gp(ctxt, 0); in check_perm_out()
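/*
 * Editor's note: check_perm_in()/check_perm_out() first clamp the access to
 * the 4-byte maximum a single x86 port operation can move, then consult the
 * TSS I/O permission bitmap via emulator_io_permited(); a denied port takes
 * #GP(0), matching hardware IN/OUT behaviour when CPL exceeds IOPL.
 */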
4962 static unsigned imm_size(struct x86_emulate_ctxt *ctxt) in imm_size() argument
4966 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in imm_size()
4972 static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_imm() argument
4979 op->addr.mem.ea = ctxt->_eip; in decode_imm()
4983 op->val = insn_fetch(s8, ctxt); in decode_imm()
4986 op->val = insn_fetch(s16, ctxt); in decode_imm()
4989 op->val = insn_fetch(s32, ctxt); in decode_imm()
4992 op->val = insn_fetch(s64, ctxt); in decode_imm()
5012 static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_operand() argument
5019 decode_register_operand(ctxt, op); in decode_operand()
5022 rc = decode_imm(ctxt, op, 1, false); in decode_operand()
5025 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5027 *op = ctxt->memop; in decode_operand()
5028 ctxt->memopp = op; in decode_operand()
5029 if (ctxt->d & BitOp) in decode_operand()
5030 fetch_bit_operand(ctxt); in decode_operand()
5034 ctxt->memop.bytes = (ctxt->op_bytes == 8) ? 16 : 8; in decode_operand()
5038 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5039 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
5045 op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes; in decode_operand()
5046 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
5051 if (ctxt->d & ByteOp) { in decode_operand()
5056 op->bytes = ctxt->op_bytes; in decode_operand()
5057 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
5063 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5065 register_address(ctxt, VCPU_REGS_RDI); in decode_operand()
5073 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
5079 op->val = reg_read(ctxt, VCPU_REGS_RCX) & 0xff; in decode_operand()
5082 rc = decode_imm(ctxt, op, 1, true); in decode_operand()
5090 rc = decode_imm(ctxt, op, imm_size(ctxt), true); in decode_operand()
5093 rc = decode_imm(ctxt, op, ctxt->op_bytes, true); in decode_operand()
5096 ctxt->memop.bytes = 1; in decode_operand()
5097 if (ctxt->memop.type == OP_REG) { in decode_operand()
5098 ctxt->memop.addr.reg = decode_register(ctxt, in decode_operand()
5099 ctxt->modrm_rm, true); in decode_operand()
5100 fetch_register_operand(&ctxt->memop); in decode_operand()
5104 ctxt->memop.bytes = 2; in decode_operand()
5107 ctxt->memop.bytes = 4; in decode_operand()
5110 rc = decode_imm(ctxt, op, 2, false); in decode_operand()
5113 rc = decode_imm(ctxt, op, imm_size(ctxt), false); in decode_operand()
5117 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5119 register_address(ctxt, VCPU_REGS_RSI); in decode_operand()
5120 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
5126 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5128 address_mask(ctxt, in decode_operand()
5129 reg_read(ctxt, VCPU_REGS_RBX) + in decode_operand()
5130 (reg_read(ctxt, VCPU_REGS_RAX) & 0xff)); in decode_operand()
5131 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
5136 op->addr.mem.ea = ctxt->_eip; in decode_operand()
5137 op->bytes = ctxt->op_bytes + 2; in decode_operand()
5138 insn_fetch_arr(op->valptr, op->bytes, ctxt); in decode_operand()
5141 ctxt->memop.bytes = ctxt->op_bytes + 2; in decode_operand()
5178 int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len) in x86_decode_insn() argument
5181 int mode = ctxt->mode; in x86_decode_insn()
5189 ctxt->memop.type = OP_NONE; in x86_decode_insn()
5190 ctxt->memopp = NULL; in x86_decode_insn()
5191 ctxt->_eip = ctxt->eip; in x86_decode_insn()
5192 ctxt->fetch.ptr = ctxt->fetch.data; in x86_decode_insn()
5193 ctxt->fetch.end = ctxt->fetch.data + insn_len; in x86_decode_insn()
5194 ctxt->opcode_len = 1; in x86_decode_insn()
5195 ctxt->intercept = x86_intercept_none; in x86_decode_insn()
5197 memcpy(ctxt->fetch.data, insn, insn_len); in x86_decode_insn()
5199 rc = __do_insn_fetch_bytes(ctxt, 1); in x86_decode_insn()
5208 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS); in x86_decode_insn()
5228 ctxt->op_bytes = def_op_bytes; in x86_decode_insn()
5229 ctxt->ad_bytes = def_ad_bytes; in x86_decode_insn()
5233 switch (ctxt->b = insn_fetch(u8, ctxt)) { in x86_decode_insn()
5237 ctxt->op_bytes = def_op_bytes ^ 6; in x86_decode_insn()
5242 ctxt->ad_bytes = def_ad_bytes ^ 12; in x86_decode_insn()
5245 ctxt->ad_bytes = def_ad_bytes ^ 6; in x86_decode_insn()
5249 ctxt->seg_override = VCPU_SREG_ES; in x86_decode_insn()
5253 ctxt->seg_override = VCPU_SREG_CS; in x86_decode_insn()
5257 ctxt->seg_override = VCPU_SREG_SS; in x86_decode_insn()
5261 ctxt->seg_override = VCPU_SREG_DS; in x86_decode_insn()
5265 ctxt->seg_override = VCPU_SREG_FS; in x86_decode_insn()
5269 ctxt->seg_override = VCPU_SREG_GS; in x86_decode_insn()
5274 ctxt->rex_prefix = ctxt->b; in x86_decode_insn()
5277 ctxt->lock_prefix = 1; in x86_decode_insn()
5281 ctxt->rep_prefix = ctxt->b; in x86_decode_insn()
5289 ctxt->rex_prefix = 0; in x86_decode_insn()
5295 if (ctxt->rex_prefix & 8) in x86_decode_insn()
5296 ctxt->op_bytes = 8; /* REX.W */ in x86_decode_insn()
5299 opcode = opcode_table[ctxt->b]; in x86_decode_insn()
5301 if (ctxt->b == 0x0f) { in x86_decode_insn()
5302 ctxt->opcode_len = 2; in x86_decode_insn()
5303 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
5304 opcode = twobyte_table[ctxt->b]; in x86_decode_insn()
5307 if (ctxt->b == 0x38) { in x86_decode_insn()
5308 ctxt->opcode_len = 3; in x86_decode_insn()
5309 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
5310 opcode = opcode_map_0f_38[ctxt->b]; in x86_decode_insn()
5313 ctxt->d = opcode.flags; in x86_decode_insn()
5315 if (ctxt->d & ModRM) in x86_decode_insn()
5316 ctxt->modrm = insn_fetch(u8, ctxt); in x86_decode_insn()
5319 if (ctxt->opcode_len == 1 && (ctxt->b == 0xc5 || ctxt->b == 0xc4) && in x86_decode_insn()
5320 (mode == X86EMUL_MODE_PROT64 || (ctxt->modrm & 0xc0) == 0xc0)) { in x86_decode_insn()
5321 ctxt->d = NotImpl; in x86_decode_insn()
5324 while (ctxt->d & GroupMask) { in x86_decode_insn()
5325 switch (ctxt->d & GroupMask) { in x86_decode_insn()
5327 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
5331 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
5332 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
5338 goffset = ctxt->modrm & 7; in x86_decode_insn()
5342 if (ctxt->rep_prefix && op_prefix) in x86_decode_insn()
5344 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix; in x86_decode_insn()
5353 if (ctxt->modrm > 0xbf) { in x86_decode_insn()
5356 ctxt->modrm - 0xc0, size); in x86_decode_insn()
5360 opcode = opcode.u.esc->op[(ctxt->modrm >> 3) & 7]; in x86_decode_insn()
5364 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
5370 if (ctxt->mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
5379 ctxt->d &= ~(u64)GroupMask; in x86_decode_insn()
5380 ctxt->d |= opcode.flags; in x86_decode_insn()
5384 if (ctxt->d == 0) in x86_decode_insn()
5387 ctxt->execute = opcode.u.execute; in x86_decode_insn()
5389 if (unlikely(ctxt->ud) && likely(!(ctxt->d & EmulateOnUD))) in x86_decode_insn()
5392 if (unlikely(ctxt->d & in x86_decode_insn()
5399 ctxt->check_perm = opcode.check_perm; in x86_decode_insn()
5400 ctxt->intercept = opcode.intercept; in x86_decode_insn()
5402 if (ctxt->d & NotImpl) in x86_decode_insn()
5406 if (ctxt->op_bytes == 4 && (ctxt->d & Stack)) in x86_decode_insn()
5407 ctxt->op_bytes = 8; in x86_decode_insn()
5408 else if (ctxt->d & NearBranch) in x86_decode_insn()
5409 ctxt->op_bytes = 8; in x86_decode_insn()
5412 if (ctxt->d & Op3264) { in x86_decode_insn()
5414 ctxt->op_bytes = 8; in x86_decode_insn()
5416 ctxt->op_bytes = 4; in x86_decode_insn()
5419 if ((ctxt->d & No16) && ctxt->op_bytes == 2) in x86_decode_insn()
5420 ctxt->op_bytes = 4; in x86_decode_insn()
5422 if (ctxt->d & Sse) in x86_decode_insn()
5423 ctxt->op_bytes = 16; in x86_decode_insn()
5424 else if (ctxt->d & Mmx) in x86_decode_insn()
5425 ctxt->op_bytes = 8; in x86_decode_insn()
5429 if (ctxt->d & ModRM) { in x86_decode_insn()
5430 rc = decode_modrm(ctxt, &ctxt->memop); in x86_decode_insn()
5433 ctxt->seg_override = ctxt->modrm_seg; in x86_decode_insn()
5435 } else if (ctxt->d & MemAbs) in x86_decode_insn()
5436 rc = decode_abs(ctxt, &ctxt->memop); in x86_decode_insn()
5441 ctxt->seg_override = VCPU_SREG_DS; in x86_decode_insn()
5443 ctxt->memop.addr.mem.seg = ctxt->seg_override; in x86_decode_insn()
5449 rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask); in x86_decode_insn()
5457 rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask); in x86_decode_insn()
5462 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask); in x86_decode_insn()
5464 if (ctxt->rip_relative && likely(ctxt->memopp)) in x86_decode_insn()
5465 ctxt->memopp->addr.mem.ea = address_mask(ctxt, in x86_decode_insn()
5466 ctxt->memopp->addr.mem.ea + ctxt->_eip); in x86_decode_insn()
5470 ctxt->have_exception = true; in x86_decode_insn()
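/*
 * Editor's note: x86_decode_insn() above runs in fixed stages: legacy and
 * REX prefixes; one, two or three opcode bytes indexing opcode_table,
 * twobyte_table or opcode_map_0f_38; group/prefix/escape resolution through
 * GroupMask; operand-size adjustment (Stack, NearBranch, Op3264, No16, Sse,
 * Mmx); ModRM or absolute memory decode; then src, src2 and dst operand
 * decode. The RIP-relative fixup runs last by design: it needs the final
 * ctxt->_eip, which is known only once the whole instruction is fetched.
 */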
5474 bool x86_page_table_writing_insn(struct x86_emulate_ctxt *ctxt) in x86_page_table_writing_insn() argument
5476 return ctxt->d & PageTable; in x86_page_table_writing_insn()
5479 static bool string_insn_completed(struct x86_emulate_ctxt *ctxt) in string_insn_completed() argument
5488 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) || in string_insn_completed()
5489 (ctxt->b == 0xae) || (ctxt->b == 0xaf)) in string_insn_completed()
5490 && (((ctxt->rep_prefix == REPE_PREFIX) && in string_insn_completed()
5491 ((ctxt->eflags & X86_EFLAGS_ZF) == 0)) in string_insn_completed()
5492 || ((ctxt->rep_prefix == REPNE_PREFIX) && in string_insn_completed()
5493 ((ctxt->eflags & X86_EFLAGS_ZF) == X86_EFLAGS_ZF)))) in string_insn_completed()
5499 static int flush_pending_x87_faults(struct x86_emulate_ctxt *ctxt) in flush_pending_x87_faults() argument
5508 return emulate_exception(ctxt, MF_VECTOR, 0, false); in flush_pending_x87_faults()
5519 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop) in fastop() argument
5521 ulong flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF; in fastop()
5523 if (!(ctxt->d & ByteOp)) in fastop()
5524 fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE; in fastop()
5527 : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags), in fastop()
5529 : "c"(ctxt->src2.val)); in fastop()
5531 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK); in fastop()
5533 return emulate_de(ctxt); in fastop()
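/*
 * Editor's note: fastop() jumps into a table of small asm stubs laid out
 * FASTOP_SIZE bytes apart, one per operand width; byte ops use the base
 * stub and wider ops add __ffs(dst.bytes) slots. EFLAGS travels in and out
 * through the masked "flags" copy. The index mapping, checked with the
 * userspace equivalent __builtin_ctz():
 */
#include <assert.h>

int main(void)
{
	assert(__builtin_ctz(2) == 1);	/* 2-byte ops: stub 1 */
	assert(__builtin_ctz(4) == 2);	/* 4-byte ops: stub 2 */
	assert(__builtin_ctz(8) == 3);	/* 8-byte ops: stub 3 */
	return 0;
}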
5537 void init_decode_cache(struct x86_emulate_ctxt *ctxt) in init_decode_cache() argument
5539 memset(&ctxt->rip_relative, 0, in init_decode_cache()
5540 (void *)&ctxt->modrm - (void *)&ctxt->rip_relative); in init_decode_cache()
5542 ctxt->io_read.pos = 0; in init_decode_cache()
5543 ctxt->io_read.end = 0; in init_decode_cache()
5544 ctxt->mem_read.end = 0; in init_decode_cache()
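/*
 * Editor's note: init_decode_cache() resets every decode-cache field from
 * rip_relative up to (but not including) modrm with one memset, which
 * quietly requires those members to stay contiguous in the declaration of
 * struct x86_emulate_ctxt; the struct layout is part of the decoder's
 * contract here.
 */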
5547 int x86_emulate_insn(struct x86_emulate_ctxt *ctxt) in x86_emulate_insn() argument
5549 const struct x86_emulate_ops *ops = ctxt->ops; in x86_emulate_insn()
5551 int saved_dst_type = ctxt->dst.type; in x86_emulate_insn()
5554 ctxt->mem_read.pos = 0; in x86_emulate_insn()
5557 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) { in x86_emulate_insn()
5558 rc = emulate_ud(ctxt); in x86_emulate_insn()
5562 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) { in x86_emulate_insn()
5563 rc = emulate_ud(ctxt); in x86_emulate_insn()
5567 emul_flags = ctxt->ops->get_hflags(ctxt); in x86_emulate_insn()
5568 if (unlikely(ctxt->d & in x86_emulate_insn()
5570 if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) || in x86_emulate_insn()
5571 (ctxt->d & Undefined)) { in x86_emulate_insn()
5572 rc = emulate_ud(ctxt); in x86_emulate_insn()
5576 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM))) in x86_emulate_insn()
5577 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) { in x86_emulate_insn()
5578 rc = emulate_ud(ctxt); in x86_emulate_insn()
5582 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) { in x86_emulate_insn()
5583 rc = emulate_nm(ctxt); in x86_emulate_insn()
5587 if (ctxt->d & Mmx) { in x86_emulate_insn()
5588 rc = flush_pending_x87_faults(ctxt); in x86_emulate_insn()
5595 fetch_possible_mmx_operand(&ctxt->src); in x86_emulate_insn()
5596 fetch_possible_mmx_operand(&ctxt->src2); in x86_emulate_insn()
5597 if (!(ctxt->d & Mov)) in x86_emulate_insn()
5598 fetch_possible_mmx_operand(&ctxt->dst); in x86_emulate_insn()
5601 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && ctxt->intercept) { in x86_emulate_insn()
5602 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5609 if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) { in x86_emulate_insn()
5610 rc = emulate_ud(ctxt); in x86_emulate_insn()
5615 if ((ctxt->d & Priv) && ops->cpl(ctxt)) { in x86_emulate_insn()
5616 if (ctxt->d & PrivUD) in x86_emulate_insn()
5617 rc = emulate_ud(ctxt); in x86_emulate_insn()
5619 rc = emulate_gp(ctxt, 0); in x86_emulate_insn()
5624 if (ctxt->d & CheckPerm) { in x86_emulate_insn()
5625 rc = ctxt->check_perm(ctxt); in x86_emulate_insn()
5630 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5631 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5637 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5639 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0) { in x86_emulate_insn()
5640 string_registers_quirk(ctxt); in x86_emulate_insn()
5641 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5642 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5648 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) { in x86_emulate_insn()
5649 rc = segmented_read(ctxt, ctxt->src.addr.mem, in x86_emulate_insn()
5650 ctxt->src.valptr, ctxt->src.bytes); in x86_emulate_insn()
5653 ctxt->src.orig_val64 = ctxt->src.val64; in x86_emulate_insn()
5656 if (ctxt->src2.type == OP_MEM) { in x86_emulate_insn()
5657 rc = segmented_read(ctxt, ctxt->src2.addr.mem, in x86_emulate_insn()
5658 &ctxt->src2.val, ctxt->src2.bytes); in x86_emulate_insn()
5663 if ((ctxt->d & DstMask) == ImplicitOps) in x86_emulate_insn()
5667 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) { in x86_emulate_insn()
5669 rc = segmented_read(ctxt, ctxt->dst.addr.mem, in x86_emulate_insn()
5670 &ctxt->dst.val, ctxt->dst.bytes); in x86_emulate_insn()
5672 if (!(ctxt->d & NoWrite) && in x86_emulate_insn()
5674 ctxt->exception.vector == PF_VECTOR) in x86_emulate_insn()
5675 ctxt->exception.error_code |= PFERR_WRITE_MASK; in x86_emulate_insn()
5680 ctxt->dst.orig_val64 = ctxt->dst.val64; in x86_emulate_insn()
5684 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5685 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5691 if (ctxt->rep_prefix && (ctxt->d & String)) in x86_emulate_insn()
5692 ctxt->eflags |= X86_EFLAGS_RF; in x86_emulate_insn()
5694 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5696 if (ctxt->execute) { in x86_emulate_insn()
5697 if (ctxt->d & Fastop) in x86_emulate_insn()
5698 rc = fastop(ctxt, ctxt->fop); in x86_emulate_insn()
5700 rc = ctxt->execute(ctxt); in x86_emulate_insn()
5706 if (ctxt->opcode_len == 2) in x86_emulate_insn()
5708 else if (ctxt->opcode_len == 3) in x86_emulate_insn()
5711 switch (ctxt->b) { in x86_emulate_insn()
5713 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5714 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5717 ctxt->dst.val = ctxt->src.addr.mem.ea; in x86_emulate_insn()
5720 if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX)) in x86_emulate_insn()
5721 ctxt->dst.type = OP_NONE; in x86_emulate_insn()
5723 rc = em_xchg(ctxt); in x86_emulate_insn()
5726 switch (ctxt->op_bytes) { in x86_emulate_insn()
5727 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break; in x86_emulate_insn()
5728 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break; in x86_emulate_insn()
5729 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break; in x86_emulate_insn()
5733 rc = emulate_int(ctxt, 3); in x86_emulate_insn()
5736 rc = emulate_int(ctxt, ctxt->src.val); in x86_emulate_insn()
5739 if (ctxt->eflags & X86_EFLAGS_OF) in x86_emulate_insn()
5740 rc = emulate_int(ctxt, 4); in x86_emulate_insn()
5744 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5745 ctxt->dst.type = OP_NONE; /* Disable writeback. */ in x86_emulate_insn()
5748 ctxt->ops->halt(ctxt); in x86_emulate_insn()
5752 ctxt->eflags ^= X86_EFLAGS_CF; in x86_emulate_insn()
5755 ctxt->eflags &= ~X86_EFLAGS_CF; in x86_emulate_insn()
5758 ctxt->eflags |= X86_EFLAGS_CF; in x86_emulate_insn()
5761 ctxt->eflags &= ~X86_EFLAGS_DF; in x86_emulate_insn()
5764 ctxt->eflags |= X86_EFLAGS_DF; in x86_emulate_insn()
5774 if (ctxt->d & SrcWrite) { in x86_emulate_insn()
5775 BUG_ON(ctxt->src.type == OP_MEM || ctxt->src.type == OP_MEM_STR); in x86_emulate_insn()
5776 rc = writeback(ctxt, &ctxt->src); in x86_emulate_insn()
5780 if (!(ctxt->d & NoWrite)) { in x86_emulate_insn()
5781 rc = writeback(ctxt, &ctxt->dst); in x86_emulate_insn()
5790 ctxt->dst.type = saved_dst_type; in x86_emulate_insn()
5792 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5793 string_addr_inc(ctxt, VCPU_REGS_RSI, &ctxt->src); in x86_emulate_insn()
5795 if ((ctxt->d & DstMask) == DstDI) in x86_emulate_insn()
5796 string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst); in x86_emulate_insn()
5798 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5800 struct read_cache *r = &ctxt->io_read; in x86_emulate_insn()
5801 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5802 count = ctxt->src.count; in x86_emulate_insn()
5804 count = ctxt->dst.count; in x86_emulate_insn()
5805 register_address_increment(ctxt, VCPU_REGS_RCX, -count); in x86_emulate_insn()
5807 if (!string_insn_completed(ctxt)) { in x86_emulate_insn()
5812 if ((r->end != 0 || reg_read(ctxt, VCPU_REGS_RCX) & 0x3ff) && in x86_emulate_insn()
5819 ctxt->mem_read.end = 0; in x86_emulate_insn()
5820 writeback_registers(ctxt); in x86_emulate_insn()
5825 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5828 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5829 if (ctxt->mode != X86EMUL_MODE_PROT64) in x86_emulate_insn()
5830 ctxt->eip = (u32)ctxt->_eip; in x86_emulate_insn()
5834 WARN_ON(ctxt->exception.vector > 0x1f); in x86_emulate_insn()
5835 ctxt->have_exception = true; in x86_emulate_insn()
5841 writeback_registers(ctxt); in x86_emulate_insn()
5846 switch (ctxt->b) { in x86_emulate_insn()
5848 (ctxt->ops->wbinvd)(ctxt); in x86_emulate_insn()
5856 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg); in x86_emulate_insn()
5859 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val); in x86_emulate_insn()
5862 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5863 ctxt->dst.val = ctxt->src.val; in x86_emulate_insn()
5864 else if (ctxt->op_bytes != 4) in x86_emulate_insn()
5865 ctxt->dst.type = OP_NONE; /* no writeback */ in x86_emulate_insn()
5868 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5869 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5872 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags); in x86_emulate_insn()
5875 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5876 ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val in x86_emulate_insn()
5877 : (u16) ctxt->src.val; in x86_emulate_insn()
5880 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5881 ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val : in x86_emulate_insn()
5882 (s16) ctxt->src.val; in x86_emulate_insn()
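/*
 * Editor's note: taken together, x86_emulate_insn() is a fixed pipeline:
 * LOCK/operand legality checks, pre- and post-decode intercept hooks for
 * nested guests, protection and privilege checks, source and destination
 * reads, execution (ctxt->execute or the fastop table, else the one-, two-
 * and three-byte opcode switches above), operand writeback, string-
 * instruction address and RCX bookkeeping with early exit back to the
 * guest, and the final commit of ctxt->_eip to ctxt->eip, truncated to
 * 32 bits outside long mode.
 */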
5899 void emulator_invalidate_register_cache(struct x86_emulate_ctxt *ctxt) in emulator_invalidate_register_cache() argument
5901 invalidate_registers(ctxt); in emulator_invalidate_register_cache()
5904 void emulator_writeback_register_cache(struct x86_emulate_ctxt *ctxt) in emulator_writeback_register_cache() argument
5906 writeback_registers(ctxt); in emulator_writeback_register_cache()
5909 bool emulator_can_use_gpa(struct x86_emulate_ctxt *ctxt) in emulator_can_use_gpa() argument
5911 if (ctxt->rep_prefix && (ctxt->d & String)) in emulator_can_use_gpa()
5914 if (ctxt->d & TwoMemOp) in emulator_can_use_gpa()