
Lines Matching +full:op +full:- +full:mode

1 // SPDX-License-Identifier: GPL-2.0-only
5 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
9 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
18 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
27 #include <asm/nospec-branch.h>
43 #define OpMem64 6ull /* Memory, 64-bit */
44 #define OpImmUByte 7ull /* Zero-extended 8-bit immediate */
47 #define OpImmByte 10ull /* 8-bit sign extended immediate */
49 #define OpImm 12ull /* Sign extended up to 32-bit immediate */
50 #define OpMem16 13ull /* Memory operand (16-bit). */
51 #define OpMem32 14ull /* Memory operand (32-bit). */
63 #define OpMem8 26ull /* 8-bit zero extended memory operand */
64 #define OpImm64 27ull /* Sign extended 16/32/64-bit immediate */
65 #define OpXLat 28ull /* memory at BX/EBX/RBX + zero-extended AL */
67 #define OpAccHi 30ull /* High part of extended acc (-/DX/EDX/RDX) */
70 #define OpMask ((1ull << OpBits) - 1)
73 * Opcode effective-address decode tables.
81 /* Operand sizes: 8-bit operands or specified/overridden size. */
82 #define ByteOp (1<<0) /* 8-bit operands. */
137 #define Prot (1<<21) /* instruction generates #UD if not in prot-mode */
140 #define Op3264 (1<<24) /* Operand is 64b in long mode, 32b otherwise */
220 struct opcode op[8]; member
245 if (!(ctxt->regs_valid & (1 << nr))) { in reg_read()
246 ctxt->regs_valid |= 1 << nr; in reg_read()
247 ctxt->_regs[nr] = ctxt->ops->read_gpr(ctxt, nr); in reg_read()
249 return ctxt->_regs[nr]; in reg_read()
254 ctxt->regs_valid |= 1 << nr; in reg_write()
255 ctxt->regs_dirty |= 1 << nr; in reg_write()
256 return &ctxt->_regs[nr]; in reg_write()
269 for_each_set_bit(reg, (ulong *)&ctxt->regs_dirty, 16) in writeback_registers()
270 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]); in writeback_registers()
275 ctxt->regs_dirty = 0; in invalidate_registers()
276 ctxt->regs_valid = 0; in invalidate_registers()
323 ".size " name ", .-" name "\n\t"
328 #define __FOP_START(op, align) \ argument
329 extern void em_##op(struct fastop *fake); \
331 ".global em_" #op " \n\t" \
333 "em_" #op ":\n\t"
335 #define FOP_START(op) __FOP_START(op, FASTOP_SIZE) argument
347 #define FOP1E(op, dst) \ argument
348 __FOP_FUNC(#op "_" #dst) \
349 "10: " #op " %" #dst " \n\t" \
350 __FOP_RET(#op "_" #dst)
352 #define FOP1EEX(op, dst) \ argument
353 FOP1E(op, dst) _ASM_EXTABLE(10b, kvm_fastop_exception)
355 #define FASTOP1(op) \ argument
356 FOP_START(op) \
357 FOP1E(op##b, al) \
358 FOP1E(op##w, ax) \
359 FOP1E(op##l, eax) \
360 ON64(FOP1E(op##q, rax)) \
363 /* 1-operand, using src2 (for MUL/DIV r/m) */
364 #define FASTOP1SRC2(op, name) \ argument
366 FOP1E(op, cl) \
367 FOP1E(op, cx) \
368 FOP1E(op, ecx) \
369 ON64(FOP1E(op, rcx)) \
372 /* 1-operand, using src2 (for MUL/DIV r/m), with exceptions */
373 #define FASTOP1SRC2EX(op, name) \ argument
375 FOP1EEX(op, cl) \
376 FOP1EEX(op, cx) \
377 FOP1EEX(op, ecx) \
378 ON64(FOP1EEX(op, rcx)) \
381 #define FOP2E(op, dst, src) \ argument
382 __FOP_FUNC(#op "_" #dst "_" #src) \
383 #op " %" #src ", %" #dst " \n\t" \
384 __FOP_RET(#op "_" #dst "_" #src)
386 #define FASTOP2(op) \ argument
387 FOP_START(op) \
388 FOP2E(op##b, al, dl) \
389 FOP2E(op##w, ax, dx) \
390 FOP2E(op##l, eax, edx) \
391 ON64(FOP2E(op##q, rax, rdx)) \
395 #define FASTOP2W(op) \ argument
396 FOP_START(op) \
398 FOP2E(op##w, ax, dx) \
399 FOP2E(op##l, eax, edx) \
400 ON64(FOP2E(op##q, rax, rdx)) \
404 #define FASTOP2CL(op) \ argument
405 FOP_START(op) \
406 FOP2E(op##b, al, cl) \
407 FOP2E(op##w, ax, cl) \
408 FOP2E(op##l, eax, cl) \
409 ON64(FOP2E(op##q, rax, cl)) \
413 #define FASTOP2R(op, name) \ argument
415 FOP2E(op##b, dl, al) \
416 FOP2E(op##w, dx, ax) \
417 FOP2E(op##l, edx, eax) \
418 ON64(FOP2E(op##q, rdx, rax)) \
421 #define FOP3E(op, dst, src, src2) \ argument
422 __FOP_FUNC(#op "_" #dst "_" #src "_" #src2) \
423 #op " %" #src2 ", %" #src ", %" #dst " \n\t"\
424 __FOP_RET(#op "_" #dst "_" #src "_" #src2)
426 /* 3-operand, word-only, src2=cl */
427 #define FASTOP3WCL(op) \ argument
428 FOP_START(op) \
430 FOP3E(op##w, ax, dx, cl) \
431 FOP3E(op##l, eax, edx, cl) \
432 ON64(FOP3E(op##q, rax, rdx, cl)) \
435 /* Special case for SETcc - 1 instruction per cc */
446 #define FOP_SETCC(op) \ argument
448 ".type " #op ", @function \n\t" \
449 #op ": \n\t" \
450 #op " %al \n\t" \
451 __FOP_RET(#op) \
452 ".skip " __stringify(SETCC_ALIGN) " - (.-" #op "), 0xcc \n\t"
509 .rep_prefix = ctxt->rep_prefix, in emulator_check_intercept()
510 .modrm_mod = ctxt->modrm_mod, in emulator_check_intercept()
511 .modrm_reg = ctxt->modrm_reg, in emulator_check_intercept()
512 .modrm_rm = ctxt->modrm_rm, in emulator_check_intercept()
513 .src_val = ctxt->src.val64, in emulator_check_intercept()
514 .dst_val = ctxt->dst.val64, in emulator_check_intercept()
515 .src_bytes = ctxt->src.bytes, in emulator_check_intercept()
516 .dst_bytes = ctxt->dst.bytes, in emulator_check_intercept()
517 .ad_bytes = ctxt->ad_bytes, in emulator_check_intercept()
518 .next_rip = ctxt->eip, in emulator_check_intercept()
521 return ctxt->ops->intercept(ctxt, &info, stage); in emulator_check_intercept()
531 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */ in assign_register()
541 break; /* 64b: zero-extend */ in assign_register()
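The two comments above describe x86's partial-register write rules: 1- and 2-byte writes merge into the destination GPR, while a 4-byte write zero-extends to 64 bits. A minimal self-contained sketch of the behaviour they imply (helper name is illustrative; little-endian layout assumed, as on x86):

    #include <stdint.h>

    /* Write 'val' into the low 'bytes' of *reg with x86 partial-register
     * semantics -- assumed to mirror what assign_register() does. */
    static void assign_gpr(unsigned long *reg, uint64_t val, int bytes)
    {
        switch (bytes) {
        case 1: *(uint8_t  *)reg = (uint8_t)val;  break; /* upper 56 bits kept */
        case 2: *(uint16_t *)reg = (uint16_t)val; break; /* upper 48 bits kept */
        case 4: *reg = (uint32_t)val;             break; /* zero-extends       */
        case 8: *reg = val;                       break;
        }
    }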
550 return (1UL << (ctxt->ad_bytes << 3)) - 1; in ad_mask()
558 if (ctxt->mode == X86EMUL_MODE_PROT64) in stack_mask()
560 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS); in stack_mask()
569 /* Access/update address held in a register, based on addressing mode. */
573 if (ctxt->ad_bytes == sizeof(unsigned long)) in address_mask()
595 assign_register(preg, *preg + inc, ctxt->ad_bytes); in register_address_increment()
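ad_mask() above builds a mask of ad_bytes*8 one-bits, so register-held addresses wrap at the current address size; address_mask() special-cases the full-width register to avoid an undefined 64-bit shift. A small sketch plus a worked value (helper name is illustrative):

    /* Mask for 16- or 32-bit address arithmetic, as in ad_mask() above. */
    static unsigned long addr_wrap_mask(int ad_bytes)
    {
        return (1UL << (ad_bytes << 3)) - 1;   /* 2 -> 0xffff, 4 -> 0xffffffff */
    }

    /* e.g. with ad_bytes == 2, incrementing an index register holding 0xfffe
     * by 4 stores 0x0002 back into its low word: 16-bit addressing wraps. */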
607 return desc->g ? (limit << 12) | 0xfff : limit; in desc_limit_scaled()
612 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS) in seg_base()
615 return ctxt->ops->get_cached_segment_base(ctxt, seg); in seg_base()
622 ctxt->exception.vector = vec; in emulate_exception()
623 ctxt->exception.error_code = error; in emulate_exception()
624 ctxt->exception.error_code_valid = valid; in emulate_exception()
668 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg); in get_segment_selector()
679 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg); in set_segment_selector()
680 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg); in set_segment_selector()
685 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48; in ctxt_virt_addr_bits()
705 u64 alignment = ctxt->d & AlignMask; in insn_alignment()
726 enum x86emul_mode mode, ulong *linear) in __linearize() argument
737 switch (mode) { in __linearize()
744 *max_size = min_t(u64, ~0u, (1ull << va_bits) - la); in __linearize()
750 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL, in __linearize()
754 /* code segment in protected mode or read-only data segment */ in __linearize()
755 if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8)) in __linearize()
763 /* expand-down segment */ in __linearize()
773 *max_size = (u64)lim + 1 - addr.ea; in __linearize()
779 if (la & (insn_alignment(ctxt, size) - 1)) in __linearize()
796 ctxt->mode, linear); in linearize()
800 enum x86emul_mode mode) in assign_eip() argument
808 if (ctxt->op_bytes != sizeof(unsigned long)) in assign_eip()
809 addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1); in assign_eip()
810 rc = __linearize(ctxt, addr, &max_size, 1, false, true, mode, &linear); in assign_eip()
812 ctxt->_eip = addr.ea; in assign_eip()
818 return assign_eip(ctxt, dst, ctxt->mode); in assign_eip_near()
824 enum x86emul_mode mode = ctxt->mode; in assign_eip_far() local
828 if (ctxt->mode >= X86EMUL_MODE_PROT16) { in assign_eip_far()
829 if (cs_desc->l) { in assign_eip_far()
832 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in assign_eip_far()
834 mode = X86EMUL_MODE_PROT64; in assign_eip_far()
836 mode = X86EMUL_MODE_PROT32; /* temporary value */ in assign_eip_far()
839 if (mode == X86EMUL_MODE_PROT16 || mode == X86EMUL_MODE_PROT32) in assign_eip_far()
840 mode = cs_desc->d ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16; in assign_eip_far()
841 rc = assign_eip(ctxt, dst, mode); in assign_eip_far()
843 ctxt->mode = mode; in assign_eip_far()
849 return assign_eip_near(ctxt, ctxt->_eip + rel); in jmp_rel()
855 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true); in linear_read_system()
862 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true); in linear_write_system()
876 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false); in segmented_read_std()
890 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false); in segmented_write_std()
902 int cur_size = ctxt->fetch.end - ctxt->fetch.data; in __do_insn_fetch_bytes()
904 .ea = ctxt->eip + cur_size }; in __do_insn_fetch_bytes()
916 rc = __linearize(ctxt, addr, &max_size, 0, false, true, ctxt->mode, in __do_insn_fetch_bytes()
922 size = min_t(unsigned, size, PAGE_SIZE - offset_in_page(linear)); in __do_insn_fetch_bytes()
928 * still, we must have hit the 15-byte boundary. in __do_insn_fetch_bytes()
933 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end, in __do_insn_fetch_bytes()
934 size, &ctxt->exception); in __do_insn_fetch_bytes()
937 ctxt->fetch.end += size; in __do_insn_fetch_bytes()
944 unsigned done_size = ctxt->fetch.end - ctxt->fetch.ptr; in do_insn_fetch_bytes()
947 return __do_insn_fetch_bytes(ctxt, size - done_size); in do_insn_fetch_bytes()
959 ctxt->_eip += sizeof(_type); \
960 memcpy(&_x, ctxt->fetch.ptr, sizeof(_type)); \
961 ctxt->fetch.ptr += sizeof(_type); \
970 ctxt->_eip += (_size); \
971 memcpy(_arr, ctxt->fetch.ptr, _size); \
972 ctxt->fetch.ptr += (_size); \
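Pieced together, the fragments above describe the decoder's instruction-prefetch window; a short summary of the assumed invariants (field roles inferred from the lines shown, not from the full file):

    /*
     * fetch.data .. fetch.end : bytes already fetched from guest memory
     *                           (at most 15, the architectural insn limit)
     * fetch.ptr               : next byte to hand to the decoder
     *
     * insn_fetch(type, ctxt):
     *   1. ensure sizeof(type) bytes are cached (do_insn_fetch_bytes)
     *   2. advance _eip by sizeof(type)
     *   3. memcpy sizeof(type) bytes from fetch.ptr and advance fetch.ptr
     */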
984 int highbyte_regs = (ctxt->rex_prefix == 0) && byteop; in decode_register()
1057 if (ctxt->src.val == 0) in em_bsf_c()
1058 ctxt->dst.type = OP_NONE; in em_bsf_c()
1065 if (ctxt->src.val == 0) in em_bsr_c()
1066 ctxt->dst.type = OP_NONE; in em_bsr_c()
1081 static void fetch_register_operand(struct operand *op) in fetch_register_operand() argument
1083 switch (op->bytes) { in fetch_register_operand()
1085 op->val = *(u8 *)op->addr.reg; in fetch_register_operand()
1088 op->val = *(u16 *)op->addr.reg; in fetch_register_operand()
1091 op->val = *(u32 *)op->addr.reg; in fetch_register_operand()
1094 op->val = *(u64 *)op->addr.reg; in fetch_register_operand()
1203 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fninit()
1216 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstcw()
1223 ctxt->dst.val = fcw; in em_fnstcw()
1232 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstsw()
1239 ctxt->dst.val = fsw; in em_fnstsw()
1245 struct operand *op) in decode_register_operand() argument
1247 unsigned reg = ctxt->modrm_reg; in decode_register_operand()
1249 if (!(ctxt->d & ModRM)) in decode_register_operand()
1250 reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3); in decode_register_operand()
1252 if (ctxt->d & Sse) { in decode_register_operand()
1253 op->type = OP_XMM; in decode_register_operand()
1254 op->bytes = 16; in decode_register_operand()
1255 op->addr.xmm = reg; in decode_register_operand()
1256 read_sse_reg(&op->vec_val, reg); in decode_register_operand()
1259 if (ctxt->d & Mmx) { in decode_register_operand()
1261 op->type = OP_MM; in decode_register_operand()
1262 op->bytes = 8; in decode_register_operand()
1263 op->addr.mm = reg; in decode_register_operand()
1267 op->type = OP_REG; in decode_register_operand()
1268 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_register_operand()
1269 op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp); in decode_register_operand()
1271 fetch_register_operand(op); in decode_register_operand()
1272 op->orig_val = op->val; in decode_register_operand()
1278 ctxt->modrm_seg = VCPU_SREG_SS; in adjust_modrm_seg()
1282 struct operand *op) in decode_modrm() argument
1289 ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */ in decode_modrm()
1290 index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */ in decode_modrm()
1291 base_reg = (ctxt->rex_prefix << 3) & 8; /* REX.B */ in decode_modrm()
1293 ctxt->modrm_mod = (ctxt->modrm & 0xc0) >> 6; in decode_modrm()
1294 ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3; in decode_modrm()
1295 ctxt->modrm_rm = base_reg | (ctxt->modrm & 0x07); in decode_modrm()
1296 ctxt->modrm_seg = VCPU_SREG_DS; in decode_modrm()
1298 if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) { in decode_modrm()
1299 op->type = OP_REG; in decode_modrm()
1300 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_modrm()
1301 op->addr.reg = decode_register(ctxt, ctxt->modrm_rm, in decode_modrm()
1302 ctxt->d & ByteOp); in decode_modrm()
1303 if (ctxt->d & Sse) { in decode_modrm()
1304 op->type = OP_XMM; in decode_modrm()
1305 op->bytes = 16; in decode_modrm()
1306 op->addr.xmm = ctxt->modrm_rm; in decode_modrm()
1307 read_sse_reg(&op->vec_val, ctxt->modrm_rm); in decode_modrm()
1310 if (ctxt->d & Mmx) { in decode_modrm()
1311 op->type = OP_MM; in decode_modrm()
1312 op->bytes = 8; in decode_modrm()
1313 op->addr.mm = ctxt->modrm_rm & 7; in decode_modrm()
1316 fetch_register_operand(op); in decode_modrm()
1320 op->type = OP_MEM; in decode_modrm()
1322 if (ctxt->ad_bytes == 2) { in decode_modrm()
1328 /* 16-bit ModR/M decode. */ in decode_modrm()
1329 switch (ctxt->modrm_mod) { in decode_modrm()
1331 if (ctxt->modrm_rm == 6) in decode_modrm()
1341 switch (ctxt->modrm_rm) { in decode_modrm()
1361 if (ctxt->modrm_mod != 0) in decode_modrm()
1368 if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 || in decode_modrm()
1369 (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0)) in decode_modrm()
1370 ctxt->modrm_seg = VCPU_SREG_SS; in decode_modrm()
1373 /* 32/64-bit ModR/M decode. */ in decode_modrm()
1374 if ((ctxt->modrm_rm & 7) == 4) { in decode_modrm()
1380 if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0) in decode_modrm()
1386 if ((ctxt->d & IncSP) && in decode_modrm()
1388 modrm_ea += ctxt->op_bytes; in decode_modrm()
1392 } else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) { in decode_modrm()
1394 if (ctxt->mode == X86EMUL_MODE_PROT64) in decode_modrm()
1395 ctxt->rip_relative = 1; in decode_modrm()
1397 base_reg = ctxt->modrm_rm; in decode_modrm()
1401 switch (ctxt->modrm_mod) { in decode_modrm()
1410 op->addr.mem.ea = modrm_ea; in decode_modrm()
1411 if (ctxt->ad_bytes != 8) in decode_modrm()
1412 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea; in decode_modrm()
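The shifts and masks above are the standard ModR/M split with the REX bits folded in; a small self-contained sketch, shown only to make the bit-twiddling readable (helper name and struct are illustrative, not from the file):

    #include <stdint.h>

    struct modrm_fields { uint8_t mod, reg, rm; };

    /* Split a ModR/M byte, extending reg/rm with REX.R/REX.B as decode_modrm() does. */
    static struct modrm_fields split_modrm(uint8_t modrm, uint8_t rex)
    {
        struct modrm_fields f;

        f.mod = (modrm & 0xc0) >> 6;                      /* addressing mode   */
        f.reg = ((rex << 1) & 8) | ((modrm & 0x38) >> 3); /* REX.R gives bit 3 */
        f.rm  = ((rex << 3) & 8) | (modrm & 0x07);        /* REX.B gives bit 3 */
        return f;
    }

    /* mod == 3 selects a register operand; otherwise rm (plus an optional SIB
     * byte and displacement) selects a memory operand. */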
1419 struct operand *op) in decode_abs() argument
1423 op->type = OP_MEM; in decode_abs()
1424 switch (ctxt->ad_bytes) { in decode_abs()
1426 op->addr.mem.ea = insn_fetch(u16, ctxt); in decode_abs()
1429 op->addr.mem.ea = insn_fetch(u32, ctxt); in decode_abs()
1432 op->addr.mem.ea = insn_fetch(u64, ctxt); in decode_abs()
1443 if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) { in fetch_bit_operand()
1444 mask = ~((long)ctxt->dst.bytes * 8 - 1); in fetch_bit_operand()
1446 if (ctxt->src.bytes == 2) in fetch_bit_operand()
1447 sv = (s16)ctxt->src.val & (s16)mask; in fetch_bit_operand()
1448 else if (ctxt->src.bytes == 4) in fetch_bit_operand()
1449 sv = (s32)ctxt->src.val & (s32)mask; in fetch_bit_operand()
1451 sv = (s64)ctxt->src.val & (s64)mask; in fetch_bit_operand()
1453 ctxt->dst.addr.mem.ea = address_mask(ctxt, in fetch_bit_operand()
1454 ctxt->dst.addr.mem.ea + (sv >> 3)); in fetch_bit_operand()
1458 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1; in fetch_bit_operand()
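fetch_bit_operand() above applies the BT/BTS/BTR/BTC rule that a bit offset into memory first moves the effective address by whole operand-sized chunks, leaving only the low bits as an in-operand bit index. A worked example with assumed numbers:

    /* Assume a 32-bit memory destination (dst.bytes == 4) and src.val == 37:
     *   mask = ~(4*8 - 1)   = ~31
     *   sv   = 37 & ~31     = 32     -> ea += 32 >> 3 = 4 bytes
     *   src.val &= 4*8 - 1  -> 5     -> bit 5 of the dword at the new ea
     * A negative offset moves ea backwards by the same arithmetic.
     */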
1465 struct read_cache *mc = &ctxt->mem_read; in read_emulated()
1467 if (mc->pos < mc->end) in read_emulated()
1470 WARN_ON((mc->end + size) >= sizeof(mc->data)); in read_emulated()
1472 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size, in read_emulated()
1473 &ctxt->exception); in read_emulated()
1477 mc->end += size; in read_emulated()
1480 memcpy(dest, mc->data + mc->pos, size); in read_emulated()
1481 mc->pos += size; in read_emulated()
1510 return ctxt->ops->write_emulated(ctxt, linear, data, size, in segmented_write()
1511 &ctxt->exception); in segmented_write()
1525 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data, in segmented_cmpxchg()
1526 size, &ctxt->exception); in segmented_cmpxchg()
1533 struct read_cache *rc = &ctxt->io_read; in pio_in_emulated()
1535 if (rc->pos == rc->end) { /* refill pio read ahead */ in pio_in_emulated()
1537 unsigned int count = ctxt->rep_prefix ? in pio_in_emulated()
1539 in_page = (ctxt->eflags & X86_EFLAGS_DF) ? in pio_in_emulated()
1541 PAGE_SIZE - offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)); in pio_in_emulated()
1542 n = min3(in_page, (unsigned int)sizeof(rc->data) / size, count); in pio_in_emulated()
1545 rc->pos = rc->end = 0; in pio_in_emulated()
1546 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n)) in pio_in_emulated()
1548 rc->end = n * size; in pio_in_emulated()
1551 if (ctxt->rep_prefix && (ctxt->d & String) && in pio_in_emulated()
1552 !(ctxt->eflags & X86_EFLAGS_DF)) { in pio_in_emulated()
1553 ctxt->dst.data = rc->data + rc->pos; in pio_in_emulated()
1554 ctxt->dst.type = OP_MEM_STR; in pio_in_emulated()
1555 ctxt->dst.count = (rc->end - rc->pos) / size; in pio_in_emulated()
1556 rc->pos = rc->end; in pio_in_emulated()
1558 memcpy(dest, rc->data + rc->pos, size); in pio_in_emulated()
1559 rc->pos += size; in pio_in_emulated()
1570 ctxt->ops->get_idt(ctxt, &dt); in read_interrupt_descriptor()
1582 const struct x86_emulate_ops *ops = ctxt->ops; in get_descriptor_table_ptr()
1590 if (!ops->get_segment(ctxt, &sel, &desc, &base3, in get_descriptor_table_ptr()
1594 dt->size = desc_limit_scaled(&desc); /* what if limit > 65535? */ in get_descriptor_table_ptr()
1595 dt->address = get_desc_base(&desc) | ((u64)base3 << 32); in get_descriptor_table_ptr()
1597 ops->get_gdt(ctxt, dt); in get_descriptor_table_ptr()
1618 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
1620 addr &= (u32)-1; in get_descriptor_ptr()
1665 bool null_selector = !(selector & ~0x3); /* 0000-0003 are null */ in __load_segment_descriptor()
1673 if (ctxt->mode == X86EMUL_MODE_REAL) { in __load_segment_descriptor()
1674 /* set real mode segment descriptor (keep limit etc. for in __load_segment_descriptor()
1675 * unreal mode) */ in __load_segment_descriptor()
1676 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg); in __load_segment_descriptor()
1679 } else if (seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86) { in __load_segment_descriptor()
1696 /* NULL selector is not valid for TR, CS and (except for long mode) SS */ in __load_segment_descriptor()
1702 if (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl) in __load_segment_descriptor()
1706 * ctxt->ops->set_segment expects the CPL to be in in __load_segment_descriptor()
1707 * SS.DPL, so fake an expand-up 32-bit data segment. in __load_segment_descriptor()
1765 /* in long-mode d/b must be clear if l is set */ in __load_segment_descriptor()
1769 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
1774 /* CS(RPL) <- CPL */ in __load_segment_descriptor()
1782 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc, in __load_segment_descriptor()
1783 sizeof(seg_desc), &ctxt->exception); in __load_segment_descriptor()
1813 } else if (ctxt->mode == X86EMUL_MODE_PROT64) { in __load_segment_descriptor()
1822 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg); in __load_segment_descriptor()
1833 u8 cpl = ctxt->ops->cpl(ctxt); in load_segment_descriptor()
1846 ctxt->mode == X86EMUL_MODE_PROT64) in load_segment_descriptor()
1853 static void write_register_operand(struct operand *op) in write_register_operand() argument
1855 return assign_register(op->addr.reg, op->val, op->bytes); in write_register_operand()
1858 static int writeback(struct x86_emulate_ctxt *ctxt, struct operand *op) in writeback() argument
1860 switch (op->type) { in writeback()
1862 write_register_operand(op); in writeback()
1865 if (ctxt->lock_prefix) in writeback()
1867 op->addr.mem, in writeback()
1868 &op->orig_val, in writeback()
1869 &op->val, in writeback()
1870 op->bytes); in writeback()
1873 op->addr.mem, in writeback()
1874 &op->val, in writeback()
1875 op->bytes); in writeback()
1879 op->addr.mem, in writeback()
1880 op->data, in writeback()
1881 op->bytes * op->count); in writeback()
1884 write_sse_reg(&op->vec_val, op->addr.xmm); in writeback()
1887 write_mmx_reg(&op->mm_val, op->addr.mm); in writeback()
1902 rsp_increment(ctxt, -bytes); in push()
1912 ctxt->dst.type = OP_NONE; in em_push()
1913 return push(ctxt, &ctxt->src.val, ctxt->op_bytes); in em_push()
1934 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_pop()
1942 int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulate_popf()
1943 int cpl = ctxt->ops->cpl(ctxt); in emulate_popf()
1954 switch(ctxt->mode) { in emulate_popf()
1968 default: /* real mode */ in emulate_popf()
1974 (ctxt->eflags & ~change_mask) | (val & change_mask); in emulate_popf()
1981 ctxt->dst.type = OP_REG; in em_popf()
1982 ctxt->dst.addr.reg = &ctxt->eflags; in em_popf()
1983 ctxt->dst.bytes = ctxt->op_bytes; in em_popf()
1984 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_popf()
1990 unsigned frame_size = ctxt->src.val; in em_enter()
1991 unsigned nesting_level = ctxt->src2.val & 31; in em_enter()
2004 reg_read(ctxt, VCPU_REGS_RSP) - frame_size, in em_enter()
2013 return emulate_pop(ctxt, reg_rmw(ctxt, VCPU_REGS_RBP), ctxt->op_bytes); in em_leave()
2018 int seg = ctxt->src2.val; in em_push_sreg()
2020 ctxt->src.val = get_segment_selector(ctxt, seg); in em_push_sreg()
2021 if (ctxt->op_bytes == 4) { in em_push_sreg()
2022 rsp_increment(ctxt, -2); in em_push_sreg()
2023 ctxt->op_bytes = 2; in em_push_sreg()
2031 int seg = ctxt->src2.val; in em_pop_sreg()
2039 if (ctxt->modrm_reg == VCPU_SREG_SS) in em_pop_sreg()
2040 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_pop_sreg()
2041 if (ctxt->op_bytes > 2) in em_pop_sreg()
2042 rsp_increment(ctxt, ctxt->op_bytes - 2); in em_pop_sreg()
2056 (ctxt->src.val = old_esp) : (ctxt->src.val = reg_read(ctxt, reg)); in em_pusha()
2070 ctxt->src.val = (unsigned long)ctxt->eflags & ~X86_EFLAGS_VM; in em_pushf()
2082 rsp_increment(ctxt, ctxt->op_bytes); in em_popa()
2083 --reg; in em_popa()
2086 rc = emulate_pop(ctxt, &val, ctxt->op_bytes); in em_popa()
2089 assign_register(reg_rmw(ctxt, reg), val, ctxt->op_bytes); in em_popa()
2090 --reg; in em_popa()
2097 const struct x86_emulate_ops *ops = ctxt->ops; in __emulate_int_real()
2105 ctxt->src.val = ctxt->eflags; in __emulate_int_real()
2110 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC); in __emulate_int_real()
2112 ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS); in __emulate_int_real()
2117 ctxt->src.val = ctxt->_eip; in __emulate_int_real()
2122 ops->get_idt(ctxt, &dt); in __emulate_int_real()
2139 ctxt->_eip = eip; in __emulate_int_real()
2157 switch(ctxt->mode) { in emulate_int()
2165 /* Protected mode interrupts unimplemented yet */ in emulate_int()
2187 rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes); in emulate_iret_real()
2195 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in emulate_iret_real()
2200 rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes); in emulate_iret_real()
2210 ctxt->_eip = temp_eip; in emulate_iret_real()
2212 if (ctxt->op_bytes == 4) in emulate_iret_real()
2213 ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask)); in emulate_iret_real()
2214 else if (ctxt->op_bytes == 2) { in emulate_iret_real()
2215 ctxt->eflags &= ~0xffff; in emulate_iret_real()
2216 ctxt->eflags |= temp_eflags; in emulate_iret_real()
2219 ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */ in emulate_iret_real()
2220 ctxt->eflags |= X86_EFLAGS_FIXED; in emulate_iret_real()
2221 ctxt->ops->set_nmi_mask(ctxt, false); in emulate_iret_real()
2228 switch(ctxt->mode) { in em_iret()
2236 /* iret from protected mode unimplemented yet */ in em_iret()
2246 u8 cpl = ctxt->ops->cpl(ctxt); in em_jmp_far()
2248 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_jmp_far()
2256 rc = assign_eip_far(ctxt, ctxt->src.val, &new_desc); in em_jmp_far()
2266 return assign_eip_near(ctxt, ctxt->src.val); in em_jmp_abs()
2274 old_eip = ctxt->_eip; in em_call_near_abs()
2275 rc = assign_eip_near(ctxt, ctxt->src.val); in em_call_near_abs()
2278 ctxt->src.val = old_eip; in em_call_near_abs()
2285 u64 old = ctxt->dst.orig_val64; in em_cmpxchg8b()
2287 if (ctxt->dst.bytes == 16) in em_cmpxchg8b()
2294 ctxt->eflags &= ~X86_EFLAGS_ZF; in em_cmpxchg8b()
2296 ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) | in em_cmpxchg8b()
2299 ctxt->eflags |= X86_EFLAGS_ZF; in em_cmpxchg8b()
2309 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret()
2320 int cpl = ctxt->ops->cpl(ctxt); in em_ret_far()
2323 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_far()
2326 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in em_ret_far()
2329 /* Outer-privilege level return is not implemented */ in em_ret_far()
2330 if (ctxt->mode >= X86EMUL_MODE_PROT16 && (cs & 3) > cpl) in em_ret_far()
2352 rsp_increment(ctxt, ctxt->src.val); in em_ret_far_imm()
2359 ctxt->dst.orig_val = ctxt->dst.val; in em_cmpxchg()
2360 ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2361 ctxt->src.orig_val = ctxt->src.val; in em_cmpxchg()
2362 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2365 if (ctxt->eflags & X86_EFLAGS_ZF) { in em_cmpxchg()
2367 ctxt->src.type = OP_NONE; in em_cmpxchg()
2368 ctxt->dst.val = ctxt->src.orig_val; in em_cmpxchg()
2371 ctxt->src.type = OP_REG; in em_cmpxchg()
2372 ctxt->src.addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2373 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2374 /* Create write-cycle to dest by writing the same value */ in em_cmpxchg()
2375 ctxt->dst.val = ctxt->dst.orig_val; in em_cmpxchg()
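The operand shuffling above mirrors the architectural CMPXCHG definition; a compact restatement (standard x86 semantics, not quoted from the file):

    /* CMPXCHG r/m, r  (rAX is the implicit accumulator):
     *   if (rAX == r/m) { ZF = 1; r/m = r;   }   -- success: store src
     *   else            { ZF = 0; rAX = r/m; }   -- failure: load old value
     * The emulation writes r/m in both cases; on failure it writes back the
     * unchanged old value so a LOCKed cmpxchg still produces its write cycle.
     */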
2382 int seg = ctxt->src2.val; in em_lseg()
2386 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_lseg()
2392 ctxt->dst.val = ctxt->src.val; in em_lseg()
2399 return ctxt->ops->guest_has_long_mode(ctxt); in emulator_has_longmode()
2407 desc->g = (flags >> 23) & 1; in rsm_set_desc_flags()
2408 desc->d = (flags >> 22) & 1; in rsm_set_desc_flags()
2409 desc->l = (flags >> 21) & 1; in rsm_set_desc_flags()
2410 desc->avl = (flags >> 20) & 1; in rsm_set_desc_flags()
2411 desc->p = (flags >> 15) & 1; in rsm_set_desc_flags()
2412 desc->dpl = (flags >> 13) & 3; in rsm_set_desc_flags()
2413 desc->s = (flags >> 12) & 1; in rsm_set_desc_flags()
2414 desc->type = (flags >> 8) & 15; in rsm_set_desc_flags()
2429 offset = 0x7f2c + (n - 3) * 12; in rsm_load_seg_32()
2434 ctxt->ops->set_segment(ctxt, selector, &desc, 0, n); in rsm_load_seg_32()
2455 ctxt->ops->set_segment(ctxt, selector, &desc, base3, n); in rsm_load_seg_64()
2473 bad = ctxt->ops->set_cr(ctxt, 3, cr3); in rsm_enter_protected_mode()
2478 * First enable PAE, long mode needs it before CR0.PG = 1 is set. in rsm_enter_protected_mode()
2479 * Then enable protected mode. However, PCID cannot be enabled in rsm_enter_protected_mode()
2482 bad = ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE); in rsm_enter_protected_mode()
2486 bad = ctxt->ops->set_cr(ctxt, 0, cr0); in rsm_enter_protected_mode()
2491 bad = ctxt->ops->set_cr(ctxt, 4, cr4); in rsm_enter_protected_mode()
2495 bad = ctxt->ops->set_cr(ctxt, 3, cr3 | pcid); in rsm_enter_protected_mode()
2516 ctxt->eflags = GET_SMSTATE(u32, smstate, 0x7ff4) | X86_EFLAGS_FIXED; in rsm_load_state_32()
2517 ctxt->_eip = GET_SMSTATE(u32, smstate, 0x7ff0); in rsm_load_state_32()
2524 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1)) in rsm_load_state_32()
2529 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1)) in rsm_load_state_32()
2536 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_TR); in rsm_load_state_32()
2542 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_LDTR); in rsm_load_state_32()
2546 ctxt->ops->set_gdt(ctxt, &dt); in rsm_load_state_32()
2550 ctxt->ops->set_idt(ctxt, &dt); in rsm_load_state_32()
2560 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7ef8)); in rsm_load_state_32()
2577 *reg_write(ctxt, i) = GET_SMSTATE(u64, smstate, 0x7ff8 - i * 8); in rsm_load_state_64()
2579 ctxt->_eip = GET_SMSTATE(u64, smstate, 0x7f78); in rsm_load_state_64()
2580 ctxt->eflags = GET_SMSTATE(u32, smstate, 0x7f70) | X86_EFLAGS_FIXED; in rsm_load_state_64()
2584 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1)) in rsm_load_state_64()
2589 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1)) in rsm_load_state_64()
2595 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7f00)); in rsm_load_state_64()
2598 if (ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA)) in rsm_load_state_64()
2606 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_TR); in rsm_load_state_64()
2610 ctxt->ops->set_idt(ctxt, &dt); in rsm_load_state_64()
2617 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_LDTR); in rsm_load_state_64()
2621 ctxt->ops->set_gdt(ctxt, &dt); in rsm_load_state_64()
2644 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_MASK) == 0) in em_rsm()
2647 smbase = ctxt->ops->get_smbase(ctxt); in em_rsm()
2649 ret = ctxt->ops->read_phys(ctxt, smbase + 0xfe00, buf, sizeof(buf)); in em_rsm()
2653 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_INSIDE_NMI_MASK) == 0) in em_rsm()
2654 ctxt->ops->set_nmi_mask(ctxt, false); in em_rsm()
2656 ctxt->ops->set_hflags(ctxt, ctxt->ops->get_hflags(ctxt) & in em_rsm()
2660 * Get back to real mode, to prepare a safe state in which to load in em_rsm()
2662 * supports long mode. in em_rsm()
2668 cr4 = ctxt->ops->get_cr(ctxt, 4); in em_rsm()
2670 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE); in em_rsm()
2672 /* A 32-bit code segment is required to clear EFER.LMA. */ in em_rsm()
2676 ctxt->ops->set_segment(ctxt, 0, &cs_desc, 0, VCPU_SREG_CS); in em_rsm()
2679 /* For the 64-bit case, this will clear EFER.LMA. */ in em_rsm()
2680 cr0 = ctxt->ops->get_cr(ctxt, 0); in em_rsm()
2682 ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE)); in em_rsm()
2686 cr4 = ctxt->ops->get_cr(ctxt, 4); in em_rsm()
2688 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PAE); in em_rsm()
2690 /* And finally go back to 32-bit mode. */ in em_rsm()
2692 ctxt->ops->set_msr(ctxt, MSR_EFER, efer); in em_rsm()
2696 * Give pre_leave_smm() a chance to make ISA-specific changes to the in em_rsm()
2697 * vCPU state (e.g. enter guest mode) before loading state from the SMM in em_rsm()
2698 * state-save area. in em_rsm()
2700 if (ctxt->ops->pre_leave_smm(ctxt, buf)) in em_rsm()
2715 ctxt->ops->post_leave_smm(ctxt); in em_rsm()
2724 cs->l = 0; /* will be adjusted later */ in setup_syscalls_segments()
2726 cs->g = 1; /* 4kb granularity */ in setup_syscalls_segments()
2728 cs->type = 0x0b; /* Read, Execute, Accessed */ in setup_syscalls_segments()
2729 cs->s = 1; in setup_syscalls_segments()
2730 cs->dpl = 0; /* will be adjusted later */ in setup_syscalls_segments()
2731 cs->p = 1; in setup_syscalls_segments()
2732 cs->d = 1; in setup_syscalls_segments()
2733 cs->avl = 0; in setup_syscalls_segments()
2737 ss->g = 1; /* 4kb granularity */ in setup_syscalls_segments()
2738 ss->s = 1; in setup_syscalls_segments()
2739 ss->type = 0x03; /* Read/Write, Accessed */ in setup_syscalls_segments()
2740 ss->d = 1; /* 32bit stack segment */ in setup_syscalls_segments()
2741 ss->dpl = 0; in setup_syscalls_segments()
2742 ss->p = 1; in setup_syscalls_segments()
2743 ss->l = 0; in setup_syscalls_segments()
2744 ss->avl = 0; in setup_syscalls_segments()
2752 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true); in vendor_intel()
2758 const struct x86_emulate_ops *ops = ctxt->ops; in em_syscall_is_enabled()
2762 * syscall should always be enabled in longmode - so only become in em_syscall_is_enabled()
2765 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_syscall_is_enabled()
2770 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true); in em_syscall_is_enabled()
2773 * 64bit guest with a 32bit compat-app running will #UD !! While this in em_syscall_is_enabled()
2774 * behaviour can be fixed (by emulating) into AMD response - CPUs of in em_syscall_is_enabled()
2793 const struct x86_emulate_ops *ops = ctxt->ops; in em_syscall()
2799 /* syscall is not available in real mode */ in em_syscall()
2800 if (ctxt->mode == X86EMUL_MODE_REAL || in em_syscall()
2801 ctxt->mode == X86EMUL_MODE_VM86) in em_syscall()
2807 ops->get_msr(ctxt, MSR_EFER, &efer); in em_syscall()
2812 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2821 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_syscall()
2822 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_syscall()
2824 *reg_write(ctxt, VCPU_REGS_RCX) = ctxt->_eip; in em_syscall()
2827 *reg_write(ctxt, VCPU_REGS_R11) = ctxt->eflags; in em_syscall()
2829 ops->get_msr(ctxt, in em_syscall()
2830 ctxt->mode == X86EMUL_MODE_PROT64 ? in em_syscall()
2832 ctxt->_eip = msr_data; in em_syscall()
2834 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data); in em_syscall()
2835 ctxt->eflags &= ~msr_data; in em_syscall()
2836 ctxt->eflags |= X86_EFLAGS_FIXED; in em_syscall()
2839 /* legacy mode */ in em_syscall()
2840 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2841 ctxt->_eip = (u32)msr_data; in em_syscall()
2843 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_syscall()
2846 ctxt->tf = (ctxt->eflags & X86_EFLAGS_TF) != 0; in em_syscall()
2852 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysenter()
2858 ops->get_msr(ctxt, MSR_EFER, &efer); in em_sysenter()
2859 /* inject #GP if in real mode */ in em_sysenter()
2860 if (ctxt->mode == X86EMUL_MODE_REAL) in em_sysenter()
2864 * Not recognized on AMD in compat mode (but is recognized in legacy in em_sysenter()
2865 * mode). in em_sysenter()
2867 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) in em_sysenter()
2871 /* sysenter/sysexit have not been tested in 64bit mode. */ in em_sysenter()
2872 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_sysenter()
2875 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysenter()
2880 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_sysenter()
2888 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysenter()
2889 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysenter()
2891 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data); in em_sysenter()
2892 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data; in em_sysenter()
2894 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data); in em_sysenter()
2898 ctxt->mode = X86EMUL_MODE_PROT64; in em_sysenter()
2905 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysexit()
2911 /* inject #GP if in real mode or Virtual 8086 mode */ in em_sysexit()
2912 if (ctxt->mode == X86EMUL_MODE_REAL || in em_sysexit()
2913 ctxt->mode == X86EMUL_MODE_VM86) in em_sysexit()
2918 if ((ctxt->rex_prefix & 0x8) != 0x0) in em_sysexit()
2928 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysexit()
2953 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysexit()
2954 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysexit()
2956 ctxt->_eip = rdx; in em_sysexit()
2965 if (ctxt->mode == X86EMUL_MODE_REAL) in emulator_bad_iopl()
2967 if (ctxt->mode == X86EMUL_MODE_VM86) in emulator_bad_iopl()
2969 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulator_bad_iopl()
2970 return ctxt->ops->cpl(ctxt) > iopl; in emulator_bad_iopl()
2979 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_io_port_access_allowed()
2984 unsigned mask = (1 << len) - 1; in emulator_io_port_access_allowed()
2995 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR); in emulator_io_port_access_allowed()
3004 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true); in emulator_io_port_access_allowed()
3009 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true); in emulator_io_port_access_allowed()
3020 if (ctxt->perm_ok) in emulator_io_permited()
3027 ctxt->perm_ok = true; in emulator_io_permited()
3036 * manner when ECX is zero due to REP-string optimizations. in string_registers_quirk()
3039 if (ctxt->ad_bytes != 4 || !vendor_intel(ctxt)) in string_registers_quirk()
3044 switch (ctxt->b) { in string_registers_quirk()
3047 *reg_rmw(ctxt, VCPU_REGS_RSI) &= (u32)-1; in string_registers_quirk()
3051 *reg_rmw(ctxt, VCPU_REGS_RDI) &= (u32)-1; in string_registers_quirk()
3059 tss->ip = ctxt->_eip; in save_state_to_tss16()
3060 tss->flag = ctxt->eflags; in save_state_to_tss16()
3061 tss->ax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss16()
3062 tss->cx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss16()
3063 tss->dx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss16()
3064 tss->bx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss16()
3065 tss->sp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss16()
3066 tss->bp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss16()
3067 tss->si = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss16()
3068 tss->di = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss16()
3070 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss16()
3071 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss16()
3072 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss16()
3073 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss16()
3074 tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR); in save_state_to_tss16()
3083 ctxt->_eip = tss->ip; in load_state_from_tss16()
3084 ctxt->eflags = tss->flag | 2; in load_state_from_tss16()
3085 *reg_write(ctxt, VCPU_REGS_RAX) = tss->ax; in load_state_from_tss16()
3086 *reg_write(ctxt, VCPU_REGS_RCX) = tss->cx; in load_state_from_tss16()
3087 *reg_write(ctxt, VCPU_REGS_RDX) = tss->dx; in load_state_from_tss16()
3088 *reg_write(ctxt, VCPU_REGS_RBX) = tss->bx; in load_state_from_tss16()
3089 *reg_write(ctxt, VCPU_REGS_RSP) = tss->sp; in load_state_from_tss16()
3090 *reg_write(ctxt, VCPU_REGS_RBP) = tss->bp; in load_state_from_tss16()
3091 *reg_write(ctxt, VCPU_REGS_RSI) = tss->si; in load_state_from_tss16()
3092 *reg_write(ctxt, VCPU_REGS_RDI) = tss->di; in load_state_from_tss16()
3098 set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR); in load_state_from_tss16()
3099 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss16()
3100 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss16()
3101 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss16()
3102 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss16()
3104 cpl = tss->cs & 3; in load_state_from_tss16()
3110 ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl, in load_state_from_tss16()
3114 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss16()
3118 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss16()
3122 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss16()
3126 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss16()
3173 tss->eip = ctxt->_eip; in save_state_to_tss32()
3174 tss->eflags = ctxt->eflags; in save_state_to_tss32()
3175 tss->eax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss32()
3176 tss->ecx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss32()
3177 tss->edx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss32()
3178 tss->ebx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss32()
3179 tss->esp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss32()
3180 tss->ebp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss32()
3181 tss->esi = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss32()
3182 tss->edi = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss32()
3184 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss32()
3185 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss32()
3186 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss32()
3187 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss32()
3188 tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS); in save_state_to_tss32()
3189 tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS); in save_state_to_tss32()
3198 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3)) in load_state_from_tss32()
3200 ctxt->_eip = tss->eip; in load_state_from_tss32()
3201 ctxt->eflags = tss->eflags | 2; in load_state_from_tss32()
3204 *reg_write(ctxt, VCPU_REGS_RAX) = tss->eax; in load_state_from_tss32()
3205 *reg_write(ctxt, VCPU_REGS_RCX) = tss->ecx; in load_state_from_tss32()
3206 *reg_write(ctxt, VCPU_REGS_RDX) = tss->edx; in load_state_from_tss32()
3207 *reg_write(ctxt, VCPU_REGS_RBX) = tss->ebx; in load_state_from_tss32()
3208 *reg_write(ctxt, VCPU_REGS_RSP) = tss->esp; in load_state_from_tss32()
3209 *reg_write(ctxt, VCPU_REGS_RBP) = tss->ebp; in load_state_from_tss32()
3210 *reg_write(ctxt, VCPU_REGS_RSI) = tss->esi; in load_state_from_tss32()
3211 *reg_write(ctxt, VCPU_REGS_RDI) = tss->edi; in load_state_from_tss32()
3218 set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR); in load_state_from_tss32()
3219 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss32()
3220 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss32()
3221 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss32()
3222 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss32()
3223 set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS); in load_state_from_tss32()
3224 set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS); in load_state_from_tss32()
3227 * If we're switching between Protected Mode and VM86, we need to make in load_state_from_tss32()
3228 * sure to update the mode before loading the segment descriptors so in load_state_from_tss32()
3231 if (ctxt->eflags & X86_EFLAGS_VM) { in load_state_from_tss32()
3232 ctxt->mode = X86EMUL_MODE_VM86; in load_state_from_tss32()
3235 ctxt->mode = X86EMUL_MODE_PROT32; in load_state_from_tss32()
3236 cpl = tss->cs & 3; in load_state_from_tss32()
3243 ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR, in load_state_from_tss32()
3247 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss32()
3251 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss32()
3255 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss32()
3259 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss32()
3263 ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl, in load_state_from_tss32()
3267 ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl, in load_state_from_tss32()
3291 ldt_sel_offset - eip_offset); in task_switch_32()
3316 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_do_task_switch()
3321 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR); in emulator_do_task_switch()
3341 * 3. jmp/call to TSS/task-gate: No check is performed since the in emulator_do_task_switch()
3345 if (idt_index != -1) { in emulator_do_task_switch()
3356 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl) in emulator_do_task_switch()
3374 ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT; in emulator_do_task_switch()
3391 ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT; in emulator_do_task_switch()
3398 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS); in emulator_do_task_switch()
3399 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR); in emulator_do_task_switch()
3402 ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2; in emulator_do_task_switch()
3403 ctxt->lock_prefix = 0; in emulator_do_task_switch()
3404 ctxt->src.val = (unsigned long) error_code; in emulator_do_task_switch()
3408 ops->get_dr(ctxt, 7, &dr7); in emulator_do_task_switch()
3409 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN)); in emulator_do_task_switch()
3421 ctxt->_eip = ctxt->eip; in emulator_task_switch()
3422 ctxt->dst.type = OP_NONE; in emulator_task_switch()
3428 ctxt->eip = ctxt->_eip; in emulator_task_switch()
3436 struct operand *op) in string_addr_inc() argument
3438 int df = (ctxt->eflags & X86_EFLAGS_DF) ? -op->count : op->count; in string_addr_inc()
3440 register_address_increment(ctxt, reg, df * op->bytes); in string_addr_inc()
3441 op->addr.mem.ea = register_address(ctxt, reg); in string_addr_inc()
3449 cf = ctxt->eflags & X86_EFLAGS_CF; in em_das()
3450 al = ctxt->dst.val; in em_das()
3455 af = ctxt->eflags & X86_EFLAGS_AF; in em_das()
3457 al -= 6; in em_das()
3464 al -= 0x60; in em_das()
3468 ctxt->dst.val = al; in em_das()
3470 ctxt->src.type = OP_IMM; in em_das()
3471 ctxt->src.val = 0; in em_das()
3472 ctxt->src.bytes = 1; in em_das()
3474 ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF); in em_das()
3476 ctxt->eflags |= X86_EFLAGS_CF; in em_das()
3478 ctxt->eflags |= X86_EFLAGS_AF; in em_das()
3486 if (ctxt->src.val == 0) in em_aam()
3489 al = ctxt->dst.val & 0xff; in em_aam()
3490 ah = al / ctxt->src.val; in em_aam()
3491 al %= ctxt->src.val; in em_aam()
3493 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8); in em_aam()
3496 ctxt->src.type = OP_IMM; in em_aam()
3497 ctxt->src.val = 0; in em_aam()
3498 ctxt->src.bytes = 1; in em_aam()
3506 u8 al = ctxt->dst.val & 0xff; in em_aad()
3507 u8 ah = (ctxt->dst.val >> 8) & 0xff; in em_aad()
3509 al = (al + (ah * ctxt->src.val)) & 0xff; in em_aad()
3511 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al; in em_aad()
3514 ctxt->src.type = OP_IMM; in em_aad()
3515 ctxt->src.val = 0; in em_aad()
3516 ctxt->src.bytes = 1; in em_aad()
3525 long rel = ctxt->src.val; in em_call()
3527 ctxt->src.val = (unsigned long)ctxt->_eip; in em_call()
3540 const struct x86_emulate_ops *ops = ctxt->ops; in em_call_far()
3541 int cpl = ctxt->ops->cpl(ctxt); in em_call_far()
3542 enum x86emul_mode prev_mode = ctxt->mode; in em_call_far()
3544 old_eip = ctxt->_eip; in em_call_far()
3545 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS); in em_call_far()
3547 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_call_far()
3553 rc = assign_eip_far(ctxt, ctxt->src.val, &new_desc); in em_call_far()
3557 ctxt->src.val = old_cs; in em_call_far()
3562 ctxt->src.val = old_eip; in em_call_far()
3572 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS); in em_call_far()
3573 ctxt->mode = prev_mode; in em_call_far()
3583 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_near_imm()
3589 rsp_increment(ctxt, ctxt->src.val); in em_ret_near_imm()
3596 ctxt->src.val = ctxt->dst.val; in em_xchg()
3597 write_register_operand(&ctxt->src); in em_xchg()
3600 ctxt->dst.val = ctxt->src.orig_val; in em_xchg()
3601 ctxt->lock_prefix = 1; in em_xchg()
3607 ctxt->dst.val = ctxt->src2.val; in em_imul_3op()
3613 ctxt->dst.type = OP_REG; in em_cwd()
3614 ctxt->dst.bytes = ctxt->src.bytes; in em_cwd()
3615 ctxt->dst.addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in em_cwd()
3616 ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1); in em_cwd()
3625 if (ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux)) in em_rdpid()
3627 ctxt->dst.val = tsc_aux; in em_rdpid()
3635 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc); in em_rdtsc()
3645 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc)) in em_rdpmc()
3654 memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr)); in em_mov()
3662 if (!ctxt->ops->guest_has_movbe(ctxt)) in em_movbe()
3665 switch (ctxt->op_bytes) { in em_movbe()
3672 * Both casting ->valptr and ->val to u16 breaks strict aliasing in em_movbe()
3675 tmp = (u16)ctxt->src.val; in em_movbe()
3676 ctxt->dst.val &= ~0xffffUL; in em_movbe()
3677 ctxt->dst.val |= (unsigned long)swab16(tmp); in em_movbe()
3680 ctxt->dst.val = swab32((u32)ctxt->src.val); in em_movbe()
3683 ctxt->dst.val = swab64(ctxt->src.val); in em_movbe()
3693 if (ctxt->ops->set_cr(ctxt, ctxt->modrm_reg, ctxt->src.val)) in em_cr_write()
3697 ctxt->dst.type = OP_NONE; in em_cr_write()
3705 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_dr_write()
3706 val = ctxt->src.val & ~0ULL; in em_dr_write()
3708 val = ctxt->src.val & ~0U; in em_dr_write()
3711 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0) in em_dr_write()
3715 ctxt->dst.type = OP_NONE; in em_dr_write()
3727 r = ctxt->ops->set_msr(ctxt, msr_index, msr_data); in em_wrmsr()
3744 r = ctxt->ops->get_msr(ctxt, msr_index, &msr_data); in em_rdmsr()
3760 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in em_store_sreg()
3761 ctxt->ops->cpl(ctxt) > 0) in em_store_sreg()
3764 ctxt->dst.val = get_segment_selector(ctxt, segment); in em_store_sreg()
3765 if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM) in em_store_sreg()
3766 ctxt->dst.bytes = 2; in em_store_sreg()
3772 if (ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_rm_sreg()
3775 return em_store_sreg(ctxt, ctxt->modrm_reg); in em_mov_rm_sreg()
3780 u16 sel = ctxt->src.val; in em_mov_sreg_rm()
3782 if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_sreg_rm()
3785 if (ctxt->modrm_reg == VCPU_SREG_SS) in em_mov_sreg_rm()
3786 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_mov_sreg_rm()
3789 ctxt->dst.type = OP_NONE; in em_mov_sreg_rm()
3790 return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg); in em_mov_sreg_rm()
3800 u16 sel = ctxt->src.val; in em_lldt()
3803 ctxt->dst.type = OP_NONE; in em_lldt()
3814 u16 sel = ctxt->src.val; in em_ltr()
3817 ctxt->dst.type = OP_NONE; in em_ltr()
3826 rc = linearize(ctxt, ctxt->src.addr.mem, 1, false, &linear); in em_invlpg()
3828 ctxt->ops->invlpg(ctxt, linear); in em_invlpg()
3830 ctxt->dst.type = OP_NONE; in em_invlpg()
3838 cr0 = ctxt->ops->get_cr(ctxt, 0); in em_clts()
3840 ctxt->ops->set_cr(ctxt, 0, cr0); in em_clts()
3846 int rc = ctxt->ops->fix_hypercall(ctxt); in em_hypercall()
3851 /* Let the processor re-execute the fixed hypercall */ in em_hypercall()
3852 ctxt->_eip = ctxt->eip; in em_hypercall()
3854 ctxt->dst.type = OP_NONE; in em_hypercall()
3864 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in emulate_store_desc_ptr()
3865 ctxt->ops->cpl(ctxt) > 0) in emulate_store_desc_ptr()
3868 if (ctxt->mode == X86EMUL_MODE_PROT64) in emulate_store_desc_ptr()
3869 ctxt->op_bytes = 8; in emulate_store_desc_ptr()
3871 if (ctxt->op_bytes == 2) { in emulate_store_desc_ptr()
3872 ctxt->op_bytes = 4; in emulate_store_desc_ptr()
3876 ctxt->dst.type = OP_NONE; in emulate_store_desc_ptr()
3877 return segmented_write_std(ctxt, ctxt->dst.addr.mem, in emulate_store_desc_ptr()
3878 &desc_ptr, 2 + ctxt->op_bytes); in emulate_store_desc_ptr()
3883 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt); in em_sgdt()
3888 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt); in em_sidt()
3896 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_lgdt_lidt()
3897 ctxt->op_bytes = 8; in em_lgdt_lidt()
3898 rc = read_descriptor(ctxt, ctxt->src.addr.mem, in em_lgdt_lidt()
3900 ctxt->op_bytes); in em_lgdt_lidt()
3903 if (ctxt->mode == X86EMUL_MODE_PROT64 && in em_lgdt_lidt()
3907 ctxt->ops->set_gdt(ctxt, &desc_ptr); in em_lgdt_lidt()
3909 ctxt->ops->set_idt(ctxt, &desc_ptr); in em_lgdt_lidt()
3911 ctxt->dst.type = OP_NONE; in em_lgdt_lidt()
3927 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in em_smsw()
3928 ctxt->ops->cpl(ctxt) > 0) in em_smsw()
3931 if (ctxt->dst.type == OP_MEM) in em_smsw()
3932 ctxt->dst.bytes = 2; in em_smsw()
3933 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0); in em_smsw()
3939 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul) in em_lmsw()
3940 | (ctxt->src.val & 0x0f)); in em_lmsw()
3941 ctxt->dst.type = OP_NONE; in em_lmsw()
3949 register_address_increment(ctxt, VCPU_REGS_RCX, -1); in em_loop()
3951 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags))) in em_loop()
3952 rc = jmp_rel(ctxt, ctxt->src.val); in em_loop()
3962 rc = jmp_rel(ctxt, ctxt->src.val); in em_jcxz()
3969 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val, in em_in()
3970 &ctxt->dst.val)) in em_in()
3978 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val, in em_out()
3979 &ctxt->src.val, 1); in em_out()
3981 ctxt->dst.type = OP_NONE; in em_out()
3990 ctxt->eflags &= ~X86_EFLAGS_IF; in em_cli()
3999 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI; in em_sti()
4000 ctxt->eflags |= X86_EFLAGS_IF; in em_sti()
4009 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr); in em_cpuid()
4011 ctxt->ops->cpl(ctxt)) { in em_cpuid()
4017 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false); in em_cpuid()
4033 ctxt->eflags &= ~0xffUL; in em_sahf()
4034 ctxt->eflags |= flags | X86_EFLAGS_FIXED; in em_sahf()
4041 *reg_rmw(ctxt, VCPU_REGS_RAX) |= (ctxt->eflags & 0xff) << 8; in em_lahf()
4047 switch (ctxt->op_bytes) { in em_bswap()
4050 asm("bswap %0" : "+r"(ctxt->dst.val)); in em_bswap()
4054 asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val)); in em_bswap()
4074 ctxt->dst.val = (s32) ctxt->src.val; in em_movsxd()
4080 if (!ctxt->ops->guest_has_fxsr(ctxt)) in check_fxsr()
4083 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in check_fxsr()
4090 if (ctxt->mode >= X86EMUL_MODE_PROT64) in check_fxsr()
4097 * Hardware doesn't save and restore XMM 0-7 without CR4.OSFXSR, but does save
4108 if (ctxt->mode == X86EMUL_MODE_PROT64) in fxstate_size()
4111 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR; in fxstate_size()
4116 * FXSAVE and FXRSTOR have 4 different formats depending on execution mode,
4117 * 1) 16 bit mode
4118 * 2) 32 bit mode
4119 * - like (1), but FIP and FDP (the x87 instruction and data pointers) are only 16 bit. At least Intel CPUs
4122 * 3) 64-bit mode without REX.W prefix
4123 * - like (2), but XMM 8-15 are being saved and restored
4124 * 4) 64-bit mode with REX.W prefix
4125 * - like (3), but FIP and FDP are 64 bit
4127 * Emulation uses (3) for (1) and (2) and preserves XMM 8-15 to reach the
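Together with the fxstate_size() fragments a little earlier, the comment above decides how much of the FXSAVE image is actually transferred; a minimal sketch of that size selection (the helper and the 8-vs-0 legacy choice are assumptions based on the fragments and the architectural FXSAVE layout):

    #include <stddef.h>

    /* Bytes of the FXSAVE area the emulator reads/writes: 64-bit guests get
     * the XMM0-15 view; legacy guests get XMM0-7 only when CR4.OSFXSR is set,
     * and just the x87/MXCSR portion otherwise (hardware doesn't save XMM
     * without OSFXSR, per the comment above fxstate_size()). */
    static size_t fx_image_size(int is_64bit, int cr4_osfxsr)
    {
        int nregs = is_64bit ? 16 : (cr4_osfxsr ? 8 : 0);

        return 32 + 128 + nregs * 16;   /* header + ST/MM space + XMM regs */
    }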
4151 return segmented_write_std(ctxt, ctxt->memop.addr.mem, &fx_state, in em_fxsave()
4170 __fxstate_size(16) - used_size); in fxregs_fixup()
4186 rc = segmented_read_std(ctxt, ctxt->memop.addr.mem, &fx_state, size); in em_fxrstor()
4220 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax)) in em_xsetbv()
4240 if (!valid_cr(ctxt->modrm_reg)) in check_cr_access()
4250 ctxt->ops->get_dr(ctxt, 7, &dr7); in check_dr7_gd()
4258 int dr = ctxt->modrm_reg; in check_dr_read()
4264 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_dr_read()
4271 ctxt->ops->get_dr(ctxt, 6, &dr6); in check_dr_read()
4274 ctxt->ops->set_dr(ctxt, 6, dr6); in check_dr_read()
4283 u64 new_val = ctxt->src.val64; in check_dr_write()
4284 int dr = ctxt->modrm_reg; in check_dr_write()
4296 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_svme()
4317 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdtsc()
4319 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt)) in check_rdtsc()
4327 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdpmc()
4331 * VMware allows access to these Pseudo-PMCs even when read via RDPMC in check_rdpmc()
4337 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) || in check_rdpmc()
4338 ctxt->ops->check_pmc(ctxt, rcx)) in check_rdpmc()
4346 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u); in check_perm_in()
4347 if (!emulator_io_permited(ctxt, ctxt->src.val, ctxt->dst.bytes)) in check_perm_in()
4355 ctxt->src.bytes = min(ctxt->src.bytes, 4u); in check_perm_out()
4356 if (!emulator_io_permited(ctxt, ctxt->dst.val, ctxt->src.bytes)) in check_perm_out()
4575 /* 0xC0 - 0xC7 */
4577 /* 0xC8 - 0xCF */
4579 /* 0xD0 - 0xD7 */
4581 /* 0xD8 - 0xDF */
4583 /* 0xE0 - 0xE7 */
4585 /* 0xE8 - 0xEF */
4587 /* 0xF0 - 0xF7 */
4589 /* 0xF8 - 0xFF */
4596 /* 0xC0 - 0xC7 */
4598 /* 0xC8 - 0xCF */
4600 /* 0xD0 - 0xD7 */
4602 /* 0xD8 - 0xDF */
4604 /* 0xE0 - 0xE7 */
4606 /* 0xE8 - 0xEF */
4608 /* 0xF0 - 0xF7 */
4610 /* 0xF8 - 0xFF */
4617 /* 0xC0 - 0xC7 */
4619 /* 0xC8 - 0xCF */
4621 /* 0xD0 - 0xD7 */
4623 /* 0xD8 - 0xDF */
4625 /* 0xE0 - 0xE7 */
4627 /* 0xE8 - 0xEF */
4629 /* 0xF0 - 0xF7 */
4631 /* 0xF8 - 0xFF */
4644 /* 0x00 - 0x07 */
4648 /* 0x08 - 0x0F */
4652 /* 0x10 - 0x17 */
4656 /* 0x18 - 0x1F */
4660 /* 0x20 - 0x27 */
4662 /* 0x28 - 0x2F */
4664 /* 0x30 - 0x37 */
4666 /* 0x38 - 0x3F */
4668 /* 0x40 - 0x4F */
4670 /* 0x50 - 0x57 */
4672 /* 0x58 - 0x5F */
4674 /* 0x60 - 0x67 */
4679 /* 0x68 - 0x6F */
4686 /* 0x70 - 0x7F */
4688 /* 0x80 - 0x87 */
4695 /* 0x88 - 0x8F */
4702 /* 0x90 - 0x97 */
4704 /* 0x98 - 0x9F */
4710 /* 0xA0 - 0xA7 */
4715 /* 0xA8 - 0xAF */
4720 /* 0xB0 - 0xB7 */
4722 /* 0xB8 - 0xBF */
4724 /* 0xC0 - 0xC7 */
4731 /* 0xC8 - 0xCF */
4737 /* 0xD0 - 0xD7 */
4744 /* 0xD8 - 0xDF */
4746 /* 0xE0 - 0xE7 */
4751 /* 0xE8 - 0xEF */
4757 /* 0xF0 - 0xF7 */
4761 /* 0xF8 - 0xFF */
4768 /* 0x00 - 0x0F */
4774 /* 0x10 - 0x1F */
4784 /* 0x20 - 0x2F */
4796 /* 0x30 - 0x3F */
4805 /* 0x40 - 0x4F */
4807 /* 0x50 - 0x5F */
4809 /* 0x60 - 0x6F */
4814 /* 0x70 - 0x7F */
4819 /* 0x80 - 0x8F */
4821 /* 0x90 - 0x9F */
4823 /* 0xA0 - 0xA7 */
4829 /* 0xA8 - 0xAF */
4836 /* 0xB0 - 0xB7 */
4843 /* 0xB8 - 0xBF */
4850 /* 0xC0 - 0xC7 */
4854 /* 0xC8 - 0xCF */
4856 /* 0xD0 - 0xDF */
4858 /* 0xE0 - 0xEF */
4861 /* 0xF0 - 0xFF */
4886 /* 0x00 - 0x7f */
4888 /* 0x80 - 0xef */
4890 /* 0xf0 - 0xf1 */
4893 /* 0xf2 - 0xff */
4917 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in imm_size()
4923 static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_imm() argument
4928 op->type = OP_IMM; in decode_imm()
4929 op->bytes = size; in decode_imm()
4930 op->addr.mem.ea = ctxt->_eip; in decode_imm()
4931 /* NB. Immediates are sign-extended as necessary. */ in decode_imm()
4932 switch (op->bytes) { in decode_imm()
4934 op->val = insn_fetch(s8, ctxt); in decode_imm()
4937 op->val = insn_fetch(s16, ctxt); in decode_imm()
4940 op->val = insn_fetch(s32, ctxt); in decode_imm()
4943 op->val = insn_fetch(s64, ctxt); in decode_imm()
4947 switch (op->bytes) { in decode_imm()
4949 op->val &= 0xff; in decode_imm()
4952 op->val &= 0xffff; in decode_imm()
4955 op->val &= 0xffffffff; in decode_imm()
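/*
 * Illustrative sketch (not part of the emulator): decode_imm() first fetches
 * the immediate with sign extension to the full register width and then,
 * unless sign extension was requested, masks it back down to its natural
 * width.  A stand-alone equivalent of that two-step sequence:
 */
#include <stdint.h>

static uint64_t decode_imm_sketch(int64_t fetched, unsigned int size,
				  int sign_extend)
{
	uint64_t val = (uint64_t)fetched;	/* fetch already sign-extended */

	if (sign_extend)
		return val;

	switch (size) {
	case 1: return val & 0xff;
	case 2: return val & 0xffff;
	case 4: return val & 0xffffffff;
	default: return val;			/* 8-byte immediates keep all bits */
	}
}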
4963 static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_operand() argument
4970 decode_register_operand(ctxt, op); in decode_operand()
4973 rc = decode_imm(ctxt, op, 1, false); in decode_operand()
4976 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4978 *op = ctxt->memop; in decode_operand()
4979 ctxt->memopp = op; in decode_operand()
4980 if (ctxt->d & BitOp) in decode_operand()
4982 op->orig_val = op->val; in decode_operand()
4985 ctxt->memop.bytes = (ctxt->op_bytes == 8) ? 16 : 8; in decode_operand()
4988 op->type = OP_REG; in decode_operand()
4989 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4990 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
4991 fetch_register_operand(op); in decode_operand()
4992 op->orig_val = op->val; in decode_operand()
4995 op->type = OP_REG; in decode_operand()
4996 op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes; in decode_operand()
4997 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
4998 fetch_register_operand(op); in decode_operand()
4999 op->orig_val = op->val; in decode_operand()
5002 if (ctxt->d & ByteOp) { in decode_operand()
5003 op->type = OP_NONE; in decode_operand()
5006 op->type = OP_REG; in decode_operand()
5007 op->bytes = ctxt->op_bytes; in decode_operand()
5008 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
5009 fetch_register_operand(op); in decode_operand()
5010 op->orig_val = op->val; in decode_operand()
5013 op->type = OP_MEM; in decode_operand()
5014 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5015 op->addr.mem.ea = in decode_operand()
5017 op->addr.mem.seg = VCPU_SREG_ES; in decode_operand()
5018 op->val = 0; in decode_operand()
5019 op->count = 1; in decode_operand()
5022 op->type = OP_REG; in decode_operand()
5023 op->bytes = 2; in decode_operand()
5024 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
5025 fetch_register_operand(op); in decode_operand()
5028 op->type = OP_IMM; in decode_operand()
5029 op->bytes = 1; in decode_operand()
5030 op->val = reg_read(ctxt, VCPU_REGS_RCX) & 0xff; in decode_operand()
5033 rc = decode_imm(ctxt, op, 1, true); in decode_operand()
5036 op->type = OP_IMM; in decode_operand()
5037 op->bytes = 1; in decode_operand()
5038 op->val = 1; in decode_operand()
5041 rc = decode_imm(ctxt, op, imm_size(ctxt), true); in decode_operand()
5044 rc = decode_imm(ctxt, op, ctxt->op_bytes, true); in decode_operand()
5047 ctxt->memop.bytes = 1; in decode_operand()
5048 if (ctxt->memop.type == OP_REG) { in decode_operand()
5049 ctxt->memop.addr.reg = decode_register(ctxt, in decode_operand()
5050 ctxt->modrm_rm, true); in decode_operand()
5051 fetch_register_operand(&ctxt->memop); in decode_operand()
5055 ctxt->memop.bytes = 2; in decode_operand()
5058 ctxt->memop.bytes = 4; in decode_operand()
5061 rc = decode_imm(ctxt, op, 2, false); in decode_operand()
5064 rc = decode_imm(ctxt, op, imm_size(ctxt), false); in decode_operand()
5067 op->type = OP_MEM; in decode_operand()
5068 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5069 op->addr.mem.ea = in decode_operand()
5071 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
5072 op->val = 0; in decode_operand()
5073 op->count = 1; in decode_operand()
5076 op->type = OP_MEM; in decode_operand()
5077 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
5078 op->addr.mem.ea = in decode_operand()
5082 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
5083 op->val = 0; in decode_operand()
5086 op->type = OP_IMM; in decode_operand()
5087 op->addr.mem.ea = ctxt->_eip; in decode_operand()
5088 op->bytes = ctxt->op_bytes + 2; in decode_operand()
5089 insn_fetch_arr(op->valptr, op->bytes, ctxt); in decode_operand()
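			/*
			 * Far-pointer operands (the immediate form above and
			 * the memory form below) consist of an op_bytes-sized
			 * offset followed by a 2-byte segment selector, hence
			 * the "+ 2".
			 */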
5092 ctxt->memop.bytes = ctxt->op_bytes + 2; in decode_operand()
5095 op->type = OP_IMM; in decode_operand()
5096 op->val = VCPU_SREG_ES; in decode_operand()
5099 op->type = OP_IMM; in decode_operand()
5100 op->val = VCPU_SREG_CS; in decode_operand()
5103 op->type = OP_IMM; in decode_operand()
5104 op->val = VCPU_SREG_SS; in decode_operand()
5107 op->type = OP_IMM; in decode_operand()
5108 op->val = VCPU_SREG_DS; in decode_operand()
5111 op->type = OP_IMM; in decode_operand()
5112 op->val = VCPU_SREG_FS; in decode_operand()
5115 op->type = OP_IMM; in decode_operand()
5116 op->val = VCPU_SREG_GS; in decode_operand()
5121 op->type = OP_NONE; /* Disable writeback. */ in decode_operand()
5132 int mode = ctxt->mode; in x86_decode_insn() local
5140 ctxt->memop.type = OP_NONE; in x86_decode_insn()
5141 ctxt->memopp = NULL; in x86_decode_insn()
5142 ctxt->_eip = ctxt->eip; in x86_decode_insn()
5143 ctxt->fetch.ptr = ctxt->fetch.data; in x86_decode_insn()
5144 ctxt->fetch.end = ctxt->fetch.data + insn_len; in x86_decode_insn()
5145 ctxt->opcode_len = 1; in x86_decode_insn()
5146 ctxt->intercept = x86_intercept_none; in x86_decode_insn()
5148 memcpy(ctxt->fetch.data, insn, insn_len); in x86_decode_insn()
5155 switch (mode) { in x86_decode_insn()
5159 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS); in x86_decode_insn()
5179 ctxt->op_bytes = def_op_bytes; in x86_decode_insn()
5180 ctxt->ad_bytes = def_ad_bytes; in x86_decode_insn()
5184 switch (ctxt->b = insn_fetch(u8, ctxt)) { in x86_decode_insn()
5185 case 0x66: /* operand-size override */ in x86_decode_insn()
5188 ctxt->op_bytes = def_op_bytes ^ 6; in x86_decode_insn()
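			/* 2 ^ 6 == 4 and 4 ^ 6 == 2: toggle between 16-bit and 32-bit operand size */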
5190 case 0x67: /* address-size override */ in x86_decode_insn()
5191 if (mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
5193 ctxt->ad_bytes = def_ad_bytes ^ 12; in x86_decode_insn()
5196 ctxt->ad_bytes = def_ad_bytes ^ 6; in x86_decode_insn()
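			/*
			 * The address-size override likewise toggles via XOR:
			 * 8 ^ 12 == 4 in long mode (above), 2 ^ 6 == 4 and
			 * 4 ^ 6 == 2 otherwise.
			 */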
5200 ctxt->seg_override = VCPU_SREG_ES; in x86_decode_insn()
5204 ctxt->seg_override = VCPU_SREG_CS; in x86_decode_insn()
5208 ctxt->seg_override = VCPU_SREG_SS; in x86_decode_insn()
5212 ctxt->seg_override = VCPU_SREG_DS; in x86_decode_insn()
5216 ctxt->seg_override = VCPU_SREG_FS; in x86_decode_insn()
5220 ctxt->seg_override = VCPU_SREG_GS; in x86_decode_insn()
5223 if (mode != X86EMUL_MODE_PROT64) in x86_decode_insn()
5225 ctxt->rex_prefix = ctxt->b; in x86_decode_insn()
5228 ctxt->lock_prefix = 1; in x86_decode_insn()
5232 ctxt->rep_prefix = ctxt->b; in x86_decode_insn()
5240 ctxt->rex_prefix = 0; in x86_decode_insn()
5246 if (ctxt->rex_prefix & 8) in x86_decode_insn()
5247 ctxt->op_bytes = 8; /* REX.W */ in x86_decode_insn()
5250 opcode = opcode_table[ctxt->b]; in x86_decode_insn()
5251 /* Two-byte opcode? */ in x86_decode_insn()
5252 if (ctxt->b == 0x0f) { in x86_decode_insn()
5253 ctxt->opcode_len = 2; in x86_decode_insn()
5254 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
5255 opcode = twobyte_table[ctxt->b]; in x86_decode_insn()
5258 if (ctxt->b == 0x38) { in x86_decode_insn()
5259 ctxt->opcode_len = 3; in x86_decode_insn()
5260 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
5261 opcode = opcode_map_0f_38[ctxt->b]; in x86_decode_insn()
5264 ctxt->d = opcode.flags; in x86_decode_insn()
5266 if (ctxt->d & ModRM) in x86_decode_insn()
5267 ctxt->modrm = insn_fetch(u8, ctxt); in x86_decode_insn()
5269 /* vex-prefix instructions are not implemented */ in x86_decode_insn()
5270 if (ctxt->opcode_len == 1 && (ctxt->b == 0xc5 || ctxt->b == 0xc4) && in x86_decode_insn()
5271 (mode == X86EMUL_MODE_PROT64 || (ctxt->modrm & 0xc0) == 0xc0)) { in x86_decode_insn()
5272 ctxt->d = NotImpl; in x86_decode_insn()
5275 while (ctxt->d & GroupMask) { in x86_decode_insn()
5276 switch (ctxt->d & GroupMask) { in x86_decode_insn()
5278 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
5282 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
5283 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
5284 opcode = opcode.u.gdual->mod3[goffset]; in x86_decode_insn()
5286 opcode = opcode.u.gdual->mod012[goffset]; in x86_decode_insn()
5289 goffset = ctxt->modrm & 7; in x86_decode_insn()
5293 if (ctxt->rep_prefix && op_prefix) in x86_decode_insn()
5295 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix; in x86_decode_insn()
5297 case 0x00: opcode = opcode.u.gprefix->pfx_no; break; in x86_decode_insn()
5298 case 0x66: opcode = opcode.u.gprefix->pfx_66; break; in x86_decode_insn()
5299 case 0xf2: opcode = opcode.u.gprefix->pfx_f2; break; in x86_decode_insn()
5300 case 0xf3: opcode = opcode.u.gprefix->pfx_f3; break; in x86_decode_insn()
5304 if (ctxt->modrm > 0xbf) { in x86_decode_insn()
5305 size_t size = ARRAY_SIZE(opcode.u.esc->high); in x86_decode_insn()
5307 ctxt->modrm - 0xc0, size); in x86_decode_insn()
5309 opcode = opcode.u.esc->high[index]; in x86_decode_insn()
5311 opcode = opcode.u.esc->op[(ctxt->modrm >> 3) & 7]; in x86_decode_insn()
5315 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
5316 opcode = opcode.u.idual->mod3; in x86_decode_insn()
5318 opcode = opcode.u.idual->mod012; in x86_decode_insn()
5321 if (ctxt->mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
5322 opcode = opcode.u.mdual->mode64; in x86_decode_insn()
5324 opcode = opcode.u.mdual->mode32; in x86_decode_insn()
5330 ctxt->d &= ~(u64)GroupMask; in x86_decode_insn()
5331 ctxt->d |= opcode.flags; in x86_decode_insn()
5335 if (ctxt->d == 0) in x86_decode_insn()
5338 ctxt->execute = opcode.u.execute; in x86_decode_insn()
5340 if (unlikely(ctxt->ud) && likely(!(ctxt->d & EmulateOnUD))) in x86_decode_insn()
5343 if (unlikely(ctxt->d & in x86_decode_insn()
5350 ctxt->check_perm = opcode.check_perm; in x86_decode_insn()
5351 ctxt->intercept = opcode.intercept; in x86_decode_insn()
5353 if (ctxt->d & NotImpl) in x86_decode_insn()
5356 if (mode == X86EMUL_MODE_PROT64) { in x86_decode_insn()
5357 if (ctxt->op_bytes == 4 && (ctxt->d & Stack)) in x86_decode_insn()
5358 ctxt->op_bytes = 8; in x86_decode_insn()
5359 else if (ctxt->d & NearBranch) in x86_decode_insn()
5360 ctxt->op_bytes = 8; in x86_decode_insn()
5363 if (ctxt->d & Op3264) { in x86_decode_insn()
5364 if (mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
5365 ctxt->op_bytes = 8; in x86_decode_insn()
5367 ctxt->op_bytes = 4; in x86_decode_insn()
5370 if ((ctxt->d & No16) && ctxt->op_bytes == 2) in x86_decode_insn()
5371 ctxt->op_bytes = 4; in x86_decode_insn()
5373 if (ctxt->d & Sse) in x86_decode_insn()
5374 ctxt->op_bytes = 16; in x86_decode_insn()
5375 else if (ctxt->d & Mmx) in x86_decode_insn()
5376 ctxt->op_bytes = 8; in x86_decode_insn()
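		/* SSE operands live in 16-byte XMM registers, MMX operands in 8-byte MM registers */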
5380 if (ctxt->d & ModRM) { in x86_decode_insn()
5381 rc = decode_modrm(ctxt, &ctxt->memop); in x86_decode_insn()
5384 ctxt->seg_override = ctxt->modrm_seg; in x86_decode_insn()
5386 } else if (ctxt->d & MemAbs) in x86_decode_insn()
5387 rc = decode_abs(ctxt, &ctxt->memop); in x86_decode_insn()
5392 ctxt->seg_override = VCPU_SREG_DS; in x86_decode_insn()
5394 ctxt->memop.addr.mem.seg = ctxt->seg_override; in x86_decode_insn()
5400 rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask); in x86_decode_insn()
5408 rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask); in x86_decode_insn()
5413 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask); in x86_decode_insn()
5415 if (ctxt->rip_relative && likely(ctxt->memopp)) in x86_decode_insn()
5416 ctxt->memopp->addr.mem.ea = address_mask(ctxt, in x86_decode_insn()
5417 ctxt->memopp->addr.mem.ea + ctxt->_eip); in x86_decode_insn()
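	/*
	 * RIP-relative displacements are relative to the end of the
	 * instruction, so the effective address can only be fixed up here,
	 * once decoding has advanced _eip past the final byte.
	 */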
5421 ctxt->have_exception = true; in x86_decode_insn()
5427 return ctxt->d & PageTable; in x86_page_table_writing_insn()
5436 * - if REPE/REPZ and ZF = 0 then done in string_insn_completed()
5437 * - if REPNE/REPNZ and ZF = 1 then done in string_insn_completed()
5439 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) || in string_insn_completed()
5440 (ctxt->b == 0xae) || (ctxt->b == 0xaf)) in string_insn_completed()
5441 && (((ctxt->rep_prefix == REPE_PREFIX) && in string_insn_completed()
5442 ((ctxt->eflags & X86_EFLAGS_ZF) == 0)) in string_insn_completed()
5443 || ((ctxt->rep_prefix == REPNE_PREFIX) && in string_insn_completed()
5444 ((ctxt->eflags & X86_EFLAGS_ZF) == X86_EFLAGS_ZF)))) in string_insn_completed()
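/*
 * Illustrative sketch (not part of the emulator): the check above encodes the
 * early-termination rule for repeated CMPS/SCAS (opcodes 0xa6/0xa7 and
 * 0xae/0xaf).  As a stand-alone predicate (the rCX == 0 exit is handled
 * elsewhere):
 */
static int rep_cmps_scas_done_sketch(int is_repe, int zf)
{
	/* REPE/REPZ stops when ZF is clear, REPNE/REPNZ stops when ZF is set */
	return is_repe ? !zf : zf;
}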
5464 static void fetch_possible_mmx_operand(struct operand *op) in fetch_possible_mmx_operand() argument
5466 if (op->type == OP_MM) in fetch_possible_mmx_operand()
5467 read_mmx_reg(&op->mm_val, op->addr.mm); in fetch_possible_mmx_operand()
5472 ulong flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF; in fastop()
5474 if (!(ctxt->d & ByteOp)) in fastop()
5475 fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE; in fastop()
5478 : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags), in fastop()
5480 : "c"(ctxt->src2.val)); in fastop()
5482 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK); in fastop()
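/*
 * Illustrative sketch (not part of the emulator): fastop() above relies on
 * the FASTOP* macros emitting one entry point per operand size - byte, word,
 * long, quad - each FASTOP_SIZE bytes apart.  Under that layout the index of
 * the entry to call is simply log2 of the destination size:
 */
static unsigned int fastop_entry_index_sketch(unsigned int dst_bytes)
{
	/* 1 -> 0 (byte), 2 -> 1 (word), 4 -> 2 (long), 8 -> 3 (quad) */
	return __builtin_ctz(dst_bytes);
}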
5490 memset(&ctxt->rip_relative, 0, in init_decode_cache()
5491 (void *)&ctxt->modrm - (void *)&ctxt->rip_relative); in init_decode_cache()
5493 ctxt->io_read.pos = 0; in init_decode_cache()
5494 ctxt->io_read.end = 0; in init_decode_cache()
5495 ctxt->mem_read.end = 0; in init_decode_cache()
5500 const struct x86_emulate_ops *ops = ctxt->ops; in x86_emulate_insn()
5502 int saved_dst_type = ctxt->dst.type; in x86_emulate_insn()
5505 ctxt->mem_read.pos = 0; in x86_emulate_insn()
5508 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) { in x86_emulate_insn()
5513 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) { in x86_emulate_insn()
5518 emul_flags = ctxt->ops->get_hflags(ctxt); in x86_emulate_insn()
5519 if (unlikely(ctxt->d & in x86_emulate_insn()
5521 if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) || in x86_emulate_insn()
5522 (ctxt->d & Undefined)) { in x86_emulate_insn()
5527 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM))) in x86_emulate_insn()
5528 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) { in x86_emulate_insn()
5533 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) { in x86_emulate_insn()
5538 if (ctxt->d & Mmx) { in x86_emulate_insn()
5546 fetch_possible_mmx_operand(&ctxt->src); in x86_emulate_insn()
5547 fetch_possible_mmx_operand(&ctxt->src2); in x86_emulate_insn()
5548 if (!(ctxt->d & Mov)) in x86_emulate_insn()
5549 fetch_possible_mmx_operand(&ctxt->dst); in x86_emulate_insn()
5552 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && ctxt->intercept) { in x86_emulate_insn()
5553 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5559 /* Instruction can only be executed in protected mode */ in x86_emulate_insn()
5560 if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) { in x86_emulate_insn()
5566 if ((ctxt->d & Priv) && ops->cpl(ctxt)) { in x86_emulate_insn()
5567 if (ctxt->d & PrivUD) in x86_emulate_insn()
5575 if (ctxt->d & CheckPerm) { in x86_emulate_insn()
5576 rc = ctxt->check_perm(ctxt); in x86_emulate_insn()
5581 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5582 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5588 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5592 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5593 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5599 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) { in x86_emulate_insn()
5600 rc = segmented_read(ctxt, ctxt->src.addr.mem, in x86_emulate_insn()
5601 ctxt->src.valptr, ctxt->src.bytes); in x86_emulate_insn()
5604 ctxt->src.orig_val64 = ctxt->src.val64; in x86_emulate_insn()
5607 if (ctxt->src2.type == OP_MEM) { in x86_emulate_insn()
5608 rc = segmented_read(ctxt, ctxt->src2.addr.mem, in x86_emulate_insn()
5609 &ctxt->src2.val, ctxt->src2.bytes); in x86_emulate_insn()
5614 if ((ctxt->d & DstMask) == ImplicitOps) in x86_emulate_insn()
5618 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) { in x86_emulate_insn()
5619 /* optimisation - avoid slow emulated read if Mov */ in x86_emulate_insn()
5620 rc = segmented_read(ctxt, ctxt->dst.addr.mem, in x86_emulate_insn()
5621 &ctxt->dst.val, ctxt->dst.bytes); in x86_emulate_insn()
5623 if (!(ctxt->d & NoWrite) && in x86_emulate_insn()
5625 ctxt->exception.vector == PF_VECTOR) in x86_emulate_insn()
5626 ctxt->exception.error_code |= PFERR_WRITE_MASK; in x86_emulate_insn()
5630 /* Copy full 64-bit value for CMPXCHG8B. */ in x86_emulate_insn()
5631 ctxt->dst.orig_val64 = ctxt->dst.val64; in x86_emulate_insn()
5635 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5636 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5642 if (ctxt->rep_prefix && (ctxt->d & String)) in x86_emulate_insn()
5643 ctxt->eflags |= X86_EFLAGS_RF; in x86_emulate_insn()
5645 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5647 if (ctxt->execute) { in x86_emulate_insn()
5648 if (ctxt->d & Fastop) in x86_emulate_insn()
5649 rc = fastop(ctxt, ctxt->fop); in x86_emulate_insn()
5651 rc = ctxt->execute(ctxt); in x86_emulate_insn()
5657 if (ctxt->opcode_len == 2) in x86_emulate_insn()
5659 else if (ctxt->opcode_len == 3) in x86_emulate_insn()
5662 switch (ctxt->b) { in x86_emulate_insn()
5664 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5665 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5668 ctxt->dst.val = ctxt->src.addr.mem.ea; in x86_emulate_insn()
5671 if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX)) in x86_emulate_insn()
5672 ctxt->dst.type = OP_NONE; in x86_emulate_insn()
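		/*
		 * Sign-extend the low half of the accumulator to the full
		 * operand size (the CBW/CWDE/CDQE family): s8 -> 16 bits,
		 * s16 -> 32 bits, s32 -> 64 bits.
		 */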
5677 switch (ctxt->op_bytes) { in x86_emulate_insn()
5678 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break; in x86_emulate_insn()
5679 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break; in x86_emulate_insn()
5680 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break; in x86_emulate_insn()
5687 rc = emulate_int(ctxt, ctxt->src.val); in x86_emulate_insn()
5690 if (ctxt->eflags & X86_EFLAGS_OF) in x86_emulate_insn()
5695 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5696 ctxt->dst.type = OP_NONE; /* Disable writeback. */ in x86_emulate_insn()
5699 ctxt->ops->halt(ctxt); in x86_emulate_insn()
5703 ctxt->eflags ^= X86_EFLAGS_CF; in x86_emulate_insn()
5706 ctxt->eflags &= ~X86_EFLAGS_CF; in x86_emulate_insn()
5709 ctxt->eflags |= X86_EFLAGS_CF; in x86_emulate_insn()
5712 ctxt->eflags &= ~X86_EFLAGS_DF; in x86_emulate_insn()
5715 ctxt->eflags |= X86_EFLAGS_DF; in x86_emulate_insn()
5725 if (ctxt->d & SrcWrite) { in x86_emulate_insn()
5726 BUG_ON(ctxt->src.type == OP_MEM || ctxt->src.type == OP_MEM_STR); in x86_emulate_insn()
5727 rc = writeback(ctxt, &ctxt->src); in x86_emulate_insn()
5731 if (!(ctxt->d & NoWrite)) { in x86_emulate_insn()
5732 rc = writeback(ctxt, &ctxt->dst); in x86_emulate_insn()
5741 ctxt->dst.type = saved_dst_type; in x86_emulate_insn()
5743 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5744 string_addr_inc(ctxt, VCPU_REGS_RSI, &ctxt->src); in x86_emulate_insn()
5746 if ((ctxt->d & DstMask) == DstDI) in x86_emulate_insn()
5747 string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst); in x86_emulate_insn()
5749 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5751 struct read_cache *r = &ctxt->io_read; in x86_emulate_insn()
5752 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5753 count = ctxt->src.count; in x86_emulate_insn()
5755 count = ctxt->dst.count; in x86_emulate_insn()
5756 register_address_increment(ctxt, VCPU_REGS_RCX, -count); in x86_emulate_insn()
5760 * Re-enter guest when pio read ahead buffer is empty in x86_emulate_insn()
5763 if ((r->end != 0 || reg_read(ctxt, VCPU_REGS_RCX) & 0x3ff) && in x86_emulate_insn()
5764 (r->end == 0 || r->end != r->pos)) { in x86_emulate_insn()
5770 ctxt->mem_read.end = 0; in x86_emulate_insn()
5776 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5779 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5780 if (ctxt->mode != X86EMUL_MODE_PROT64) in x86_emulate_insn()
5781 ctxt->eip = (u32)ctxt->_eip; in x86_emulate_insn()
5785 WARN_ON(ctxt->exception.vector > 0x1f); in x86_emulate_insn()
5786 ctxt->have_exception = true; in x86_emulate_insn()
5797 switch (ctxt->b) { in x86_emulate_insn()
5799 (ctxt->ops->wbinvd)(ctxt); in x86_emulate_insn()
5807 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg); in x86_emulate_insn()
5810 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val); in x86_emulate_insn()
5813 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5814 ctxt->dst.val = ctxt->src.val; in x86_emulate_insn()
5815 else if (ctxt->op_bytes != 4) in x86_emulate_insn()
5816 ctxt->dst.type = OP_NONE; /* no writeback */ in x86_emulate_insn()
5819 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5820 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5823 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags); in x86_emulate_insn()
5826 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5827 ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val in x86_emulate_insn()
5828 : (u16) ctxt->src.val; in x86_emulate_insn()
5831 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5832 ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val : in x86_emulate_insn()
5833 (s16) ctxt->src.val; in x86_emulate_insn()
5862 if (ctxt->rep_prefix && (ctxt->d & String)) in emulator_can_use_gpa()
5865 if (ctxt->d & TwoMemOp) in emulator_can_use_gpa()