Lines Matching +full:op +full:- +full:mode in arch/x86/kvm/emulate.c (KVM x86 instruction decoder and emulator)
1 // SPDX-License-Identifier: GPL-2.0-only
5 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
9 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
18 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
27 #include <asm/nospec-branch.h>
29 #include <asm/text-patching.h>
45 #define OpMem64 6ull /* Memory, 64-bit */
46 #define OpImmUByte 7ull /* Zero-extended 8-bit immediate */
49 #define OpImmByte 10ull /* 8-bit sign extended immediate */
51 #define OpImm 12ull /* Sign extended up to 32-bit immediate */
52 #define OpMem16 13ull /* Memory operand (16-bit). */
53 #define OpMem32 14ull /* Memory operand (32-bit). */
65 #define OpMem8 26ull /* 8-bit zero extended memory operand */
66 #define OpImm64 27ull /* Sign extended 16/32/64-bit immediate */
67 #define OpXLat 28ull /* memory at BX/EBX/RBX + zero-extended AL */
69 #define OpAccHi 30ull /* High part of extended acc (-/DX/EDX/RDX) */
72 #define OpMask ((1ull << OpBits) - 1)
75 * Opcode effective-address decode tables.
83 /* Operand sizes: 8-bit operands or specified/overridden size. */
84 #define ByteOp (1<<0) /* 8-bit operands. */
139 #define Prot (1<<21) /* instruction generates #UD if not in prot-mode */
142 #define Op3264 (1<<24) /* Operand is 64b in long mode, 32b otherwise */
223 struct opcode op[8]; member
248 unsigned long dirty = ctxt->regs_dirty; in writeback_registers()
252 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]); in writeback_registers()
257 ctxt->regs_dirty = 0; in invalidate_registers()
258 ctxt->regs_valid = 0; in invalidate_registers()
274 #define EM_ASM_START(op) \ argument
275 static int em_##op(struct x86_emulate_ctxt *ctxt) \
277 unsigned long flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF; \
279 if (!(ctxt->d & ByteOp)) \
280 bytes = ctxt->dst.bytes; \
288 : "+a" (ctxt->dst.val), \
289 "+d" (ctxt->src.val), \
292 : "c" (ctxt->src2.val))
294 #define __EM_ASM_1(op, dst) \ argument
295 __EM_ASM(#op " %%" #dst " \n\t")
297 #define __EM_ASM_1_EX(op, dst) \ argument
298 __EM_ASM(#op " %%" #dst " \n\t" \
301 #define __EM_ASM_2(op, dst, src) \ argument
302 __EM_ASM(#op " %%" #src ", %%" #dst " \n\t")
304 #define __EM_ASM_3(op, dst, src, src2) \ argument
305 __EM_ASM(#op " %%" #src2 ", %%" #src ", %%" #dst " \n\t")
309 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK); \
313 /* 1-operand, using "a" (dst) */
314 #define EM_ASM_1(op) \ argument
315 EM_ASM_START(op) \
316 case 1: __EM_ASM_1(op##b, al); break; \
317 case 2: __EM_ASM_1(op##w, ax); break; \
318 case 4: __EM_ASM_1(op##l, eax); break; \
319 ON64(case 8: __EM_ASM_1(op##q, rax); break;) \
322 /* 1-operand, using "c" (src2) */
323 #define EM_ASM_1SRC2(op, name) \ argument
325 case 1: __EM_ASM_1(op##b, cl); break; \
326 case 2: __EM_ASM_1(op##w, cx); break; \
327 case 4: __EM_ASM_1(op##l, ecx); break; \
328 ON64(case 8: __EM_ASM_1(op##q, rcx); break;) \
331 /* 1-operand, using "c" (src2) with exception */
332 #define EM_ASM_1SRC2EX(op, name) \ argument
334 case 1: __EM_ASM_1_EX(op##b, cl); break; \
335 case 2: __EM_ASM_1_EX(op##w, cx); break; \
336 case 4: __EM_ASM_1_EX(op##l, ecx); break; \
337 ON64(case 8: __EM_ASM_1_EX(op##q, rcx); break;) \
340 /* 2-operand, using "a" (dst), "d" (src) */
341 #define EM_ASM_2(op) \ argument
342 EM_ASM_START(op) \
343 case 1: __EM_ASM_2(op##b, al, dl); break; \
344 case 2: __EM_ASM_2(op##w, ax, dx); break; \
345 case 4: __EM_ASM_2(op##l, eax, edx); break; \
346 ON64(case 8: __EM_ASM_2(op##q, rax, rdx); break;) \
349 /* 2-operand, reversed */
350 #define EM_ASM_2R(op, name) \ argument
352 case 1: __EM_ASM_2(op##b, dl, al); break; \
353 case 2: __EM_ASM_2(op##w, dx, ax); break; \
354 case 4: __EM_ASM_2(op##l, edx, eax); break; \
355 ON64(case 8: __EM_ASM_2(op##q, rdx, rax); break;) \
358 /* 2-operand, word only (no byte op) */
359 #define EM_ASM_2W(op) \ argument
360 EM_ASM_START(op) \
362 case 2: __EM_ASM_2(op##w, ax, dx); break; \
363 case 4: __EM_ASM_2(op##l, eax, edx); break; \
364 ON64(case 8: __EM_ASM_2(op##q, rax, rdx); break;) \
367 /* 2-operand, using "a" (dst) and CL (src2) */
368 #define EM_ASM_2CL(op) \ argument
369 EM_ASM_START(op) \
370 case 1: __EM_ASM_2(op##b, al, cl); break; \
371 case 2: __EM_ASM_2(op##w, ax, cl); break; \
372 case 4: __EM_ASM_2(op##l, eax, cl); break; \
373 ON64(case 8: __EM_ASM_2(op##q, rax, cl); break;) \
376 /* 3-operand, using "a" (dst), "d" (src) and CL (src2) */
377 #define EM_ASM_3WCL(op) \ argument
378 EM_ASM_START(op) \
380 case 2: __EM_ASM_3(op##w, ax, dx, cl); break; \
381 case 4: __EM_ASM_3(op##l, eax, edx, cl); break; \
382 ON64(case 8: __EM_ASM_3(op##q, rax, rdx, cl); break;) \
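The EM_ASM_* family above builds one handler per operand width by executing the host's own copy of the instruction on the guest operands and harvesting the resulting EFLAGS, instead of re-deriving flag semantics in C. A minimal stand-alone sketch of the idea (my simplified analogue, not the kernel macros; x86-64 GCC inline asm assumed):

#include <stdio.h>

/* Run the host's "addl" on a 32-bit value, roughly what EM_ASM_2(add)
 * expands to for the 4-byte case, and return the EFLAGS observed after it. */
static unsigned long host_addl(unsigned int *dst, unsigned int src)
{
        unsigned long flags;

        asm volatile("addl %2, %0\n\t"
                     "pushf\n\t"
                     "pop %1"
                     : "+r" (*dst), "=r" (flags)
                     : "r" (src)
                     : "cc");
        return flags;
}

int main(void)
{
        unsigned int a = 0xffffffffu;
        unsigned long flags = host_addl(&a, 1);

        printf("result=%#x CF=%lu ZF=%lu\n",
               a, flags & 1, (flags >> 6) & 1);    /* result=0 CF=1 ZF=1 */
        return 0;
}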
390 ctxt->dst.val = 0xFF * !!(ctxt->eflags & X86_EFLAGS_CF); in em_salc()
416 .rep_prefix = ctxt->rep_prefix, in emulator_check_intercept()
417 .modrm_mod = ctxt->modrm_mod, in emulator_check_intercept()
418 .modrm_reg = ctxt->modrm_reg, in emulator_check_intercept()
419 .modrm_rm = ctxt->modrm_rm, in emulator_check_intercept()
420 .src_val = ctxt->src.val64, in emulator_check_intercept()
421 .dst_val = ctxt->dst.val64, in emulator_check_intercept()
422 .src_bytes = ctxt->src.bytes, in emulator_check_intercept()
423 .dst_bytes = ctxt->dst.bytes, in emulator_check_intercept()
424 .src_type = ctxt->src.type, in emulator_check_intercept()
425 .dst_type = ctxt->dst.type, in emulator_check_intercept()
426 .ad_bytes = ctxt->ad_bytes, in emulator_check_intercept()
427 .rip = ctxt->eip, in emulator_check_intercept()
428 .next_rip = ctxt->_eip, in emulator_check_intercept()
431 return ctxt->ops->intercept(ctxt, &info, stage); in emulator_check_intercept()
441 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */ in assign_register()
451 break; /* 64b: zero-extend */ in assign_register()
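The comment is worth spelling out: on x86-64 a 32-bit register write zero-extends into bits 63:32, while 8- and 16-bit writes merge into the existing value, which is exactly why the 2-byte case masks and the 4-byte case plainly assigns. A user-space check of the architectural rule (my example, not kernel code):

#include <stdio.h>

int main(void)
{
        unsigned long r = 0xdeadbeefcafebabeul;

        /* 16-bit write: bits 63:16 survive, like assign_register()'s 2-byte case. */
        asm volatile("movw $0x1111, %w0" : "+r" (r));
        printf("%#lx\n", r);            /* 0xdeadbeefcafe1111 */

        /* 32-bit write: hardware zero-extends, so a plain assignment suffices. */
        asm volatile("movl $0x2222, %k0" : "+r" (r));
        printf("%#lx\n", r);            /* 0x2222 */
        return 0;
}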
460 return (1UL << (ctxt->ad_bytes << 3)) - 1; in ad_mask()
468 if (ctxt->mode == X86EMUL_MODE_PROT64) in stack_mask()
470 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS); in stack_mask()
479 /* Access/update address held in a register, based on addressing mode. */
483 if (ctxt->ad_bytes == sizeof(unsigned long)) in address_mask()
505 assign_register(preg, *preg + inc, ctxt->ad_bytes); in register_address_increment()
517 return desc->g ? (limit << 12) | 0xfff : limit; in desc_limit_scaled()
522 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS) in seg_base()
525 return ctxt->ops->get_cached_segment_base(ctxt, seg); in seg_base()
534 ctxt->exception.vector = vec; in emulate_exception()
535 ctxt->exception.error_code = error; in emulate_exception()
536 ctxt->exception.error_code_valid = valid; in emulate_exception()
580 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg); in get_segment_selector()
591 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg); in set_segment_selector()
592 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg); in set_segment_selector()
597 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48; in ctxt_virt_addr_bits()
604 return !ctxt->ops->is_canonical_addr(ctxt, la, flags); in emul_is_noncanonical_address()
618 u64 alignment = ctxt->d & AlignMask; in insn_alignment()
638 enum x86emul_mode mode, ulong *linear, in __linearize() argument
650 switch (mode) { in __linearize()
652 *linear = la = ctxt->ops->get_untagged_addr(ctxt, la, flags); in __linearize()
657 *max_size = min_t(u64, ~0u, (1ull << va_bits) - la); in __linearize()
663 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL, in __linearize()
667 /* code segment in protected mode or read-only data segment */ in __linearize()
668 if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8)) || !(desc.type & 2)) && in __linearize()
676 /* expand-down segment */ in __linearize()
686 *max_size = (u64)lim + 1 - addr.ea; in __linearize()
692 if (la & (insn_alignment(ctxt, size) - 1)) in __linearize()
708 return __linearize(ctxt, addr, &max_size, size, ctxt->mode, linear, in linearize()
720 if (ctxt->op_bytes != sizeof(unsigned long)) in assign_eip()
721 addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1); in assign_eip()
722 rc = __linearize(ctxt, addr, &max_size, 1, ctxt->mode, &linear, in assign_eip()
725 ctxt->_eip = addr.ea; in assign_eip()
736 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in emulator_recalc_and_set_mode()
738 if (!(ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PE)) { in emulator_recalc_and_set_mode()
739 /* Real mode. cpu must not have long mode active */ in emulator_recalc_and_set_mode()
742 ctxt->mode = X86EMUL_MODE_REAL; in emulator_recalc_and_set_mode()
746 if (ctxt->eflags & X86_EFLAGS_VM) { in emulator_recalc_and_set_mode()
747 /* Protected/VM86 mode. cpu must not have long mode active */ in emulator_recalc_and_set_mode()
750 ctxt->mode = X86EMUL_MODE_VM86; in emulator_recalc_and_set_mode()
754 if (!ctxt->ops->get_segment(ctxt, &selector, &cs, &base3, VCPU_SREG_CS)) in emulator_recalc_and_set_mode()
759 /* Proper long mode */ in emulator_recalc_and_set_mode()
760 ctxt->mode = X86EMUL_MODE_PROT64; in emulator_recalc_and_set_mode()
762 /* 32 bit compatibility mode */ in emulator_recalc_and_set_mode()
763 ctxt->mode = X86EMUL_MODE_PROT32; in emulator_recalc_and_set_mode()
765 ctxt->mode = X86EMUL_MODE_PROT16; in emulator_recalc_and_set_mode()
768 /* Legacy 32 bit / 16 bit mode */ in emulator_recalc_and_set_mode()
769 ctxt->mode = cs.d ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16; in emulator_recalc_and_set_mode()
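Condensed, the decision chain in emulator_recalc_and_set_mode() is a five-way classification on CR0.PE, EFLAGS.VM, EFER.LMA and the CS attributes. A rough pure-function sketch of that ordering (names assumed, error paths omitted):

enum emul_mode { MODE_REAL, MODE_VM86, MODE_PROT16, MODE_PROT32, MODE_PROT64 };

enum emul_mode classify_mode(int cr0_pe, int eflags_vm,
                             int efer_lma, int cs_l, int cs_d)
{
        if (!cr0_pe)
                return MODE_REAL;       /* protection disabled */
        if (eflags_vm)
                return MODE_VM86;       /* virtual-8086 inside protected mode */
        if (efer_lma && cs_l)
                return MODE_PROT64;     /* proper long mode */
        return cs_d ? MODE_PROT32 : MODE_PROT16;   /* legacy/compat, by CS.D */
}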
792 return assign_eip_near(ctxt, ctxt->_eip + rel); in jmp_rel()
798 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true); in linear_read_system()
805 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true); in linear_write_system()
819 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false); in segmented_read_std()
833 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false); in segmented_write_std()
845 int cur_size = ctxt->fetch.end - ctxt->fetch.data; in __do_insn_fetch_bytes()
847 .ea = ctxt->eip + cur_size }; in __do_insn_fetch_bytes()
859 rc = __linearize(ctxt, addr, &max_size, 0, ctxt->mode, &linear, in __do_insn_fetch_bytes()
865 size = min_t(unsigned, size, PAGE_SIZE - offset_in_page(linear)); in __do_insn_fetch_bytes()
871 * still, we must have hit the 15-byte boundary. in __do_insn_fetch_bytes()
876 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end, in __do_insn_fetch_bytes()
877 size, &ctxt->exception); in __do_insn_fetch_bytes()
880 ctxt->fetch.end += size; in __do_insn_fetch_bytes()
887 unsigned done_size = ctxt->fetch.end - ctxt->fetch.ptr; in do_insn_fetch_bytes()
890 return __do_insn_fetch_bytes(ctxt, size - done_size); in do_insn_fetch_bytes()
902 ctxt->_eip += sizeof(_type); \
903 memcpy(&_x, ctxt->fetch.ptr, sizeof(_type)); \
904 ctxt->fetch.ptr += sizeof(_type); \
913 ctxt->_eip += (_size); \
914 memcpy(_arr, ctxt->fetch.ptr, _size); \
915 ctxt->fetch.ptr += (_size); \
927 int highbyte_regs = (ctxt->rex_prefix == 0) && byteop; in decode_register()
999 if (ctxt->src.val == 0) in em_bsf_c()
1000 ctxt->dst.type = OP_NONE; in em_bsf_c()
1007 if (ctxt->src.val == 0) in em_bsr_c()
1008 ctxt->dst.type = OP_NONE; in em_bsr_c()
1017 static void fetch_register_operand(struct operand *op) in fetch_register_operand() argument
1019 switch (op->bytes) { in fetch_register_operand()
1021 op->val = *(u8 *)op->addr.reg; in fetch_register_operand()
1024 op->val = *(u16 *)op->addr.reg; in fetch_register_operand()
1027 op->val = *(u32 *)op->addr.reg; in fetch_register_operand()
1030 op->val = *(u64 *)op->addr.reg; in fetch_register_operand()
1037 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fninit()
1050 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstcw()
1057 ctxt->dst.val = fcw; in em_fnstcw()
1066 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in em_fnstsw()
1073 ctxt->dst.val = fsw; in em_fnstsw()
1079 struct operand *op) in decode_register_operand() argument
1083 if (ctxt->d & ModRM) in decode_register_operand()
1084 reg = ctxt->modrm_reg; in decode_register_operand()
1086 reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3); in decode_register_operand()
1088 if (ctxt->d & Sse) { in decode_register_operand()
1089 op->type = OP_XMM; in decode_register_operand()
1090 op->bytes = 16; in decode_register_operand()
1091 op->addr.xmm = reg; in decode_register_operand()
1092 kvm_read_sse_reg(reg, &op->vec_val); in decode_register_operand()
1095 if (ctxt->d & Mmx) { in decode_register_operand()
1097 op->type = OP_MM; in decode_register_operand()
1098 op->bytes = 8; in decode_register_operand()
1099 op->addr.mm = reg; in decode_register_operand()
1103 op->type = OP_REG; in decode_register_operand()
1104 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_register_operand()
1105 op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp); in decode_register_operand()
1107 fetch_register_operand(op); in decode_register_operand()
1108 op->orig_val = op->val; in decode_register_operand()
1114 ctxt->modrm_seg = VCPU_SREG_SS; in adjust_modrm_seg()
1118 struct operand *op) in decode_modrm() argument
1125 ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */ in decode_modrm()
1126 index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */ in decode_modrm()
1127 base_reg = (ctxt->rex_prefix << 3) & 8; /* REX.B */ in decode_modrm()
1129 ctxt->modrm_mod = (ctxt->modrm & 0xc0) >> 6; in decode_modrm()
1130 ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3; in decode_modrm()
1131 ctxt->modrm_rm = base_reg | (ctxt->modrm & 0x07); in decode_modrm()
1132 ctxt->modrm_seg = VCPU_SREG_DS; in decode_modrm()
1134 if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) { in decode_modrm()
1135 op->type = OP_REG; in decode_modrm()
1136 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_modrm()
1137 op->addr.reg = decode_register(ctxt, ctxt->modrm_rm, in decode_modrm()
1138 ctxt->d & ByteOp); in decode_modrm()
1139 if (ctxt->d & Sse) { in decode_modrm()
1140 op->type = OP_XMM; in decode_modrm()
1141 op->bytes = 16; in decode_modrm()
1142 op->addr.xmm = ctxt->modrm_rm; in decode_modrm()
1143 kvm_read_sse_reg(ctxt->modrm_rm, &op->vec_val); in decode_modrm()
1146 if (ctxt->d & Mmx) { in decode_modrm()
1147 op->type = OP_MM; in decode_modrm()
1148 op->bytes = 8; in decode_modrm()
1149 op->addr.mm = ctxt->modrm_rm & 7; in decode_modrm()
1152 fetch_register_operand(op); in decode_modrm()
1156 op->type = OP_MEM; in decode_modrm()
1158 if (ctxt->ad_bytes == 2) { in decode_modrm()
1164 /* 16-bit ModR/M decode. */ in decode_modrm()
1165 switch (ctxt->modrm_mod) { in decode_modrm()
1167 if (ctxt->modrm_rm == 6) in decode_modrm()
1177 switch (ctxt->modrm_rm) { in decode_modrm()
1197 if (ctxt->modrm_mod != 0) in decode_modrm()
1204 if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 || in decode_modrm()
1205 (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0)) in decode_modrm()
1206 ctxt->modrm_seg = VCPU_SREG_SS; in decode_modrm()
1209 /* 32/64-bit ModR/M decode. */ in decode_modrm()
1210 if ((ctxt->modrm_rm & 7) == 4) { in decode_modrm()
1216 if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0) in decode_modrm()
1222 if ((ctxt->d & IncSP) && in decode_modrm()
1224 modrm_ea += ctxt->op_bytes; in decode_modrm()
1228 } else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) { in decode_modrm()
1230 if (ctxt->mode == X86EMUL_MODE_PROT64) in decode_modrm()
1231 ctxt->rip_relative = 1; in decode_modrm()
1233 base_reg = ctxt->modrm_rm; in decode_modrm()
1237 switch (ctxt->modrm_mod) { in decode_modrm()
1246 op->addr.mem.ea = modrm_ea; in decode_modrm()
1247 if (ctxt->ad_bytes != 8) in decode_modrm()
1248 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea; in decode_modrm()
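A worked byte makes the field extraction at the top of decode_modrm() concrete. Take modrm = 0x44 with no REX prefix:

#include <stdio.h>

int main(void)
{
        unsigned char modrm = 0x44;     /* binary 01 000 100 */

        /* Same shifts as decode_modrm(): mod=1 means a disp8 follows;
         * rm=4 means an SIB byte follows in 32/64-bit address mode. */
        printf("mod=%u reg=%u rm=%u\n",
               (modrm & 0xc0) >> 6,     /* 1 */
               (modrm & 0x38) >> 3,     /* 0 */
               modrm & 0x07);           /* 4 */
        return 0;
}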
1255 struct operand *op) in decode_abs() argument
1259 op->type = OP_MEM; in decode_abs()
1260 switch (ctxt->ad_bytes) { in decode_abs()
1262 op->addr.mem.ea = insn_fetch(u16, ctxt); in decode_abs()
1265 op->addr.mem.ea = insn_fetch(u32, ctxt); in decode_abs()
1268 op->addr.mem.ea = insn_fetch(u64, ctxt); in decode_abs()
1279 if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) { in fetch_bit_operand()
1280 mask = ~((long)ctxt->dst.bytes * 8 - 1); in fetch_bit_operand()
1282 if (ctxt->src.bytes == 2) in fetch_bit_operand()
1283 sv = (s16)ctxt->src.val & (s16)mask; in fetch_bit_operand()
1284 else if (ctxt->src.bytes == 4) in fetch_bit_operand()
1285 sv = (s32)ctxt->src.val & (s32)mask; in fetch_bit_operand()
1287 sv = (s64)ctxt->src.val & (s64)mask; in fetch_bit_operand()
1289 ctxt->dst.addr.mem.ea = address_mask(ctxt, in fetch_bit_operand()
1290 ctxt->dst.addr.mem.ea + (sv >> 3)); in fetch_bit_operand()
1294 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1; in fetch_bit_operand()
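fetch_bit_operand() encodes the BT/BTS/BTR/BTC quirk that a register bit offset can address memory beyond the operand itself: the effective address advances by whole operand-sized chunks (sv >> 3 bytes) and only the low bits of the offset select a bit within the final word. Worked through for a 32-bit BT with bit offset 77 (my illustration):

#include <stdio.h>

int main(void)
{
        long bitoff = 77;                           /* BT [mem], reg */
        int bytes = 4;                              /* 32-bit operand */
        long sv = bitoff & ~((long)bytes * 8 - 1);  /* 77 & ~31 = 64 */

        printf("ea += %ld bytes, test bit %ld\n",
               sv >> 3,                             /* 8: two dwords ahead */
               bitoff & (bytes * 8 - 1));           /* 13 */
        return 0;
}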
1301 struct read_cache *mc = &ctxt->mem_read; in read_emulated()
1303 if (mc->pos < mc->end) in read_emulated()
1306 if (KVM_EMULATOR_BUG_ON((mc->end + size) >= sizeof(mc->data), ctxt)) in read_emulated()
1309 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size, in read_emulated()
1310 &ctxt->exception); in read_emulated()
1314 mc->end += size; in read_emulated()
1317 memcpy(dest, mc->data + mc->pos, size); in read_emulated()
1318 mc->pos += size; in read_emulated()
1347 return ctxt->ops->write_emulated(ctxt, linear, data, size, in segmented_write()
1348 &ctxt->exception); in segmented_write()
1362 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data, in segmented_cmpxchg()
1363 size, &ctxt->exception); in segmented_cmpxchg()
1370 struct read_cache *rc = &ctxt->io_read; in pio_in_emulated()
1372 if (rc->pos == rc->end) { /* refill pio read ahead */ in pio_in_emulated()
1374 unsigned int count = ctxt->rep_prefix ? in pio_in_emulated()
1376 in_page = (ctxt->eflags & X86_EFLAGS_DF) ? in pio_in_emulated()
1378 PAGE_SIZE - offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)); in pio_in_emulated()
1379 n = min3(in_page, (unsigned int)sizeof(rc->data) / size, count); in pio_in_emulated()
1382 rc->pos = rc->end = 0; in pio_in_emulated()
1383 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n)) in pio_in_emulated()
1385 rc->end = n * size; in pio_in_emulated()
1388 if (ctxt->rep_prefix && (ctxt->d & String) && in pio_in_emulated()
1389 !(ctxt->eflags & X86_EFLAGS_DF)) { in pio_in_emulated()
1390 ctxt->dst.data = rc->data + rc->pos; in pio_in_emulated()
1391 ctxt->dst.type = OP_MEM_STR; in pio_in_emulated()
1392 ctxt->dst.count = (rc->end - rc->pos) / size; in pio_in_emulated()
1393 rc->pos = rc->end; in pio_in_emulated()
1395 memcpy(dest, rc->data + rc->pos, size); in pio_in_emulated()
1396 rc->pos += size; in pio_in_emulated()
1407 ctxt->ops->get_idt(ctxt, &dt); in read_interrupt_descriptor()
1419 const struct x86_emulate_ops *ops = ctxt->ops; in get_descriptor_table_ptr()
1427 if (!ops->get_segment(ctxt, &sel, &desc, &base3, in get_descriptor_table_ptr()
1431 dt->size = desc_limit_scaled(&desc); /* what if limit > 65535? */ in get_descriptor_table_ptr()
1432 dt->address = get_desc_base(&desc) | ((u64)base3 << 32); in get_descriptor_table_ptr()
1434 ops->get_gdt(ctxt, dt); in get_descriptor_table_ptr()
1455 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
1457 addr &= (u32)-1; in get_descriptor_ptr()
1498 if (!(ctxt->ops->get_cr(ctxt, 4) & X86_CR4_CET)) in emulator_is_ssp_invalid()
1501 if (ctxt->ops->get_msr(ctxt, MSR_EFER, &efer)) in emulator_is_ssp_invalid()
1504 /* SSP is guaranteed to be valid if the vCPU was already in 32-bit mode. */ in emulator_is_ssp_invalid()
1508 if (ctxt->ops->get_msr(ctxt, MSR_IA32_X_CET, &cet)) in emulator_is_ssp_invalid()
1514 if (ctxt->ops->get_msr(ctxt, MSR_KVM_INTERNAL_GUEST_SSP, &ssp)) in emulator_is_ssp_invalid()
1518 * On transfer from 64-bit mode to compatibility mode, SSP[63:32] must in emulator_is_ssp_invalid()
1519 * be 0, i.e. SSP must be a 32-bit value outside of 64-bit mode. in emulator_is_ssp_invalid()
1533 bool null_selector = !(selector & ~0x3); /* 0000-0003 are null */ in __load_segment_descriptor()
1541 if (ctxt->mode == X86EMUL_MODE_REAL) { in __load_segment_descriptor()
1542 /* set real mode segment descriptor (keep limit etc. for in __load_segment_descriptor()
1543 * unreal mode) */ in __load_segment_descriptor()
1544 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg); in __load_segment_descriptor()
1547 } else if (seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86) { in __load_segment_descriptor()
1564 /* NULL selector is not valid for TR, CS and (except for long mode) SS */ in __load_segment_descriptor()
1570 if (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl) in __load_segment_descriptor()
1574 * ctxt->ops->set_segment expects the CPL to be in in __load_segment_descriptor()
1575 * SS.DPL, so fake an expand-up 32-bit data segment. in __load_segment_descriptor()
1618 * Mode exceptions and IRET (handled above). In all other in __load_segment_descriptor()
1631 /* Outer-privilege level return is not implemented */ in __load_segment_descriptor()
1656 /* in long-mode d/b must be clear if l is set */ in __load_segment_descriptor()
1660 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
1669 /* CS(RPL) <- CPL */ in __load_segment_descriptor()
1707 } else if (ctxt->mode == X86EMUL_MODE_PROT64) { in __load_segment_descriptor()
1720 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc, in __load_segment_descriptor()
1721 sizeof(seg_desc), &ctxt->exception); in __load_segment_descriptor()
1726 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg); in __load_segment_descriptor()
1737 u8 cpl = ctxt->ops->cpl(ctxt); in load_segment_descriptor()
1750 ctxt->mode == X86EMUL_MODE_PROT64) in load_segment_descriptor()
1757 static void write_register_operand(struct operand *op) in write_register_operand() argument
1759 return assign_register(op->addr.reg, op->val, op->bytes); in write_register_operand()
1762 static int writeback(struct x86_emulate_ctxt *ctxt, struct operand *op) in writeback() argument
1764 switch (op->type) { in writeback()
1766 write_register_operand(op); in writeback()
1769 if (ctxt->lock_prefix) in writeback()
1771 op->addr.mem, in writeback()
1772 &op->orig_val, in writeback()
1773 &op->val, in writeback()
1774 op->bytes); in writeback()
1777 op->addr.mem, in writeback()
1778 &op->val, in writeback()
1779 op->bytes); in writeback()
1782 op->addr.mem, in writeback()
1783 op->data, in writeback()
1784 op->bytes * op->count); in writeback()
1786 kvm_write_sse_reg(op->addr.xmm, &op->vec_val); in writeback()
1789 kvm_write_mmx_reg(op->addr.mm, &op->mm_val); in writeback()
1804 rsp_increment(ctxt, -len); in emulate_push()
1814 ctxt->dst.type = OP_NONE; in em_push()
1815 return emulate_push(ctxt, &ctxt->src.val, ctxt->op_bytes); in em_push()
1836 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_pop()
1845 int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulate_popf()
1846 int cpl = ctxt->ops->cpl(ctxt); in emulate_popf()
1857 switch(ctxt->mode) { in emulate_popf()
1871 default: /* real mode */ in emulate_popf()
1877 (ctxt->eflags & ~change_mask) | (val & change_mask); in emulate_popf()
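The final assignment in emulate_popf() is a two-mask merge: bits allowed by change_mask (which shrinks with IOPL/CPL and in VM86 mode) are taken from the popped value, everything else keeps its live state. The merge in isolation:

#include <stdio.h>

int main(void)
{
        unsigned long eflags      = 0x00000202; /* IF currently set */
        unsigned long popped      = 0x00000003; /* CF set, IF clear */
        unsigned long change_mask = 0x000000ff; /* pretend IF is protected */

        eflags = (eflags & ~change_mask) | (popped & change_mask);
        printf("%#lx\n", eflags);               /* 0x203: CF taken, IF kept */
        return 0;
}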
1884 ctxt->dst.type = OP_REG; in em_popf()
1885 ctxt->dst.addr.reg = &ctxt->eflags; in em_popf()
1886 ctxt->dst.bytes = ctxt->op_bytes; in em_popf()
1887 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes); in em_popf()
1893 unsigned frame_size = ctxt->src.val; in em_enter()
1894 unsigned nesting_level = ctxt->src2.val & 31; in em_enter()
1907 reg_read(ctxt, VCPU_REGS_RSP) - frame_size, in em_enter()
1916 return emulate_pop(ctxt, reg_rmw(ctxt, VCPU_REGS_RBP), ctxt->op_bytes); in em_leave()
1921 int seg = ctxt->src2.val; in em_push_sreg()
1923 ctxt->src.val = get_segment_selector(ctxt, seg); in em_push_sreg()
1924 if (ctxt->op_bytes == 4) { in em_push_sreg()
1925 rsp_increment(ctxt, -2); in em_push_sreg()
1926 ctxt->op_bytes = 2; in em_push_sreg()
1934 int seg = ctxt->src2.val; in em_pop_sreg()
1943 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_pop_sreg()
1944 if (ctxt->op_bytes > 2) in em_pop_sreg()
1945 rsp_increment(ctxt, ctxt->op_bytes - 2); in em_pop_sreg()
1959 (ctxt->src.val = old_esp) : (ctxt->src.val = reg_read(ctxt, reg)); in em_pusha()
1973 ctxt->src.val = (unsigned long)ctxt->eflags & ~X86_EFLAGS_VM; in em_pushf()
1985 rsp_increment(ctxt, ctxt->op_bytes); in em_popa()
1986 --reg; in em_popa()
1989 rc = emulate_pop(ctxt, &val, ctxt->op_bytes); in em_popa()
1992 assign_register(reg_rmw(ctxt, reg), val, ctxt->op_bytes); in em_popa()
1993 --reg; in em_popa()
2000 const struct x86_emulate_ops *ops = ctxt->ops; in __emulate_int_real()
2008 ctxt->src.val = ctxt->eflags; in __emulate_int_real()
2013 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC); in __emulate_int_real()
2015 ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS); in __emulate_int_real()
2020 ctxt->src.val = ctxt->_eip; in __emulate_int_real()
2025 ops->get_idt(ctxt, &dt); in __emulate_int_real()
2042 ctxt->_eip = eip; in __emulate_int_real()
2060 switch(ctxt->mode) { in emulate_int()
2068 /* Protected mode interrupts unimplemented yet */ in emulate_int()
2090 rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes); in emulate_iret_real()
2098 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in emulate_iret_real()
2103 rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes); in emulate_iret_real()
2113 ctxt->_eip = temp_eip; in emulate_iret_real()
2115 if (ctxt->op_bytes == 4) in emulate_iret_real()
2116 ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask)); in emulate_iret_real()
2117 else if (ctxt->op_bytes == 2) { in emulate_iret_real()
2118 ctxt->eflags &= ~0xffff; in emulate_iret_real()
2119 ctxt->eflags |= temp_eflags; in emulate_iret_real()
2122 ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */ in emulate_iret_real()
2123 ctxt->eflags |= X86_EFLAGS_FIXED; in emulate_iret_real()
2124 ctxt->ops->set_nmi_mask(ctxt, false); in emulate_iret_real()
2131 switch(ctxt->mode) { in em_iret()
2139 /* iret from protected mode unimplemented yet */ in em_iret()
2149 u8 cpl = ctxt->ops->cpl(ctxt); in em_jmp_far()
2151 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_jmp_far()
2159 rc = assign_eip_far(ctxt, ctxt->src.val); in em_jmp_far()
2169 return assign_eip_near(ctxt, ctxt->src.val); in em_jmp_abs()
2177 old_eip = ctxt->_eip; in em_call_near_abs()
2178 rc = assign_eip_near(ctxt, ctxt->src.val); in em_call_near_abs()
2181 ctxt->src.val = old_eip; in em_call_near_abs()
2188 u64 old = ctxt->dst.orig_val64; in em_cmpxchg8b()
2190 if (ctxt->dst.bytes == 16) in em_cmpxchg8b()
2197 ctxt->eflags &= ~X86_EFLAGS_ZF; in em_cmpxchg8b()
2199 ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) | in em_cmpxchg8b()
2202 ctxt->eflags |= X86_EFLAGS_ZF; in em_cmpxchg8b()
2212 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret()
2224 int cpl = ctxt->ops->cpl(ctxt); in em_ret_far()
2227 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_far()
2230 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes); in em_ret_far()
2253 rsp_increment(ctxt, ctxt->src.val); in em_ret_far_imm()
2260 ctxt->dst.orig_val = ctxt->dst.val; in em_cmpxchg()
2261 ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2262 ctxt->src.orig_val = ctxt->src.val; in em_cmpxchg()
2263 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2266 if (ctxt->eflags & X86_EFLAGS_ZF) { in em_cmpxchg()
2268 ctxt->src.type = OP_NONE; in em_cmpxchg()
2269 ctxt->dst.val = ctxt->src.orig_val; in em_cmpxchg()
2272 ctxt->src.type = OP_REG; in em_cmpxchg()
2273 ctxt->src.addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in em_cmpxchg()
2274 ctxt->src.val = ctxt->dst.orig_val; in em_cmpxchg()
2275 /* Create write-cycle to dest by writing the same value */ in em_cmpxchg()
2276 ctxt->dst.val = ctxt->dst.orig_val; in em_cmpxchg()
2283 int seg = ctxt->src2.val; in em_lseg()
2287 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_lseg()
2293 ctxt->dst.val = ctxt->src.val; in em_lseg()
2299 if (!ctxt->ops->is_smm(ctxt)) in em_rsm()
2302 if (ctxt->ops->leave_smm(ctxt)) in em_rsm()
2303 ctxt->ops->triple_fault(ctxt); in em_rsm()
2311 cs->l = 0; /* will be adjusted later */ in setup_syscalls_segments()
2313 cs->g = 1; /* 4kb granularity */ in setup_syscalls_segments()
2315 cs->type = 0x0b; /* Read, Execute, Accessed */ in setup_syscalls_segments()
2316 cs->s = 1; in setup_syscalls_segments()
2317 cs->dpl = 0; /* will be adjusted later */ in setup_syscalls_segments()
2318 cs->p = 1; in setup_syscalls_segments()
2319 cs->d = 1; in setup_syscalls_segments()
2320 cs->avl = 0; in setup_syscalls_segments()
2324 ss->g = 1; /* 4kb granularity */ in setup_syscalls_segments()
2325 ss->s = 1; in setup_syscalls_segments()
2326 ss->type = 0x03; /* Read/Write, Accessed */ in setup_syscalls_segments()
2327 ss->d = 1; /* 32-bit stack segment */ in setup_syscalls_segments()
2328 ss->dpl = 0; in setup_syscalls_segments()
2329 ss->p = 1; in setup_syscalls_segments()
2330 ss->l = 0; in setup_syscalls_segments()
2331 ss->avl = 0; in setup_syscalls_segments()
2336 const struct x86_emulate_ops *ops = ctxt->ops; in em_syscall()
2342 /* syscall is not available in real mode */ in em_syscall()
2343 if (ctxt->mode == X86EMUL_MODE_REAL || in em_syscall()
2344 ctxt->mode == X86EMUL_MODE_VM86) in em_syscall()
2348 * Intel compatible CPUs only support SYSCALL in 64-bit mode, whereas in em_syscall()
2349 * AMD allows SYSCALL in any flavor of protected mode. Note, it's in em_syscall()
2351 * as SYSCALL won't fault in the "wrong" mode, i.e. there is no #UD in em_syscall()
2352 * for KVM to trap-and-emulate, unlike emulating AMD on Intel. in em_syscall()
2354 if (ctxt->mode != X86EMUL_MODE_PROT64 && in em_syscall()
2355 ctxt->ops->guest_cpuid_is_intel_compatible(ctxt)) in em_syscall()
2358 ops->get_msr(ctxt, MSR_EFER, &efer); in em_syscall()
2363 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2372 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_syscall()
2373 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_syscall()
2375 *reg_write(ctxt, VCPU_REGS_RCX) = ctxt->_eip; in em_syscall()
2378 *reg_write(ctxt, VCPU_REGS_R11) = ctxt->eflags; in em_syscall()
2380 ops->get_msr(ctxt, in em_syscall()
2381 ctxt->mode == X86EMUL_MODE_PROT64 ? in em_syscall()
2383 ctxt->_eip = msr_data; in em_syscall()
2385 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data); in em_syscall()
2386 ctxt->eflags &= ~msr_data; in em_syscall()
2387 ctxt->eflags |= X86_EFLAGS_FIXED; in em_syscall()
2390 /* legacy mode */ in em_syscall()
2391 ops->get_msr(ctxt, MSR_STAR, &msr_data); in em_syscall()
2392 ctxt->_eip = (u32)msr_data; in em_syscall()
2394 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_syscall()
2397 ctxt->tf = (ctxt->eflags & X86_EFLAGS_TF) != 0; in em_syscall()
2403 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysenter()
2409 ops->get_msr(ctxt, MSR_EFER, &efer); in em_sysenter()
2410 /* inject #GP if in real mode */ in em_sysenter()
2411 if (ctxt->mode == X86EMUL_MODE_REAL) in em_sysenter()
2415 * Intel's architecture allows SYSENTER in compatibility mode, but AMD in em_sysenter()
2416 * does not. Note, AMD does allow SYSENTER in legacy protected mode. in em_sysenter()
2418 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) && in em_sysenter()
2419 !ctxt->ops->guest_cpuid_is_intel_compatible(ctxt)) in em_sysenter()
2422 /* sysenter/sysexit have not been tested in 64-bit mode. */ in em_sysenter()
2423 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_sysenter()
2426 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysenter()
2431 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF); in em_sysenter()
2439 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysenter()
2440 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysenter()
2442 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data); in em_sysenter()
2443 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data; in em_sysenter()
2445 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data); in em_sysenter()
2449 ctxt->mode = X86EMUL_MODE_PROT64; in em_sysenter()
2456 const struct x86_emulate_ops *ops = ctxt->ops; in em_sysexit()
2462 /* inject #GP if in real mode or Virtual 8086 mode */ in em_sysexit()
2463 if (ctxt->mode == X86EMUL_MODE_REAL || in em_sysexit()
2464 ctxt->mode == X86EMUL_MODE_VM86) in em_sysexit()
2469 if ((ctxt->rex_prefix & 0x8) != 0x0) in em_sysexit()
2479 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data); in em_sysexit()
2504 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS); in em_sysexit()
2505 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS); in em_sysexit()
2507 ctxt->_eip = rdx; in em_sysexit()
2508 ctxt->mode = usermode; in em_sysexit()
2517 if (ctxt->mode == X86EMUL_MODE_REAL) in emulator_bad_iopl()
2519 if (ctxt->mode == X86EMUL_MODE_VM86) in emulator_bad_iopl()
2521 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT; in emulator_bad_iopl()
2522 return ctxt->ops->cpl(ctxt) > iopl; in emulator_bad_iopl()
2531 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_io_port_access_allowed()
2536 unsigned mask = (1 << len) - 1; in emulator_io_port_access_allowed()
2547 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR); in emulator_io_port_access_allowed()
2556 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true); in emulator_io_port_access_allowed()
2561 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true); in emulator_io_port_access_allowed()
2572 if (ctxt->perm_ok) in emulator_io_permitted()
2579 ctxt->perm_ok = true; in emulator_io_permitted()
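The TSS I/O bitmap walk above reads 16 bits starting at byte port/8 and rejects the access if any of the len bits beginning at port%8 is set; perm_ok then caches a successful check for the rest of the instruction. The bit test in miniature (my stand-alone model):

#include <stdio.h>

/* One bit per port, set = denied; a len-byte access needs all bits clear. */
static int io_allowed(const unsigned char *bitmap, unsigned port, unsigned len)
{
        unsigned mask = (1u << len) - 1;
        unsigned perm = bitmap[port / 8] | ((unsigned)bitmap[port / 8 + 1] << 8);

        return !((perm >> (port & 7)) & mask);
}

int main(void)
{
        unsigned char bitmap[16] = { 0x08 };    /* bit 3: port 3 denied */

        printf("port 3: %d, port 4: %d\n",
               io_allowed(bitmap, 3, 1),        /* 0 */
               io_allowed(bitmap, 4, 1));       /* 1 */
        return 0;
}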
2588 * manner when ECX is zero due to REP-string optimizations. in string_registers_quirk()
2593 if (ctxt->ad_bytes != 4) in string_registers_quirk()
2597 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true); in string_registers_quirk()
2603 switch (ctxt->b) { in string_registers_quirk()
2606 *reg_rmw(ctxt, VCPU_REGS_RSI) &= (u32)-1; in string_registers_quirk()
2610 *reg_rmw(ctxt, VCPU_REGS_RDI) &= (u32)-1; in string_registers_quirk()
2618 tss->ip = ctxt->_eip; in save_state_to_tss16()
2619 tss->flag = ctxt->eflags; in save_state_to_tss16()
2620 tss->ax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss16()
2621 tss->cx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss16()
2622 tss->dx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss16()
2623 tss->bx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss16()
2624 tss->sp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss16()
2625 tss->bp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss16()
2626 tss->si = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss16()
2627 tss->di = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss16()
2629 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss16()
2630 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss16()
2631 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss16()
2632 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss16()
2633 tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR); in save_state_to_tss16()
2642 ctxt->_eip = tss->ip; in load_state_from_tss16()
2643 ctxt->eflags = tss->flag | 2; in load_state_from_tss16()
2644 *reg_write(ctxt, VCPU_REGS_RAX) = tss->ax; in load_state_from_tss16()
2645 *reg_write(ctxt, VCPU_REGS_RCX) = tss->cx; in load_state_from_tss16()
2646 *reg_write(ctxt, VCPU_REGS_RDX) = tss->dx; in load_state_from_tss16()
2647 *reg_write(ctxt, VCPU_REGS_RBX) = tss->bx; in load_state_from_tss16()
2648 *reg_write(ctxt, VCPU_REGS_RSP) = tss->sp; in load_state_from_tss16()
2649 *reg_write(ctxt, VCPU_REGS_RBP) = tss->bp; in load_state_from_tss16()
2650 *reg_write(ctxt, VCPU_REGS_RSI) = tss->si; in load_state_from_tss16()
2651 *reg_write(ctxt, VCPU_REGS_RDI) = tss->di; in load_state_from_tss16()
2657 set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR); in load_state_from_tss16()
2658 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss16()
2659 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss16()
2660 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss16()
2661 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss16()
2663 cpl = tss->cs & 3; in load_state_from_tss16()
2669 ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl, in load_state_from_tss16()
2673 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss16()
2677 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss16()
2681 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss16()
2685 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss16()
2731 tss->eip = ctxt->_eip; in save_state_to_tss32()
2732 tss->eflags = ctxt->eflags; in save_state_to_tss32()
2733 tss->eax = reg_read(ctxt, VCPU_REGS_RAX); in save_state_to_tss32()
2734 tss->ecx = reg_read(ctxt, VCPU_REGS_RCX); in save_state_to_tss32()
2735 tss->edx = reg_read(ctxt, VCPU_REGS_RDX); in save_state_to_tss32()
2736 tss->ebx = reg_read(ctxt, VCPU_REGS_RBX); in save_state_to_tss32()
2737 tss->esp = reg_read(ctxt, VCPU_REGS_RSP); in save_state_to_tss32()
2738 tss->ebp = reg_read(ctxt, VCPU_REGS_RBP); in save_state_to_tss32()
2739 tss->esi = reg_read(ctxt, VCPU_REGS_RSI); in save_state_to_tss32()
2740 tss->edi = reg_read(ctxt, VCPU_REGS_RDI); in save_state_to_tss32()
2742 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES); in save_state_to_tss32()
2743 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS); in save_state_to_tss32()
2744 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS); in save_state_to_tss32()
2745 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS); in save_state_to_tss32()
2746 tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS); in save_state_to_tss32()
2747 tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS); in save_state_to_tss32()
2756 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3)) in load_state_from_tss32()
2758 ctxt->_eip = tss->eip; in load_state_from_tss32()
2759 ctxt->eflags = tss->eflags | 2; in load_state_from_tss32()
2762 *reg_write(ctxt, VCPU_REGS_RAX) = tss->eax; in load_state_from_tss32()
2763 *reg_write(ctxt, VCPU_REGS_RCX) = tss->ecx; in load_state_from_tss32()
2764 *reg_write(ctxt, VCPU_REGS_RDX) = tss->edx; in load_state_from_tss32()
2765 *reg_write(ctxt, VCPU_REGS_RBX) = tss->ebx; in load_state_from_tss32()
2766 *reg_write(ctxt, VCPU_REGS_RSP) = tss->esp; in load_state_from_tss32()
2767 *reg_write(ctxt, VCPU_REGS_RBP) = tss->ebp; in load_state_from_tss32()
2768 *reg_write(ctxt, VCPU_REGS_RSI) = tss->esi; in load_state_from_tss32()
2769 *reg_write(ctxt, VCPU_REGS_RDI) = tss->edi; in load_state_from_tss32()
2776 set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR); in load_state_from_tss32()
2777 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES); in load_state_from_tss32()
2778 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS); in load_state_from_tss32()
2779 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS); in load_state_from_tss32()
2780 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS); in load_state_from_tss32()
2781 set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS); in load_state_from_tss32()
2782 set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS); in load_state_from_tss32()
2785 * If we're switching between Protected Mode and VM86, we need to make in load_state_from_tss32()
2786 * sure to update the mode before loading the segment descriptors so in load_state_from_tss32()
2789 if (ctxt->eflags & X86_EFLAGS_VM) { in load_state_from_tss32()
2790 ctxt->mode = X86EMUL_MODE_VM86; in load_state_from_tss32()
2793 ctxt->mode = X86EMUL_MODE_PROT32; in load_state_from_tss32()
2794 cpl = tss->cs & 3; in load_state_from_tss32()
2801 ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR, in load_state_from_tss32()
2805 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl, in load_state_from_tss32()
2809 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl, in load_state_from_tss32()
2813 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl, in load_state_from_tss32()
2817 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl, in load_state_from_tss32()
2821 ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl, in load_state_from_tss32()
2825 ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl, in load_state_from_tss32()
2848 ldt_sel_offset - eip_offset); in task_switch_32()
2873 const struct x86_emulate_ops *ops = ctxt->ops; in emulator_do_task_switch()
2878 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR); in emulator_do_task_switch()
2898 * 3. jmp/call to TSS/task-gate: No check is performed since the in emulator_do_task_switch()
2902 if (idt_index != -1) { in emulator_do_task_switch()
2913 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl) in emulator_do_task_switch()
2931 ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT; in emulator_do_task_switch()
2947 ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT; in emulator_do_task_switch()
2954 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS); in emulator_do_task_switch()
2955 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR); in emulator_do_task_switch()
2958 ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2; in emulator_do_task_switch()
2959 ctxt->lock_prefix = 0; in emulator_do_task_switch()
2960 ctxt->src.val = (unsigned long) error_code; in emulator_do_task_switch()
2964 dr7 = ops->get_dr(ctxt, 7); in emulator_do_task_switch()
2965 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN)); in emulator_do_task_switch()
2977 ctxt->_eip = ctxt->eip; in emulator_task_switch()
2978 ctxt->dst.type = OP_NONE; in emulator_task_switch()
2984 ctxt->eip = ctxt->_eip; in emulator_task_switch()
2992 struct operand *op) in string_addr_inc() argument
2994 int df = (ctxt->eflags & X86_EFLAGS_DF) ? -op->count : op->count; in string_addr_inc()
2996 register_address_increment(ctxt, reg, df * op->bytes); in string_addr_inc()
2997 op->addr.mem.ea = register_address(ctxt, reg); in string_addr_inc()
3005 cf = ctxt->eflags & X86_EFLAGS_CF; in em_das()
3006 al = ctxt->dst.val; in em_das()
3011 af = ctxt->eflags & X86_EFLAGS_AF; in em_das()
3013 al -= 6; in em_das()
3020 al -= 0x60; in em_das()
3024 ctxt->dst.val = al; in em_das()
3026 ctxt->src.type = OP_IMM; in em_das()
3027 ctxt->src.val = 0; in em_das()
3028 ctxt->src.bytes = 1; in em_das()
3030 ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF); in em_das()
3032 ctxt->eflags |= X86_EFLAGS_CF; in em_das()
3034 ctxt->eflags |= X86_EFLAGS_AF; in em_das()
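DAS fixes up AL after a binary subtraction of packed BCD digits: subtract 6 if the low nibble overflowed nine (or AF says it borrowed), then 0x60 for the high nibble. A worked example of the flow above, with the flag bookkeeping reduced to two ints:

#include <stdio.h>

int main(void)
{
        unsigned char al = 0x2c, old_al = al;   /* 0x42 - 0x16 done in binary */
        int cf = 0, af = 1;                     /* the low nibble borrowed */

        if ((al & 0x0f) > 9 || af)
                al -= 6;                        /* 0x2c -> 0x26 */
        if (old_al > 0x99 || cf)
                al -= 0x60;

        printf("AL=%#x\n", al);                 /* 0x26, the BCD digits of 42 - 16 */
        return 0;
}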
3042 if (ctxt->src.val == 0) in em_aam()
3045 al = ctxt->dst.val & 0xff; in em_aam()
3046 ah = al / ctxt->src.val; in em_aam()
3047 al %= ctxt->src.val; in em_aam()
3049 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8); in em_aam()
3052 ctxt->src.type = OP_IMM; in em_aam()
3053 ctxt->src.val = 0; in em_aam()
3054 ctxt->src.bytes = 1; in em_aam()
3062 u8 al = ctxt->dst.val & 0xff; in em_aad()
3063 u8 ah = (ctxt->dst.val >> 8) & 0xff; in em_aad()
3065 al = (al + (ah * ctxt->src.val)) & 0xff; in em_aad()
3067 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al; in em_aad()
3070 ctxt->src.type = OP_IMM; in em_aad()
3071 ctxt->src.val = 0; in em_aad()
3072 ctxt->src.bytes = 1; in em_aad()
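AAM and AAD are a division/recombination pair over the immediate (almost always 10), which a short example shows more directly than the register plumbing in em_aam() and em_aad():

#include <stdio.h>

int main(void)
{
        unsigned char al = 53, ah;

        ah = al / 10;                   /* AAM 10: AH = 5 */
        al %= 10;                       /*         AL = 3 */
        printf("AAM: AH=%u AL=%u\n", ah, al);

        al = (al + ah * 10) & 0xff;     /* AAD 10: back to binary 53 */
        printf("AAD: AL=%u\n", al);
        return 0;
}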
3081 long rel = ctxt->src.val; in em_call()
3083 ctxt->src.val = (unsigned long)ctxt->_eip; in em_call()
3096 const struct x86_emulate_ops *ops = ctxt->ops; in em_call_far()
3097 int cpl = ctxt->ops->cpl(ctxt); in em_call_far()
3098 enum x86emul_mode prev_mode = ctxt->mode; in em_call_far()
3100 old_eip = ctxt->_eip; in em_call_far()
3101 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS); in em_call_far()
3103 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2); in em_call_far()
3109 rc = assign_eip_far(ctxt, ctxt->src.val); in em_call_far()
3113 ctxt->src.val = old_cs; in em_call_far()
3118 ctxt->src.val = old_eip; in em_call_far()
3128 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS); in em_call_far()
3129 ctxt->mode = prev_mode; in em_call_far()
3139 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes); in em_ret_near_imm()
3145 rsp_increment(ctxt, ctxt->src.val); in em_ret_near_imm()
3152 ctxt->src.val = ctxt->dst.val; in em_xchg()
3153 write_register_operand(&ctxt->src); in em_xchg()
3156 ctxt->dst.val = ctxt->src.orig_val; in em_xchg()
3157 ctxt->lock_prefix = 1; in em_xchg()
3163 ctxt->dst.val = ctxt->src2.val; in em_imul_3op()
3169 ctxt->dst.type = OP_REG; in em_cwd()
3170 ctxt->dst.bytes = ctxt->src.bytes; in em_cwd()
3171 ctxt->dst.addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in em_cwd()
3172 ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1); in em_cwd()
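The em_cwd() expression is a branch-free sign spread: shifting the sign bit down to bit 0 leaves 0 or 1, subtracting 1 turns that into all-zeros or all-ones, and the complement flips it so DX/EDX/RDX becomes the sign extension of the accumulator. Checking the trick for a negative 16-bit source:

#include <stdio.h>

int main(void)
{
        unsigned long src = 0x8000;     /* AX with the sign bit set */
        int bytes = 2;
        unsigned long dx = ~((src >> (bytes * 8 - 1)) - 1);

        printf("%#lx\n", dx & 0xffff);  /* 0xffff: CWD fills DX with the sign */
        return 0;
}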
3181 if (!ctxt->ops->guest_has_rdpid(ctxt)) in em_rdpid()
3184 ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux); in em_rdpid()
3185 ctxt->dst.val = tsc_aux; in em_rdpid()
3193 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc); in em_rdtsc()
3203 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc)) in em_rdpmc()
3212 memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr)); in em_mov()
3220 if (!ctxt->ops->guest_has_movbe(ctxt)) in em_movbe()
3223 switch (ctxt->op_bytes) { in em_movbe()
3230 * Both casting ->valptr and ->val to u16 breaks strict aliasing in em_movbe()
3233 tmp = (u16)ctxt->src.val; in em_movbe()
3234 ctxt->dst.val &= ~0xffffUL; in em_movbe()
3235 ctxt->dst.val |= (unsigned long)swab16(tmp); in em_movbe()
3238 ctxt->dst.val = swab32((u32)ctxt->src.val); in em_movbe()
3241 ctxt->dst.val = swab64(ctxt->src.val); in em_movbe()
3251 int cr_num = ctxt->modrm_reg; in em_cr_write()
3254 if (ctxt->ops->set_cr(ctxt, cr_num, ctxt->src.val)) in em_cr_write()
3258 ctxt->dst.type = OP_NONE; in em_cr_write()
3263 * which can affect the cpu's execution mode. in em_cr_write()
3277 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_dr_write()
3278 val = ctxt->src.val & ~0ULL; in em_dr_write()
3280 val = ctxt->src.val & ~0U; in em_dr_write()
3283 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0) in em_dr_write()
3287 ctxt->dst.type = OP_NONE; in em_dr_write()
3299 r = ctxt->ops->set_msr_with_filter(ctxt, msr_index, msr_data); in em_wrmsr()
3313 r = ctxt->ops->get_msr_with_filter(ctxt, msr_index, &msr_data); in em_rdmsr()
3328 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in em_store_sreg()
3329 ctxt->ops->cpl(ctxt) > 0) in em_store_sreg()
3332 ctxt->dst.val = get_segment_selector(ctxt, segment); in em_store_sreg()
3333 if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM) in em_store_sreg()
3334 ctxt->dst.bytes = 2; in em_store_sreg()
3340 if (ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_rm_sreg()
3343 return em_store_sreg(ctxt, ctxt->modrm_reg); in em_mov_rm_sreg()
3348 u16 sel = ctxt->src.val; in em_mov_sreg_rm()
3350 if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS) in em_mov_sreg_rm()
3353 if (ctxt->modrm_reg == VCPU_SREG_SS) in em_mov_sreg_rm()
3354 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS; in em_mov_sreg_rm()
3357 ctxt->dst.type = OP_NONE; in em_mov_sreg_rm()
3358 return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg); in em_mov_sreg_rm()
3368 u16 sel = ctxt->src.val; in em_lldt()
3371 ctxt->dst.type = OP_NONE; in em_lldt()
3382 u16 sel = ctxt->src.val; in em_ltr()
3385 ctxt->dst.type = OP_NONE; in em_ltr()
3395 rc = __linearize(ctxt, ctxt->src.addr.mem, &max_size, 1, ctxt->mode, in em_invlpg()
3398 ctxt->ops->invlpg(ctxt, linear); in em_invlpg()
3400 ctxt->dst.type = OP_NONE; in em_invlpg()
3408 cr0 = ctxt->ops->get_cr(ctxt, 0); in em_clts()
3410 ctxt->ops->set_cr(ctxt, 0, cr0); in em_clts()
3416 int rc = ctxt->ops->fix_hypercall(ctxt); in em_hypercall()
3421 /* Let the processor re-execute the fixed hypercall */ in em_hypercall()
3422 ctxt->_eip = ctxt->eip; in em_hypercall()
3424 ctxt->dst.type = OP_NONE; in em_hypercall()
3434 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in emulate_store_desc_ptr()
3435 ctxt->ops->cpl(ctxt) > 0) in emulate_store_desc_ptr()
3438 if (ctxt->mode == X86EMUL_MODE_PROT64) in emulate_store_desc_ptr()
3439 ctxt->op_bytes = 8; in emulate_store_desc_ptr()
3441 if (ctxt->op_bytes == 2) { in emulate_store_desc_ptr()
3442 ctxt->op_bytes = 4; in emulate_store_desc_ptr()
3446 ctxt->dst.type = OP_NONE; in emulate_store_desc_ptr()
3447 return segmented_write_std(ctxt, ctxt->dst.addr.mem, in emulate_store_desc_ptr()
3448 &desc_ptr, 2 + ctxt->op_bytes); in emulate_store_desc_ptr()
3453 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt); in em_sgdt()
3458 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt); in em_sidt()
3466 if (ctxt->mode == X86EMUL_MODE_PROT64) in em_lgdt_lidt()
3467 ctxt->op_bytes = 8; in em_lgdt_lidt()
3468 rc = read_descriptor(ctxt, ctxt->src.addr.mem, in em_lgdt_lidt()
3470 ctxt->op_bytes); in em_lgdt_lidt()
3473 if (ctxt->mode == X86EMUL_MODE_PROT64 && in em_lgdt_lidt()
3478 ctxt->ops->set_gdt(ctxt, &desc_ptr); in em_lgdt_lidt()
3480 ctxt->ops->set_idt(ctxt, &desc_ptr); in em_lgdt_lidt()
3482 ctxt->dst.type = OP_NONE; in em_lgdt_lidt()
3498 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) && in em_smsw()
3499 ctxt->ops->cpl(ctxt) > 0) in em_smsw()
3502 if (ctxt->dst.type == OP_MEM) in em_smsw()
3503 ctxt->dst.bytes = 2; in em_smsw()
3504 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0); in em_smsw()
3510 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul) in em_lmsw()
3511 | (ctxt->src.val & 0x0f)); in em_lmsw()
3512 ctxt->dst.type = OP_NONE; in em_lmsw()
3520 register_address_increment(ctxt, VCPU_REGS_RCX, -1); in em_loop()
3522 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags))) in em_loop()
3523 rc = jmp_rel(ctxt, ctxt->src.val); in em_loop()
3533 rc = jmp_rel(ctxt, ctxt->src.val); in em_jcxz()
3540 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val, in em_in()
3541 &ctxt->dst.val)) in em_in()
3549 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val, in em_out()
3550 &ctxt->src.val, 1); in em_out()
3552 ctxt->dst.type = OP_NONE; in em_out()
3561 ctxt->eflags &= ~X86_EFLAGS_IF; in em_cli()
3570 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI; in em_sti()
3571 ctxt->eflags |= X86_EFLAGS_IF; in em_sti()
3580 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr); in em_cpuid()
3582 ctxt->ops->cpl(ctxt)) { in em_cpuid()
3588 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false); in em_cpuid()
3604 ctxt->eflags &= ~0xffUL; in em_sahf()
3605 ctxt->eflags |= flags | X86_EFLAGS_FIXED; in em_sahf()
3612 *reg_rmw(ctxt, VCPU_REGS_RAX) |= (ctxt->eflags & 0xff) << 8; in em_lahf()
3618 switch (ctxt->op_bytes) { in em_bswap()
3621 asm("bswap %0" : "+r"(ctxt->dst.val)); in em_bswap()
3625 asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val)); in em_bswap()
3645 ctxt->dst.val = (s32) ctxt->src.val; in em_movsxd()
3651 if (!ctxt->ops->guest_has_fxsr(ctxt)) in check_fxsr()
3654 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM)) in check_fxsr()
3661 if (ctxt->mode >= X86EMUL_MODE_PROT64) in check_fxsr()
3668 * Hardware doesn't save and restore XMM 0-7 without CR4.OSFXSR, but does save
3679 if (ctxt->mode == X86EMUL_MODE_PROT64) in fxstate_size()
3682 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR; in fxstate_size()
3687 * FXSAVE and FXRSTOR have 4 different formats depending on execution mode,
3688 * 1) 16 bit mode
3689 * 2) 32 bit mode
3690 * - like (1), but FIP and FDP (foo) are only 16 bit. At least Intel CPUs
3693 * 3) 64-bit mode with REX.W prefix
3694 * - like (2), but XMM 8-15 are being saved and restored
3695 * 4) 64-bit mode without REX.W prefix
3696 * - like (3), but FIP and FDP are 64 bit
3698 * Emulation uses (3) for (1) and (2) and preserves XMM 8-15 to reach the
3722 return segmented_write_std(ctxt, ctxt->memop.addr.mem, &fx_state, in em_fxsave()
3741 __fxstate_size(16) - used_size); in fxregs_fixup()
3757 rc = segmented_read_std(ctxt, ctxt->memop.addr.mem, &fx_state, size); in em_fxrstor()
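Given the format rules in the comment above, the save-area size the emulation touches collapses to three values; a toy version of the size choice, assuming the standard fxregs layout (XMM area starting at byte 160 of the 512-byte image, 16 bytes per register):

#include <stddef.h>

static size_t fx_size(int long_mode, int cr4_osfxsr)
{
        int nregs = long_mode ? 16 : (cr4_osfxsr ? 8 : 0);

        return 160 + (size_t)nregs * 16;        /* 416, 288 or 160 bytes */
}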
3787 if (!(ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSXSAVE)) in em_xsetbv()
3794 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax)) in em_xsetbv()
3814 if (!valid_cr(ctxt->modrm_reg)) in check_cr_access()
3822 int dr = ctxt->modrm_reg; in check_dr_read()
3828 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_dr_read()
3832 if (ctxt->ops->get_dr(ctxt, 7) & DR7_GD) { in check_dr_read()
3835 dr6 = ctxt->ops->get_dr(ctxt, 6); in check_dr_read()
3838 ctxt->ops->set_dr(ctxt, 6, dr6); in check_dr_read()
3847 u64 new_val = ctxt->src.val64; in check_dr_write()
3848 int dr = ctxt->modrm_reg; in check_dr_write()
3860 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_svme()
3881 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdtsc()
3883 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt)) in check_rdtsc()
3891 u64 cr4 = ctxt->ops->get_cr(ctxt, 4); in check_rdpmc()
3895 * VMware allows access to these Pseudo-PMCs even when read via RDPMC in check_rdpmc()
3904 * protected mode. in check_rdpmc()
3906 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) || in check_rdpmc()
3907 ctxt->ops->check_rdpmc_early(ctxt, rcx)) in check_rdpmc()
3915 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u); in check_perm_in()
3916 if (!emulator_io_permitted(ctxt, ctxt->src.val, ctxt->dst.bytes)) in check_perm_in()
3924 ctxt->src.bytes = min(ctxt->src.bytes, 4u); in check_perm_out()
3925 if (!emulator_io_permitted(ctxt, ctxt->dst.val, ctxt->src.bytes)) in check_perm_out()
4143 /* 0xC0 - 0xC7 */
4145 /* 0xC8 - 0xCF */
4147 /* 0xD0 - 0xD7 */
4149 /* 0xD8 - 0xDF */
4151 /* 0xE0 - 0xE7 */
4153 /* 0xE8 - 0xEF */
4155 /* 0xF0 - 0xF7 */
4157 /* 0xF8 - 0xFF */
4164 /* 0xC0 - 0xC7 */
4166 /* 0xC8 - 0xCF */
4168 /* 0xD0 - 0xD7 */
4170 /* 0xD8 - 0xDF */
4172 /* 0xE0 - 0xE7 */
4174 /* 0xE8 - 0xEF */
4176 /* 0xF0 - 0xF7 */
4178 /* 0xF8 - 0xFF */
4185 /* 0xC0 - 0xC7 */
4187 /* 0xC8 - 0xCF */
4189 /* 0xD0 - 0xD7 */
4191 /* 0xD8 - 0xDF */
4193 /* 0xE0 - 0xE7 */
4195 /* 0xE8 - 0xEF */
4197 /* 0xF0 - 0xF7 */
4199 /* 0xF8 - 0xFF */
4216 /* 0x00 - 0x07 */
4220 /* 0x08 - 0x0F */
4224 /* 0x10 - 0x17 */
4228 /* 0x18 - 0x1F */
4232 /* 0x20 - 0x27 */
4234 /* 0x28 - 0x2F */
4236 /* 0x30 - 0x37 */
4238 /* 0x38 - 0x3F */
4240 /* 0x40 - 0x4F */
4242 /* 0x50 - 0x57 */
4244 /* 0x58 - 0x5F */
4246 /* 0x60 - 0x67 */
4251 /* 0x68 - 0x6F */
4258 /* 0x70 - 0x7F */
4260 /* 0x80 - 0x87 */
4267 /* 0x88 - 0x8F */
4274 /* 0x90 - 0x97 */
4276 /* 0x98 - 0x9F */
4282 /* 0xA0 - 0xA7 */
4287 /* 0xA8 - 0xAF */
4292 /* 0xB0 - 0xB7 */
4294 /* 0xB8 - 0xBF */
4296 /* 0xC0 - 0xC7 */
4303 /* 0xC8 - 0xCF */
4311 /* 0xD0 - 0xD7 */
4318 /* 0xD8 - 0xDF */
4320 /* 0xE0 - 0xE7 */
4325 /* 0xE8 - 0xEF */
4332 /* 0xF0 - 0xF7 */
4336 /* 0xF8 - 0xFF */
4343 /* 0x00 - 0x0F */
4349 /* 0x10 - 0x1F */
4359 /* 0x20 - 0x2F */
4371 /* 0x30 - 0x3F */
4380 /* 0x40 - 0x4F */
4382 /* 0x50 - 0x5F */
4384 /* 0x60 - 0x6F */
4389 /* 0x70 - 0x7F */
4394 /* 0x80 - 0x8F */
4396 /* 0x90 - 0x9F */
4398 /* 0xA0 - 0xA7 */
4404 /* 0xA8 - 0xAF */
4411 /* 0xB0 - 0xB7 */
4418 /* 0xB8 - 0xBF */
4425 /* 0xC0 - 0xC7 */
4429 /* 0xC8 - 0xCF */
4431 /* 0xD0 - 0xDF */
4433 /* 0xE0 - 0xEF */
4436 /* 0xF0 - 0xFF */
4461 /* 0x00 - 0x7f */
4463 /* 0x80 - 0xef */
4465 /* 0xf0 - 0xf1 */
4468 /* 0xf2 - 0xff */
4490 return ctxt->d & ShadowStack; in is_shstk_instruction()
4495 u64 flags = ctxt->d; in is_ibt_instruction()
4504 * state. IRET #GPs when returning to virtual-8086 and IBT or SHSTK is in is_ibt_instruction()
4509 return ctxt->execute != em_iret && in is_ibt_instruction()
4510 ctxt->execute != em_ret_far && in is_ibt_instruction()
4511 ctxt->execute != em_ret_far_imm && in is_ibt_instruction()
4512 ctxt->execute != em_sysexit; in is_ibt_instruction()
4546 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in imm_size()
4552 static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_imm() argument
4557 op->type = OP_IMM; in decode_imm()
4558 op->bytes = size; in decode_imm()
4559 op->addr.mem.ea = ctxt->_eip; in decode_imm()
4560 /* NB. Immediates are sign-extended as necessary. */ in decode_imm()
4561 switch (op->bytes) { in decode_imm()
4563 op->val = insn_fetch(s8, ctxt); in decode_imm()
4566 op->val = insn_fetch(s16, ctxt); in decode_imm()
4569 op->val = insn_fetch(s32, ctxt); in decode_imm()
4572 op->val = insn_fetch(s64, ctxt); in decode_imm()
4576 switch (op->bytes) { in decode_imm()
4578 op->val &= 0xff; in decode_imm()
4581 op->val &= 0xffff; in decode_imm()
4584 op->val &= 0xffffffff; in decode_imm()
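decode_imm() fetches at the immediate's natural width into a signed type, so the assignment to op->val sign-extends for free; the trailing switch masks the value back down when sign_extension is false. The two behaviours side by side:

#include <stdio.h>

int main(void)
{
        signed char imm8 = (signed char)0xf0;   /* fetched byte */
        unsigned long val = (unsigned long)(long)imm8;

        printf("sign-extended: %#lx\n", val);        /* 0xff...f0 */
        printf("masked back:   %#lx\n", val & 0xff); /* 0xf0 */
        return 0;
}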
4592 static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op, in decode_operand() argument
4599 decode_register_operand(ctxt, op); in decode_operand()
4602 rc = decode_imm(ctxt, op, 1, false); in decode_operand()
4605 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4607 *op = ctxt->memop; in decode_operand()
4608 ctxt->memopp = op; in decode_operand()
4609 if (ctxt->d & BitOp) in decode_operand()
4611 op->orig_val = op->val; in decode_operand()
4614 ctxt->memop.bytes = (ctxt->op_bytes == 8) ? 16 : 8; in decode_operand()
4617 op->type = OP_REG; in decode_operand()
4618 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4619 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
4620 fetch_register_operand(op); in decode_operand()
4621 op->orig_val = op->val; in decode_operand()
4624 op->type = OP_REG; in decode_operand()
4625 op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes; in decode_operand()
4626 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX); in decode_operand()
4627 fetch_register_operand(op); in decode_operand()
4628 op->orig_val = op->val; in decode_operand()
4631 if (ctxt->d & ByteOp) { in decode_operand()
4632 op->type = OP_NONE; in decode_operand()
4635 op->type = OP_REG; in decode_operand()
4636 op->bytes = ctxt->op_bytes; in decode_operand()
4637 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
4638 fetch_register_operand(op); in decode_operand()
4639 op->orig_val = op->val; in decode_operand()
4642 op->type = OP_MEM; in decode_operand()
4643 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4644 op->addr.mem.ea = in decode_operand()
4646 op->addr.mem.seg = VCPU_SREG_ES; in decode_operand()
4647 op->val = 0; in decode_operand()
4648 op->count = 1; in decode_operand()
4651 op->type = OP_REG; in decode_operand()
4652 op->bytes = 2; in decode_operand()
4653 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX); in decode_operand()
4654 fetch_register_operand(op); in decode_operand()
4657 op->type = OP_IMM; in decode_operand()
4658 op->bytes = 1; in decode_operand()
4659 op->val = reg_read(ctxt, VCPU_REGS_RCX) & 0xff; in decode_operand()
4662 rc = decode_imm(ctxt, op, 1, true); in decode_operand()
4665 op->type = OP_IMM; in decode_operand()
4666 op->bytes = 1; in decode_operand()
4667 op->val = 1; in decode_operand()
4670 rc = decode_imm(ctxt, op, imm_size(ctxt), true); in decode_operand()
4673 rc = decode_imm(ctxt, op, ctxt->op_bytes, true); in decode_operand()
4676 ctxt->memop.bytes = 1; in decode_operand()
4677 if (ctxt->memop.type == OP_REG) { in decode_operand()
4678 ctxt->memop.addr.reg = decode_register(ctxt, in decode_operand()
4679 ctxt->modrm_rm, true); in decode_operand()
4680 fetch_register_operand(&ctxt->memop); in decode_operand()
4684 ctxt->memop.bytes = 2; in decode_operand()
4687 ctxt->memop.bytes = 4; in decode_operand()
4690 rc = decode_imm(ctxt, op, 2, false); in decode_operand()
4693 rc = decode_imm(ctxt, op, imm_size(ctxt), false); in decode_operand()
4696 op->type = OP_MEM; in decode_operand()
4697 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4698 op->addr.mem.ea = in decode_operand()
4700 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
4701 op->val = 0; in decode_operand()
4702 op->count = 1; in decode_operand()
4705 op->type = OP_MEM; in decode_operand()
4706 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes; in decode_operand()
4707 op->addr.mem.ea = in decode_operand()
4711 op->addr.mem.seg = ctxt->seg_override; in decode_operand()
4712 op->val = 0; in decode_operand()
4715 op->type = OP_IMM; in decode_operand()
4716 op->addr.mem.ea = ctxt->_eip; in decode_operand()
4717 op->bytes = ctxt->op_bytes + 2; in decode_operand()
4718 insn_fetch_arr(op->valptr, op->bytes, ctxt); in decode_operand()
4721 ctxt->memop.bytes = ctxt->op_bytes + 2; in decode_operand()
4724 op->type = OP_IMM; in decode_operand()
4725 op->val = VCPU_SREG_ES; in decode_operand()
4728 op->type = OP_IMM; in decode_operand()
4729 op->val = VCPU_SREG_CS; in decode_operand()
4732 op->type = OP_IMM; in decode_operand()
4733 op->val = VCPU_SREG_SS; in decode_operand()
4736 op->type = OP_IMM; in decode_operand()
4737 op->val = VCPU_SREG_DS; in decode_operand()
4740 op->type = OP_IMM; in decode_operand()
4741 op->val = VCPU_SREG_FS; in decode_operand()
4744 op->type = OP_IMM; in decode_operand()
4745 op->val = VCPU_SREG_GS; in decode_operand()
4750 op->type = OP_NONE; /* Disable writeback. */ in decode_operand()
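/*
 * Editorial sketch, not part of emulate.c: the Op* selectors that
 * decode_operand() dispatches on are packed into the 64-bit ctxt->d
 * flags word, one field per operand slot (dst, src, src2), mirroring the
 * (ctxt->d >> SrcShift) & OpMask lookups in x86_decode_insn() below.
 * The field width used here is an assumption of this sketch.
 */
#include <stdint.h>

#define SKETCH_OP_BITS	5
#define SKETCH_OP_MASK	((1ull << SKETCH_OP_BITS) - 1)

static unsigned op_selector(uint64_t d, unsigned shift)
{
	return (d >> shift) & SKETCH_OP_MASK;	/* one of the Op* values */
}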
4761 int mode = ctxt->mode; in x86_decode_insn() local
4769 ctxt->memop.type = OP_NONE; in x86_decode_insn()
4770 ctxt->memopp = NULL; in x86_decode_insn()
4771 ctxt->_eip = ctxt->eip; in x86_decode_insn()
4772 ctxt->fetch.ptr = ctxt->fetch.data; in x86_decode_insn()
4773 ctxt->fetch.end = ctxt->fetch.data + insn_len; in x86_decode_insn()
4774 ctxt->opcode_len = 1; in x86_decode_insn()
4775 ctxt->intercept = x86_intercept_none; in x86_decode_insn()
4777 memcpy(ctxt->fetch.data, insn, insn_len); in x86_decode_insn()
4784 switch (mode) { in x86_decode_insn()
4788 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS); in x86_decode_insn()
4808 ctxt->op_bytes = def_op_bytes; in x86_decode_insn()
4809 ctxt->ad_bytes = def_ad_bytes; in x86_decode_insn()
4813 switch (ctxt->b = insn_fetch(u8, ctxt)) { in x86_decode_insn()
4814 case 0x66: /* operand-size override */ in x86_decode_insn()
4817 ctxt->op_bytes = def_op_bytes ^ 6; in x86_decode_insn()
4819 case 0x67: /* address-size override */ in x86_decode_insn()
4820 if (mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
4822 ctxt->ad_bytes = def_ad_bytes ^ 12; in x86_decode_insn()
4825 ctxt->ad_bytes = def_ad_bytes ^ 6; in x86_decode_insn()
4829 ctxt->seg_override = VCPU_SREG_ES; in x86_decode_insn()
4833 ctxt->seg_override = VCPU_SREG_CS; in x86_decode_insn()
4837 ctxt->seg_override = VCPU_SREG_SS; in x86_decode_insn()
4841 ctxt->seg_override = VCPU_SREG_DS; in x86_decode_insn()
4845 ctxt->seg_override = VCPU_SREG_FS; in x86_decode_insn()
4849 ctxt->seg_override = VCPU_SREG_GS; in x86_decode_insn()
4852 if (mode != X86EMUL_MODE_PROT64) in x86_decode_insn()
4854 ctxt->rex_prefix = ctxt->b; in x86_decode_insn()
4857 ctxt->lock_prefix = 1; in x86_decode_insn()
4861 ctxt->rep_prefix = ctxt->b; in x86_decode_insn()
4869 ctxt->rex_prefix = 0; in x86_decode_insn()
4875 if (ctxt->rex_prefix & 8) in x86_decode_insn()
4876 ctxt->op_bytes = 8; /* REX.W */ in x86_decode_insn()
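/*
 * Editorial sketch, not part of emulate.c: the REX byte consumed above.
 * In 64-bit mode only, 0x40-0x4f act as a prefix whose low nibble
 * extends the encoding: W selects 64-bit operand size (the op_bytes = 8
 * assignment above), while R, X and B supply the fourth bit of the ModRM
 * reg field, the SIB index, and the ModRM r/m (or SIB base / opcode
 * register) fields respectively.
 */
#include <stdint.h>

struct rex_bits { unsigned w:1, r:1, x:1, b:1; };

static struct rex_bits decode_rex(uint8_t prefix)	/* 0x40..0x4f */
{
	return (struct rex_bits){
		.w = (prefix >> 3) & 1,
		.r = (prefix >> 2) & 1,
		.x = (prefix >> 1) & 1,
		.b = prefix & 1,
	};
}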
4879 opcode = opcode_table[ctxt->b]; in x86_decode_insn()
4880 /* Two-byte opcode? */ in x86_decode_insn()
4881 if (ctxt->b == 0x0f) { in x86_decode_insn()
4882 ctxt->opcode_len = 2; in x86_decode_insn()
4883 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
4884 opcode = twobyte_table[ctxt->b]; in x86_decode_insn()
4887 if (ctxt->b == 0x38) { in x86_decode_insn()
4888 ctxt->opcode_len = 3; in x86_decode_insn()
4889 ctxt->b = insn_fetch(u8, ctxt); in x86_decode_insn()
4890 opcode = opcode_map_0f_38[ctxt->b]; in x86_decode_insn()
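/*
 * Editorial sketch, not part of emulate.c: the escape-byte cascade
 * above.  A leading 0x0f selects the two-byte map; 0x0f 0x38 selects the
 * three-byte map.  Any other escape (e.g. 0x0f 0x3a) is simply looked up
 * in the two-byte table, where an unimplemented entry is rejected.
 */
#include <stdint.h>

static unsigned opcode_map_len(const uint8_t *insn)
{
	if (insn[0] != 0x0f)
		return 1;			/* opcode_table */
	if (insn[1] == 0x38)
		return 3;			/* opcode_map_0f_38 */
	return 2;				/* twobyte_table */
}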
4893 ctxt->d = opcode.flags; in x86_decode_insn()
4895 if (ctxt->d & ModRM) in x86_decode_insn()
4896 ctxt->modrm = insn_fetch(u8, ctxt); in x86_decode_insn()
4898 /* vex-prefix instructions are not implemented */ in x86_decode_insn()
4899 if (ctxt->opcode_len == 1 && (ctxt->b == 0xc5 || ctxt->b == 0xc4) && in x86_decode_insn()
4900 (mode == X86EMUL_MODE_PROT64 || (ctxt->modrm & 0xc0) == 0xc0)) { in x86_decode_insn()
4901 ctxt->d = NotImpl; in x86_decode_insn()
4904 while (ctxt->d & GroupMask) { in x86_decode_insn()
4905 switch (ctxt->d & GroupMask) { in x86_decode_insn()
4907 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
4911 goffset = (ctxt->modrm >> 3) & 7; in x86_decode_insn()
4912 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
4913 opcode = opcode.u.gdual->mod3[goffset]; in x86_decode_insn()
4915 opcode = opcode.u.gdual->mod012[goffset]; in x86_decode_insn()
4918 goffset = ctxt->modrm & 7; in x86_decode_insn()
4922 if (ctxt->rep_prefix && op_prefix) in x86_decode_insn()
4924 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix; in x86_decode_insn()
4926 case 0x00: opcode = opcode.u.gprefix->pfx_no; break; in x86_decode_insn()
4927 case 0x66: opcode = opcode.u.gprefix->pfx_66; break; in x86_decode_insn()
4928 case 0xf2: opcode = opcode.u.gprefix->pfx_f2; break; in x86_decode_insn()
4929 case 0xf3: opcode = opcode.u.gprefix->pfx_f3; break; in x86_decode_insn()
4933 if (ctxt->modrm > 0xbf) { in x86_decode_insn()
4934 size_t size = ARRAY_SIZE(opcode.u.esc->high); in x86_decode_insn()
4936 ctxt->modrm - 0xc0, size); in x86_decode_insn()
4938 opcode = opcode.u.esc->high[index]; in x86_decode_insn()
4940 opcode = opcode.u.esc->op[(ctxt->modrm >> 3) & 7]; in x86_decode_insn()
4944 if ((ctxt->modrm >> 6) == 3) in x86_decode_insn()
4945 opcode = opcode.u.idual->mod3; in x86_decode_insn()
4947 opcode = opcode.u.idual->mod012; in x86_decode_insn()
4950 if (ctxt->mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
4951 opcode = opcode.u.mdual->mode64; in x86_decode_insn()
4953 opcode = opcode.u.mdual->mode32; in x86_decode_insn()
4959 ctxt->d &= ~(u64)GroupMask; in x86_decode_insn()
4960 ctxt->d |= opcode.flags; in x86_decode_insn()
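/*
 * Editorial sketch, not part of emulate.c: which ModRM bits each group
 * flavour consumes as the loop above refines the opcode.  Group and
 * GroupDual select by the /reg field (bits 5:3), RMExt by the r/m field
 * (bits 2:0); GroupDual, InstrDual and Escape additionally split on
 * whether mod == 3 (register form).
 */
#include <stdint.h>

static unsigned group_slot(uint8_t modrm)	{ return (modrm >> 3) & 7; }
static unsigned rmext_slot(uint8_t modrm)	{ return modrm & 7; }
static int reg_form(uint8_t modrm)		{ return (modrm >> 6) == 3; }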
4963 ctxt->is_branch = opcode.flags & IsBranch; in x86_decode_insn()
4966 if (ctxt->d == 0) in x86_decode_insn()
4969 ctxt->execute = opcode.u.execute; in x86_decode_insn()
4977 ctxt->ops->get_cr(ctxt, 4) & X86_CR4_CET) { in x86_decode_insn()
4981 * Check both User and Supervisor on far transfers as inter- in x86_decode_insn()
4985 * any CET-affected instructions at any privilege level. in x86_decode_insn()
4987 if (!(ctxt->d & NearBranch)) in x86_decode_insn()
4989 else if (ctxt->ops->cpl(ctxt) == 3) in x86_decode_insn()
4994 if ((u_cet && ctxt->ops->get_msr(ctxt, MSR_IA32_U_CET, &u_cet)) || in x86_decode_insn()
4995 (s_cet && ctxt->ops->get_msr(ctxt, MSR_IA32_S_CET, &s_cet))) in x86_decode_insn()
5006 likely(!(ctxt->d & EmulateOnUD))) in x86_decode_insn()
5009 if (unlikely(ctxt->d & in x86_decode_insn()
5016 ctxt->check_perm = opcode.check_perm; in x86_decode_insn()
5017 ctxt->intercept = opcode.intercept; in x86_decode_insn()
5019 if (ctxt->d & NotImpl) in x86_decode_insn()
5022 if (mode == X86EMUL_MODE_PROT64) { in x86_decode_insn()
5023 if (ctxt->op_bytes == 4 && (ctxt->d & Stack)) in x86_decode_insn()
5024 ctxt->op_bytes = 8; in x86_decode_insn()
5025 else if (ctxt->d & NearBranch) in x86_decode_insn()
5026 ctxt->op_bytes = 8; in x86_decode_insn()
5029 if (ctxt->d & Op3264) { in x86_decode_insn()
5030 if (mode == X86EMUL_MODE_PROT64) in x86_decode_insn()
5031 ctxt->op_bytes = 8; in x86_decode_insn()
5033 ctxt->op_bytes = 4; in x86_decode_insn()
5036 if ((ctxt->d & No16) && ctxt->op_bytes == 2) in x86_decode_insn()
5037 ctxt->op_bytes = 4; in x86_decode_insn()
5039 if (ctxt->d & Sse) in x86_decode_insn()
5040 ctxt->op_bytes = 16; in x86_decode_insn()
5041 else if (ctxt->d & Mmx) in x86_decode_insn()
5042 ctxt->op_bytes = 8; in x86_decode_insn()
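/*
 * Editorial sketch, not part of emulate.c: the operand-size fixups above
 * collapsed into one function.  In 64-bit mode, 32-bit stack operations
 * and all near branches are promoted to 64 bits; Op3264 operands are
 * 64-bit in long mode and 32-bit otherwise; No16 turns a 16-bit size
 * into 32; SSE and MMX operands have fixed 16- and 8-byte sizes.
 */
static unsigned effective_op_bytes(unsigned op_bytes, int long_mode,
				   int stack, int near_branch, int op3264,
				   int no16, int sse, int mmx)
{
	if (long_mode) {
		if (op_bytes == 4 && stack)
			op_bytes = 8;	/* pushes/pops default to 64 bits */
		else if (near_branch)
			op_bytes = 8;	/* branch targets are 64-bit */
	}
	if (op3264)
		op_bytes = long_mode ? 8 : 4;
	if (no16 && op_bytes == 2)
		op_bytes = 4;
	if (sse)
		op_bytes = 16;
	else if (mmx)
		op_bytes = 8;
	return op_bytes;
}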
5046 if (ctxt->d & ModRM) { in x86_decode_insn()
5047 rc = decode_modrm(ctxt, &ctxt->memop); in x86_decode_insn()
5050 ctxt->seg_override = ctxt->modrm_seg; in x86_decode_insn()
5052 } else if (ctxt->d & MemAbs) in x86_decode_insn()
5053 rc = decode_abs(ctxt, &ctxt->memop); in x86_decode_insn()
5058 ctxt->seg_override = VCPU_SREG_DS; in x86_decode_insn()
5060 ctxt->memop.addr.mem.seg = ctxt->seg_override; in x86_decode_insn()
5066 rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask); in x86_decode_insn()
5074 rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask); in x86_decode_insn()
5079 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask); in x86_decode_insn()
5081 if (ctxt->rip_relative && likely(ctxt->memopp)) in x86_decode_insn()
5082 ctxt->memopp->addr.mem.ea = address_mask(ctxt, in x86_decode_insn()
5083 ctxt->memopp->addr.mem.ea + ctxt->_eip); in x86_decode_insn()
5087 ctxt->have_exception = true; in x86_decode_insn()
5093 return ctxt->d & PageTable; in x86_page_table_writing_insn()
5102 * - if REPE/REPZ and ZF = 0 then done in string_insn_completed()
5103 * - if REPNE/REPNZ and ZF = 1 then done in string_insn_completed()
5105 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) || in string_insn_completed()
5106 (ctxt->b == 0xae) || (ctxt->b == 0xaf)) in string_insn_completed()
5107 && (((ctxt->rep_prefix == REPE_PREFIX) && in string_insn_completed()
5108 ((ctxt->eflags & X86_EFLAGS_ZF) == 0)) in string_insn_completed()
5109 || ((ctxt->rep_prefix == REPNE_PREFIX) && in string_insn_completed()
5110 ((ctxt->eflags & X86_EFLAGS_ZF) == X86_EFLAGS_ZF)))) in string_insn_completed()
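/*
 * Editorial sketch, not part of emulate.c: the early-termination test
 * above.  Only CMPS (0xa6/0xa7) and SCAS (0xae/0xaf) consult ZF; a plain
 * REP on MOVS/STOS/LODS runs until RCX reaches zero.  The prefix byte
 * values (REPE 0xf3, REPNE 0xf2) are the standard x86 encodings.
 */
#include <stdbool.h>
#include <stdint.h>

#define SKETCH_EFLAGS_ZF	(1u << 6)

static bool rep_cond_done(uint8_t opcode, uint8_t rep_prefix, uint32_t eflags)
{
	bool zf = eflags & SKETCH_EFLAGS_ZF;

	if (opcode != 0xa6 && opcode != 0xa7 &&
	    opcode != 0xae && opcode != 0xaf)
		return false;			/* not a ZF-sensitive string op */
	return (rep_prefix == 0xf3 && !zf) ||	/* REPE stops when ZF = 0 */
	       (rep_prefix == 0xf2 && zf);	/* REPNE stops when ZF = 1 */
}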
5130 static void fetch_possible_mmx_operand(struct operand *op) in fetch_possible_mmx_operand() argument
5132 if (op->type == OP_MM) in fetch_possible_mmx_operand()
5133 kvm_read_mmx_reg(op->addr.mm, &op->mm_val); in fetch_possible_mmx_operand()
5139 ctxt->rip_relative = false; in init_decode_cache()
5140 ctxt->rex_prefix = 0; in init_decode_cache()
5141 ctxt->lock_prefix = 0; in init_decode_cache()
5142 ctxt->rep_prefix = 0; in init_decode_cache()
5143 ctxt->regs_valid = 0; in init_decode_cache()
5144 ctxt->regs_dirty = 0; in init_decode_cache()
5146 ctxt->io_read.pos = 0; in init_decode_cache()
5147 ctxt->io_read.end = 0; in init_decode_cache()
5148 ctxt->mem_read.end = 0; in init_decode_cache()
5153 const struct x86_emulate_ops *ops = ctxt->ops; in x86_emulate_insn()
5155 int saved_dst_type = ctxt->dst.type; in x86_emulate_insn()
5157 ctxt->mem_read.pos = 0; in x86_emulate_insn()
5160 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) { in x86_emulate_insn()
5165 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) { in x86_emulate_insn()
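/*
 * Editorial sketch, not part of emulate.c: the two #UD guards above.  A
 * LOCK prefix is only legal on an instruction flagged Lock, and only
 * when the destination is memory; a far-pointer source (SrcMemFAddr,
 * e.g. LES/LDS) must come from memory, never from a register encoding.
 */
#include <stdbool.h>

static bool decode_guards_ud(bool lock_prefix, bool insn_lockable,
			     bool dst_is_mem, bool src_is_far_ptr,
			     bool src_is_mem)
{
	if (lock_prefix && (!insn_lockable || !dst_is_mem))
		return true;			/* LOCK misuse => #UD */
	if (src_is_far_ptr && !src_is_mem)
		return true;			/* reg-form far pointer => #UD */
	return false;
}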
5170 if (unlikely(ctxt->d & in x86_emulate_insn()
5172 if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) || in x86_emulate_insn()
5173 (ctxt->d & Undefined)) { in x86_emulate_insn()
5178 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM))) in x86_emulate_insn()
5179 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) { in x86_emulate_insn()
5184 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) { in x86_emulate_insn()
5189 if (ctxt->d & Mmx) { in x86_emulate_insn()
5197 fetch_possible_mmx_operand(&ctxt->src); in x86_emulate_insn()
5198 fetch_possible_mmx_operand(&ctxt->src2); in x86_emulate_insn()
5199 if (!(ctxt->d & Mov)) in x86_emulate_insn()
5200 fetch_possible_mmx_operand(&ctxt->dst); in x86_emulate_insn()
5203 if (unlikely(check_intercepts) && ctxt->intercept) { in x86_emulate_insn()
5204 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5210 /* Instruction can only be executed in protected mode */ in x86_emulate_insn()
5211 if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) { in x86_emulate_insn()
5217 if ((ctxt->d & Priv) && ops->cpl(ctxt)) { in x86_emulate_insn()
5218 if (ctxt->d & PrivUD) in x86_emulate_insn()
5226 if (ctxt->d & CheckPerm) { in x86_emulate_insn()
5227 rc = ctxt->check_perm(ctxt); in x86_emulate_insn()
5232 if (unlikely(check_intercepts) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5233 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5239 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5243 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5244 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5250 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) { in x86_emulate_insn()
5251 rc = segmented_read(ctxt, ctxt->src.addr.mem, in x86_emulate_insn()
5252 ctxt->src.valptr, ctxt->src.bytes); in x86_emulate_insn()
5255 ctxt->src.orig_val64 = ctxt->src.val64; in x86_emulate_insn()
5258 if (ctxt->src2.type == OP_MEM) { in x86_emulate_insn()
5259 rc = segmented_read(ctxt, ctxt->src2.addr.mem, in x86_emulate_insn()
5260 &ctxt->src2.val, ctxt->src2.bytes); in x86_emulate_insn()
5265 if ((ctxt->d & DstMask) == ImplicitOps) in x86_emulate_insn()
5269 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) { in x86_emulate_insn()
5270 /* optimisation - avoid slow emulated read if Mov */ in x86_emulate_insn()
5271 rc = segmented_read(ctxt, ctxt->dst.addr.mem, in x86_emulate_insn()
5272 &ctxt->dst.val, ctxt->dst.bytes); in x86_emulate_insn()
5274 if (!(ctxt->d & NoWrite) && in x86_emulate_insn()
5276 ctxt->exception.vector == PF_VECTOR) in x86_emulate_insn()
5277 ctxt->exception.error_code |= PFERR_WRITE_MASK; in x86_emulate_insn()
5281 /* Copy full 64-bit value for CMPXCHG8B. */ in x86_emulate_insn()
5282 ctxt->dst.orig_val64 = ctxt->dst.val64; in x86_emulate_insn()
5286 if (unlikely(check_intercepts) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5287 rc = emulator_check_intercept(ctxt, ctxt->intercept, in x86_emulate_insn()
5293 if (ctxt->rep_prefix && (ctxt->d & String)) in x86_emulate_insn()
5294 ctxt->eflags |= X86_EFLAGS_RF; in x86_emulate_insn()
5296 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5298 if (ctxt->execute) { in x86_emulate_insn()
5299 rc = ctxt->execute(ctxt); in x86_emulate_insn()
5305 if (ctxt->opcode_len == 2) in x86_emulate_insn()
5307 else if (ctxt->opcode_len == 3) in x86_emulate_insn()
5310 switch (ctxt->b) { in x86_emulate_insn()
5312 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5313 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5316 ctxt->dst.val = ctxt->src.addr.mem.ea; in x86_emulate_insn()
5319 if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX)) in x86_emulate_insn()
5320 ctxt->dst.type = OP_NONE; in x86_emulate_insn()
5325 switch (ctxt->op_bytes) { in x86_emulate_insn()
5326 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break; in x86_emulate_insn()
5327 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break; in x86_emulate_insn()
5328 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break; in x86_emulate_insn()
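/*
 * Editorial sketch, not part of emulate.c: the sign-extension cascade
 * above implements 0x98 (CBW/CWDE/CDQE).  The case labels are the
 * destination width, so each variant widens the lower half of the
 * accumulator: AL->AX (s8), AX->EAX (s16), EAX->RAX (s32); writeback
 * then stores only op_bytes of the result.
 */
#include <stdint.h>

static uint64_t cbw_family(uint64_t rax, unsigned op_bytes)
{
	switch (op_bytes) {
	case 2: return (uint16_t)(int8_t)rax;	/* CBW  */
	case 4: return (uint32_t)(int16_t)rax;	/* CWDE */
	case 8: return (uint64_t)(int32_t)rax;	/* CDQE */
	}
	return rax;
}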
5335 rc = emulate_int(ctxt, ctxt->src.val); in x86_emulate_insn()
5338 if (ctxt->eflags & X86_EFLAGS_OF) in x86_emulate_insn()
5343 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5344 ctxt->dst.type = OP_NONE; /* Disable writeback. */ in x86_emulate_insn()
5347 ctxt->ops->halt(ctxt); in x86_emulate_insn()
5351 ctxt->eflags ^= X86_EFLAGS_CF; in x86_emulate_insn()
5354 ctxt->eflags &= ~X86_EFLAGS_CF; in x86_emulate_insn()
5357 ctxt->eflags |= X86_EFLAGS_CF; in x86_emulate_insn()
5360 ctxt->eflags &= ~X86_EFLAGS_DF; in x86_emulate_insn()
5363 ctxt->eflags |= X86_EFLAGS_DF; in x86_emulate_insn()
5373 if (ctxt->d & SrcWrite) { in x86_emulate_insn()
5374 BUG_ON(ctxt->src.type == OP_MEM || ctxt->src.type == OP_MEM_STR); in x86_emulate_insn()
5375 rc = writeback(ctxt, &ctxt->src); in x86_emulate_insn()
5379 if (!(ctxt->d & NoWrite)) { in x86_emulate_insn()
5380 rc = writeback(ctxt, &ctxt->dst); in x86_emulate_insn()
5389 ctxt->dst.type = saved_dst_type; in x86_emulate_insn()
5391 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5392 string_addr_inc(ctxt, VCPU_REGS_RSI, &ctxt->src); in x86_emulate_insn()
5394 if ((ctxt->d & DstMask) == DstDI) in x86_emulate_insn()
5395 string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst); in x86_emulate_insn()
5397 if (ctxt->rep_prefix && (ctxt->d & String)) { in x86_emulate_insn()
5399 struct read_cache *r = &ctxt->io_read; in x86_emulate_insn()
5400 if ((ctxt->d & SrcMask) == SrcSI) in x86_emulate_insn()
5401 count = ctxt->src.count; in x86_emulate_insn()
5403 count = ctxt->dst.count; in x86_emulate_insn()
5404 register_address_increment(ctxt, VCPU_REGS_RCX, -count); in x86_emulate_insn()
5408 * Re-enter guest when pio read ahead buffer is empty in x86_emulate_insn()
5411 if ((r->end != 0 || reg_read(ctxt, VCPU_REGS_RCX) & 0x3ff) && in x86_emulate_insn()
5412 (r->end == 0 || r->end != r->pos)) { in x86_emulate_insn()
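/*
 * Editorial sketch, not part of emulate.c: the restart heuristic above.
 * A REP string instruction keeps iterating inside the emulator unless
 * (a) there is no pio read-ahead buffer and RCX has hit a
 * 0x400-iteration boundary, which bounds emulation latency by
 * periodically re-entering the guest, or (b) a read-ahead buffer exists
 * but has been fully consumed.
 */
#include <stdbool.h>
#include <stdint.h>

static bool restart_string_insn(uint64_t rcx, unsigned buf_pos,
				unsigned buf_end)
{
	return (buf_end != 0 || (rcx & 0x3ff)) &&
	       (buf_end == 0 || buf_end != buf_pos);
}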
5418 ctxt->mem_read.end = 0; in x86_emulate_insn()
5424 ctxt->eflags &= ~X86_EFLAGS_RF; in x86_emulate_insn()
5427 ctxt->eip = ctxt->_eip; in x86_emulate_insn()
5428 if (ctxt->mode != X86EMUL_MODE_PROT64) in x86_emulate_insn()
5429 ctxt->eip = (u32)ctxt->_eip; in x86_emulate_insn()
5433 if (KVM_EMULATOR_BUG_ON(ctxt->exception.vector > 0x1f, ctxt)) in x86_emulate_insn()
5435 ctxt->have_exception = true; in x86_emulate_insn()
5446 switch (ctxt->b) { in x86_emulate_insn()
5448 (ctxt->ops->wbinvd)(ctxt); in x86_emulate_insn()
5456 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg); in x86_emulate_insn()
5459 ctxt->dst.val = ops->get_dr(ctxt, ctxt->modrm_reg); in x86_emulate_insn()
5462 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5463 ctxt->dst.val = ctxt->src.val; in x86_emulate_insn()
5464 else if (ctxt->op_bytes != 4) in x86_emulate_insn()
5465 ctxt->dst.type = OP_NONE; /* no writeback */ in x86_emulate_insn()
5468 if (test_cc(ctxt->b, ctxt->eflags)) in x86_emulate_insn()
5469 rc = jmp_rel(ctxt, ctxt->src.val); in x86_emulate_insn()
5472 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags); in x86_emulate_insn()
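/*
 * Editorial sketch, not part of emulate.c: test_cc() evaluates the low
 * nibble of the opcode as an x86 condition code (the kernel version uses
 * a small setcc thunk).  A portable equivalent of the sixteen
 * conditions; odd codes negate their even partner.
 */
#include <stdbool.h>
#include <stdint.h>

#define F_CF	(1u << 0)
#define F_PF	(1u << 2)
#define F_ZF	(1u << 6)
#define F_SF	(1u << 7)
#define F_OF	(1u << 11)

static bool cond_holds(unsigned cc, uint32_t fl)
{
	bool r;

	switch ((cc & 0xf) >> 1) {
	case 0: r = fl & F_OF; break;				/* O  / NO */
	case 1: r = fl & F_CF; break;				/* B  / AE */
	case 2: r = fl & F_ZF; break;				/* E  / NE */
	case 3: r = fl & (F_CF | F_ZF); break;			/* BE / A  */
	case 4: r = fl & F_SF; break;				/* S  / NS */
	case 5: r = fl & F_PF; break;				/* P  / NP */
	case 6: r = !!(fl & F_SF) != !!(fl & F_OF); break;	/* L  / GE */
	default:
		r = (fl & F_ZF) ||
		    (!!(fl & F_SF) != !!(fl & F_OF));		/* LE / G  */
		break;
	}
	return (cc & 1) ? !r : r;
}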
5475 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5476 ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val in x86_emulate_insn()
5477 : (u16) ctxt->src.val; in x86_emulate_insn()
5480 ctxt->dst.bytes = ctxt->op_bytes; in x86_emulate_insn()
5481 ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val : in x86_emulate_insn()
5482 (s16) ctxt->src.val; in x86_emulate_insn()
5511 if (ctxt->rep_prefix && (ctxt->d & String)) in emulator_can_use_gpa()
5514 if (ctxt->d & TwoMemOp) in emulator_can_use_gpa()