/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#include <sys/queue.h>
#include <arch/types.h>
#include <arch/emulate.h>
#include <arch/vmdebug.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
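/*
 * Illustrative reading of a table entry (see opcode_table below): the
 * entry for opcode 0x00 (add r/m8,r8) is ByteOp | DstMem | SrcReg | ModRM,
 * i.e. an 8-bit operation with a memory destination, a register source,
 * and a ModRM byte to decode.
 */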
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
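/*
 * Illustrative decode of the flag byte: given d = opcode_table[b],
 * (d & DstMask) yields the destination class (DstReg, DstMem, ...),
 * (d & SrcMask) the source class, and (d & ModRM) / (d & Mov) are
 * simple boolean tests.
 */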
static uint8_t opcode_table[256] = {
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM, 0, 0,
	0, 0, ByteOp | DstMem | SrcImm | ModRM | Mov,
	DstMem | SrcImm | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static uint8_t twobyte_table[256] = {
	0, SrcMem | ModRM | DstReg | Mov, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM, 0, 0, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
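/*
 * Example use: the string ops below step RSI/RDI forward or backward
 * depending on EFLG_DF, and the cmov/bt emulation tests these bits
 * directly against the saved _eflags.
 */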
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
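/*
 * Illustrative sketch (not the literal expansion): for a 4-byte add,
 * emulate_2op_SrcV("add", src, dst, _eflags) emits roughly
 *	<load guest flags>  addl %src,%dst  <save flags back>
 * so the CPU computes both the result and the flag effects for us.
 */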
#if defined(__x86_64__)
#define _LO32 "k"	/* force 32-bit operand */
#define _STK  "%%rsp"	/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""	/* force 32-bit operand */
#define _STK  "%%esp"	/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"push %"_sav"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	/* _sav &= ~msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		\
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		\
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"b %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"w %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"l %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK) ); \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"q %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
/* Access/update address held in a register, based on addressing mode. */
#define register_address(base, reg) \
	((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
		   ((reg) & ((1UL << (ad_bytes << 3)) - 1))))
#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		int _inc = (inc); \
		if ( ad_bytes == sizeof(unsigned long) ) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
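/*
 * Example: with ad_bytes == 2 only the low 16 bits of a register
 * participate, so register_address(ctxt->ss_base, _regs[VCPU_REGS_RSP])
 * wraps at 64K exactly as a real-mode stack pointer would, and the
 * increment above leaves the upper bits untouched.
 */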
void *decode_register(uint8_t modrm_reg, unsigned long *regs, int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
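/*
 * Example: with highbyte_regs set, modrm_reg values 4..7 select the
 * legacy high-byte registers AH/CH/DH/BH, e.g. decode_register(4,
 * regs, 1) returns a pointer to byte 1 of regs[0], i.e. AH.
 */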
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   uint16_t *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
	return rc;
}
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	uint8_t b, d, sib, twobyte = 0, rex_prefix = 0;
	uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
	default:
		return -1;
	}
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(uint8_t, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			break;
		default:
			goto done_prefixes;
		}
	}
done_prefixes:
	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
		b = insn_fetch(uint8_t, 1, _eip);
	}
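	/*
	 * Example: REX byte 0x4c (0100.1100b) has W and R set, so
	 * op_bytes becomes 8 and bit 3 of the ModRM reg field is
	 * extended; X and B are clear.
	 */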
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(uint8_t, 1, _eip);
			d = twobyte_table[b];
		}
		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}
	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(uint8_t, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;
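		/*
		 * Example: modrm 0x45 splits into mod=01, reg=000,
		 * rm=101, i.e. a disp8-follows form.
		 */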
		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			goto modrm_done;
		}
		if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(uint16_t, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(int8_t, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(uint16_t, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0: modrm_ea += bx + si; break;
			case 1: modrm_ea += bx + di; break;
			case 2: modrm_ea += bp + si; break;
			case 3: modrm_ea += bp + di; break;
			case 4: modrm_ea += si; break;
			case 5: modrm_ea += di; break;
			case 6: if (modrm_mod != 0) modrm_ea += bp; break;
			case 7: modrm_ea += bx; break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				override_base = &ctxt->ss_base;
			modrm_ea = (uint16_t) modrm_ea;
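			/*
			 * Example: "add [bp+si+0x10], al" decodes as
			 * mod=01 rm=010, so the EA is BP+SI+disp8,
			 * truncated to 16 bits, with SS as the default
			 * segment.
			 */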
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(uint8_t, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;
				if (base_reg == 5 && modrm_mod == 0)
					modrm_ea += insn_fetch(int32_t, 4, _eip);
				else
					modrm_ea += _regs[base_reg];
				if (index_reg != 4)
					modrm_ea += _regs[index_reg] << scale;
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(int32_t, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(int8_t, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(int32_t, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;

		if (rip_relative) {
			/* The EA is relative to the *next* instruction, so
			 * account for any immediate bytes still to come. */
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else if (op_bytes == 8)
					modrm_ea += 4;
				else
					modrm_ea += op_bytes;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (uint32_t) modrm_ea;
		cr2 = modrm_ea;
	      modrm_done:
		;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs, (rex_prefix == 0));
			dst.val = *(uint8_t *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(uint16_t *) dst.ptr;
				break;
			case 4:
				dst.val = *(uint32_t *) dst.ptr;
				break;
			case 8:
				dst.val = *(uint64_t *) dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs, (rex_prefix == 0));
			src.val = src.orig_val = *(uint8_t *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(uint16_t *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(uint32_t *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(uint64_t *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
	      srcmem_common:
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(int8_t, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(int16_t, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(int32_t, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(int8_t, 1, _eip);
		break;
	}
	if (twobyte)
		goto twobyte_insn;

	switch (b) {
	case 0x00 ... 0x05:	/* add */
		emulate_2op_SrcV("add", src, dst, _eflags);
		break;
	case 0x08 ... 0x0d:	/* or */
		emulate_2op_SrcV("or", src, dst, _eflags);
		break;
	case 0x10 ... 0x15:	/* adc */
		emulate_2op_SrcV("adc", src, dst, _eflags);
		break;
	case 0x18 ... 0x1d:	/* sbb */
		emulate_2op_SrcV("sbb", src, dst, _eflags);
		break;
	case 0x20 ... 0x25:	/* and */
		emulate_2op_SrcV("and", src, dst, _eflags);
		break;
	case 0x28 ... 0x2d:	/* sub */
		emulate_2op_SrcV("sub", src, dst, _eflags);
		break;
	case 0x30 ... 0x35:	/* xor */
		emulate_2op_SrcV("xor", src, dst, _eflags);
		break;
	case 0x38 ... 0x3d:	/* cmp */
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		break;
	case 0x63:	/* movsxd */
		if (mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		dst.val = (int32_t) src.val;
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (modrm_reg) {
		case 0: emulate_2op_SrcV("add", src, dst, _eflags); break;
		case 1: emulate_2op_SrcV("or", src, dst, _eflags); break;
		case 2: emulate_2op_SrcV("adc", src, dst, _eflags); break;
		case 3: emulate_2op_SrcV("sbb", src, dst, _eflags); break;
		case 4: emulate_2op_SrcV("and", src, dst, _eflags); break;
		case 5: emulate_2op_SrcV("sub", src, dst, _eflags); break;
		case 6: emulate_2op_SrcV("xor", src, dst, _eflags); break;
		case 7: emulate_2op_SrcV("cmp", src, dst, _eflags); break;
		}
		break;
	case 0x84 ... 0x85:
	      test:		/* test */
		emulate_2op_SrcV("test", src, dst, _eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (dst.bytes) {
		case 1:
			*(uint8_t *) src.ptr = (uint8_t) dst.val;
			break;
		case 2:
			*(uint16_t *) src.ptr = (uint16_t) dst.val;
			break;
		case 4:
			*src.ptr = (uint32_t) dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*src.ptr = dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		dst.val = src.val;
		lock_prefix = 1;
		break;
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		dst.val = src.val;
		break;
	case 0x8f:	/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
	case 0xc0 ... 0xc1:	/* Grp2 */
	      grp2:
		switch (modrm_reg) {
		case 0:	/* rol */
			emulate_2op_SrcB("rol", src, dst, _eflags);
			break;
		case 1:	/* ror */
			emulate_2op_SrcB("ror", src, dst, _eflags);
			break;
		case 2:	/* rcl */
			emulate_2op_SrcB("rcl", src, dst, _eflags);
			break;
		case 3:	/* rcr */
			emulate_2op_SrcB("rcr", src, dst, _eflags);
			break;
		case 4:	/* sal/shl */
		case 6:	/* sal/shl */
			emulate_2op_SrcB("sal", src, dst, _eflags);
			break;
		case 5:	/* shr */
			emulate_2op_SrcB("shr", src, dst, _eflags);
			break;
		case 7:	/* sar */
			emulate_2op_SrcB("sar", src, dst, _eflags);
			break;
		}
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		src.val = 1;
		goto grp2;
	case 0xd2 ... 0xd3:	/* Grp2 */
		src.val = _regs[VCPU_REGS_RCX];
		goto grp2;
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(int8_t, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(int16_t, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(int32_t, 4, _eip);
				break;
			}
			goto test;
		case 2:	/* not */
			dst.val = ~dst.val;
			break;
		case 3:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8, ctxt)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(register_address(ctxt->ss_base,
								  _regs[VCPU_REGS_RSP]),
						 dst.val, dst.bytes, ctxt)) != 0)
				goto done;
			dst.val = dst.orig_val;	/* skanky: disable writeback */
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}
writeback:
	if ((d & Mov) || (dst.orig_val != dst.val)) {
		switch (d & DstMask) {
		case DstReg:
			/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
			switch (dst.bytes) {
			case 1:
				*(uint8_t *) dst.ptr = (uint8_t) dst.val;
				break;
			case 2:
				*(uint16_t *) dst.ptr = (uint16_t) dst.val;
				break;
			case 4:
				*dst.ptr = (uint32_t) dst.val;
				break;	/* 64b: zero-ext */
			case 8:
				*dst.ptr = dst.val;
				break;
			}
			break;
		case DstMem:
			if (lock_prefix)
				rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
							   dst.orig_val, dst.val, dst.bytes,
							   ctxt);
			else
				rc = ops->write_emulated((unsigned long)dst.ptr,
							 dst.val, dst.bytes, ctxt);
			if (rc != 0)
				goto done;
			break;
		}
	}
	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
special_insn:
	if (twobyte)
		goto twobyte_special_insn;

	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}

	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address
					     (override_base ? *override_base : ctxt->ds_base,
					      _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes,
					     ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	}
	goto writeback;
twobyte_insn:
	switch (b) {
	case 0x01:	/* lgdt, lidt, lmsw */
		switch (modrm_reg) {
			uint16_t size;
			unsigned long address;

		case 2:	/* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
#warning "implement realmode_lgdt"
			//realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3:	/* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
#warning "implement realmode_lidt"
			//realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 6:	/* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
#warning "implement realmode_lmsw"
			//realmode_lmsw(ctxt->vcpu, (uint16_t)modrm_val, &_eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		d &= ~Mov;	/* default to no move */
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			d |= (_eflags & EFLG_OF) ? Mov : 0;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			d |= (_eflags & EFLG_CF) ? Mov : 0;
			break;
		case 2:	/* cmovz/cmove */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			break;
		case 3:	/* cmovbe/cmovna */
			d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
			break;
		case 4:	/* cmovs */
			d |= (_eflags & EFLG_SF) ? Mov : 0;
			break;
		case 5:	/* cmovp/cmovpe */
			d |= (_eflags & EFLG_PF) ? Mov : 0;
			break;
		case 7:	/* cmovle/cmovng */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			d |= (!(_eflags & EFLG_SF) !=
			      !(_eflags & EFLG_OF)) ? Mov : 0;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		d ^= (b & 1) ? Mov : 0;
		break;
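		/*
		 * Example: 0x44 (cmove) moves when ZF is set; 0x45
		 * (cmovne) runs the same ZF test and then flips the
		 * result via the low opcode bit.
		 */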
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		/* Always write back. The question is: where to? */
		d |= Mov;
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xa3:
	      bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
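		/*
		 * Example: for a 4-byte operand (dst.bytes << 3) - 1
		 * is 31, so the bit offset is taken modulo 32 and
		 * never reaches outside the fetched destination.
		 */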
	case 0xb3:
	      btr:		/* btr */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (uint8_t) src.val : (uint16_t) src.val;
		break;
	case 0xbb:
	      btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xba:	/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (int8_t) src.val : (int16_t) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	dst.orig_val = dst.val;
	switch (b) {
	case 0x0d:	/* GrpP (prefetch) */
	case 0x18:	/* Grp16 (prefetch/nop) */
		break;
	case 0x20:	/* mov cr, reg */
		b = insn_fetch(uint8_t, 1, _eip);
		if ((b & 0xc0) != 0xc0)
			goto cannot_emulate;
#warning "implement realmode_get_cr"
		//_regs[(b >> 3) & 7] = realmode_get_cr(ctxt->vcpu, b & 7);
		break;
	case 0x22:	/* mov reg, cr */
		b = insn_fetch(uint8_t, 1, _eip);
		if ((b & 0xc0) != 0xc0)
			goto cannot_emulate;
#warning "implement realmode_set_cr"
		//realmode_set_cr(ctxt->vcpu, b & 7, _regs[(b >> 3) & 7] & -1u,
		//		&_eflags);
		break;
	case 0xc7:	/* Grp9 (cmpxchg8b) */
#if defined(__i386__)
		{
			unsigned long old_lo, old_hi;
			if (((rc = ops->read_emulated(cr2 + 0, &old_lo, 4, ctxt)) != 0)
			    || ((rc = ops->read_emulated(cr2 + 4, &old_hi, 4,
							 ctxt)) != 0))
				goto done;
			if ((old_lo != _regs[VCPU_REGS_RAX])
			    || (old_hi != _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = old_lo;
				_regs[VCPU_REGS_RDX] = old_hi;
				_eflags &= ~EFLG_ZF;
			} else if (ops->cmpxchg8b_emulated == NULL) {
				rc = X86EMUL_UNHANDLEABLE;
			} else {
				if ((rc = ops->cmpxchg8b_emulated(cr2, old_lo, old_hi,
								  _regs[VCPU_REGS_RBX],
								  _regs[VCPU_REGS_RCX],
								  ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#elif defined(__x86_64__)
		{
			unsigned long old, new;
			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
				goto done;
			if (((uint32_t) (old >> 0) != (uint32_t) _regs[VCPU_REGS_RAX])
			    || ((uint32_t) (old >> 32) !=
				(uint32_t) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (uint32_t) (old >> 0);
				_regs[VCPU_REGS_RDX] = (uint32_t) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = (_regs[VCPU_REGS_RCX] << 32) | (uint32_t)
					_regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, old, new, 8, ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#endif
	}
	goto writeback;
cannot_emulate:
	printd("Cannot emulate %02x\n", b);
	return -1;
}