/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#include "kvm.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
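/*
 * Illustrative sketch (not part of the original source): opcode 0x88,
 * "mov r/m8, r8", is described in opcode_table below as
 * ByteOp | DstMem | SrcReg | ModRM | Mov.  A decoder pulls the fields
 * back out of such a descriptor with the masks above:
 *
 *	u8 d = opcode_table[0x88];
 *	int is_byte   = d & ByteOp;	// 8-bit operands
 *	int dst_type  = d & DstMask;	// == DstMem
 *	int src_type  = d & SrcMask;	// == SrcReg
 *	int has_modrm = d & ModRM;	// a ModR/M byte follows
 */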
static u8 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x4F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM, 0, 0,
	0, 0, ByteOp | DstMem | SrcImm | ModRM | Mov,
	DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u8 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/*
 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
 * are interested only in invlpg and not in any of the rest.
 *
 * invlpg is a special instruction in that the data it references may not
 * be mapped.
 */
void kvm_emulator_want_group7_invlpg(void)
{
	twobyte_table[1] &= ~SrcMem;
}
EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
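/*
 * Illustrative note (not in the original): twobyte_table[1] describes
 * opcode 0x0F 0x01 (Group 7).  Clearing SrcMem downgrades the source
 * to SrcNone, so the generic decoder no longer fetches the memory
 * operand.  That matters for invlpg: its target page may be unmapped,
 * and reading it would fault.  Roughly:
 *
 *	twobyte_table[1] == SrcMem | ModRM | DstReg	// before
 *	kvm_emulator_want_group7_invlpg();
 *	twobyte_table[1] == SrcNone | ModRM | DstReg	// after
 */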
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
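/*
 * Worked example (illustrative, not in the original): after emulating
 * a "cmp" of two equal operands, EFLG_ZF is set in the saved flags:
 *
 *	unsigned long eflags = 0;
 *	// emulate_2op_SrcV("cmp", src, dst, eflags) with src.val == dst.val
 *	if (eflags & EFLG_ZF)
 *		;	// operands compared equal, as cmpxchg relies on below
 */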
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"push %"_sav"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pop  %"_tmp"; " \
	"orl  %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	/* _sav &= ~msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop  %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"b %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"w %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"l %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK) ); \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"q %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
	} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
/* Access/update address held in a register, based on addressing mode. */
#define register_address(base, reg) \
	((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
		   ((reg) & ((1UL << (ad_bytes << 3)) - 1))))
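/*
 * Worked example (illustrative, not in the original): with 16-bit
 * addressing (ad_bytes == 2), only the low 16 bits of the register
 * take part in the effective address:
 *
 *	register_address(0x10000, 0x12345678) == 0x10000 + 0x5678
 *
 * With ad_bytes == sizeof(unsigned long) the register is used whole.
 */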
#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		int _inc = (inc); \
		if ( ad_bytes == sizeof(unsigned long) ) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
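/*
 * Worked example (illustrative, not in the original): with
 * ad_bytes == 2, reg == 0x1fffe and inc == 4, only the low 16 bits
 * wrap: the result is 0x10002; the upper bits are preserved.
 */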
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
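/*
 * Illustrative example (not in the original): without a REX prefix,
 * byte-register numbers 4-7 select AH/CH/DH/BH rather than
 * SPL/BPL/SIL/DIL.  decode_register(4, regs, 1) therefore returns a
 * pointer to bits 8-15 of regs[0] (RAX), i.e. the AH byte, while
 * decode_register(4, regs, 0) points at RSP itself.
 */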
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
	return rc;
}
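/*
 * Illustrative note (not in the original): this reads the
 * pseudo-descriptor operand of lgdt/lidt: a 16-bit limit followed by
 * a 24/32/64-bit base, depending on operand size.  E.g. for lgdt with
 * op_bytes == 4, *size receives bytes 0-1 and *address bytes 2-5.
 */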
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	u8 b, d, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			break;
		default:
			goto done_prefixes;
		}
	}
done_prefixes:
	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
		b = insn_fetch(u8, 1, _eip);
	}
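	/*
	 * Worked example (illustrative, not in the original): REX byte
	 * 0x4d = 0100 1101b sets W, R and B: op_bytes becomes 8, and
	 * modrm_reg and modrm_rm/base_reg each get bit 3 set so the
	 * reg/rm fields can encode r8-r15.  0x48 sets only REX.W.
	 */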
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];
		}

		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}
	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;

		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			goto modrm_done;
		}
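		/*
		 * Worked example (illustrative, not in the original):
		 * modrm == 0x4e decodes as mod=01b, reg=001b, rm=110b:
		 * a memory operand with an 8-bit displacement, reg
		 * field 1, and (in 16-bit mode) a BP-based address.
		 */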
		if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0:
				modrm_ea += bx + si;
				break;
			case 1:
				modrm_ea += bx + di;
				break;
			case 2:
				modrm_ea += bp + si;
				break;
			case 3:
				modrm_ea += bp + di;
				break;
			case 4:
				modrm_ea += si;
				break;
			case 5:
				modrm_ea += di;
				break;
			case 6:
				if (modrm_mod != 0)
					modrm_ea += bp;
				break;
			case 7:
				modrm_ea += bx;
				break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				if (!override_base)
					override_base = &ctxt->ss_base;
			modrm_ea = (u16)modrm_ea;
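			/*
			 * Worked example (illustrative, not in the
			 * original): modrm 0x40 (mod=1, rm=0) yields
			 * EA = BX + SI + disp8, truncated to 16 bits;
			 * rm values 2, 3 and the BP-based forms
			 * default to the SS segment, as above.
			 */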
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(u8, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;

				switch (base_reg) {
				case 5:
					if (modrm_mod != 0)
						modrm_ea += _regs[base_reg];
					else
						modrm_ea += insn_fetch(s32, 4, _eip);
					break;
				default:
					modrm_ea += _regs[base_reg];
				}
				switch (index_reg) {
				case 4:
					break;
				default:
					modrm_ea += _regs[index_reg] << scale;
				}
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;
		if (rip_relative) {
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else if (op_bytes == 8)
					modrm_ea += 4;
				else
					modrm_ea += op_bytes;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (u32)modrm_ea;
	modrm_done:
		;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		dst.type = OP_REG;
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.type = OP_MEM;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		src.type = OP_REG;
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
	srcmem_common:
		src.type = OP_MEM;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
847 emulate_2op_SrcV("add", src, dst, _eflags);
851 emulate_2op_SrcV("or", src, dst, _eflags);
855 emulate_2op_SrcV("adc", src, dst, _eflags);
859 emulate_2op_SrcV("sbb", src, dst, _eflags);
863 emulate_2op_SrcV("and", src, dst, _eflags);
867 emulate_2op_SrcV("sub", src, dst, _eflags);
871 emulate_2op_SrcV("xor", src, dst, _eflags);
875 emulate_2op_SrcV("cmp", src, dst, _eflags);
877 case 0x63: /* movsxd */
878 if (mode != X86EMUL_MODE_PROT64)
880 dst.val = (s32) src.val;
882 case 0x80 ... 0x83: /* Grp1 */
904 emulate_2op_SrcV("test", src, dst, _eflags);
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (dst.bytes) {
		case 1:
			*(u8 *) src.ptr = (u8) dst.val;
			break;
		case 2:
			*(u16 *) src.ptr = (u16) dst.val;
			break;
		case 4:
			*src.ptr = (u32) dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*src.ptr = dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		dst.val = src.val;
		lock_prefix = 1;
		break;
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		dst.val = src.val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
956 emulate_2op_SrcB("rol", src, dst, _eflags);
959 emulate_2op_SrcB("ror", src, dst, _eflags);
962 emulate_2op_SrcB("rcl", src, dst, _eflags);
965 emulate_2op_SrcB("rcr", src, dst, _eflags);
967 case 4: /* sal/shl */
968 case 6: /* sal/shl */
969 emulate_2op_SrcB("sal", src, dst, _eflags);
972 emulate_2op_SrcB("shr", src, dst, _eflags);
975 emulate_2op_SrcB("sar", src, dst, _eflags);
979 case 0xd0 ... 0xd1: /* Grp2 */
982 case 0xd2 ... 0xd3: /* Grp2 */
983 src.val = _regs[VCPU_REGS_RCX];
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.type = OP_IMM;
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 3:	/* not */
			dst.val = ~dst.val;
			break;
		case 4:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     dst.val, dst.bytes, ctxt)) != 0)
				goto done;
			dst.val = dst.orig_val;	/* skanky: disable writeback */
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}

writeback:
1052 if ((d & Mov) || (dst.orig_val != dst.val)) {
1055 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1056 switch (dst.bytes) {
1058 *(u8 *)dst.ptr = (u8)dst.val;
1061 *(u16 *)dst.ptr = (u16)dst.val;
1064 *dst.ptr = (u32)dst.val;
1065 break; /* 64b: zero-ext */
1073 rc = ops->cmpxchg_emulated((unsigned long)dst.
1078 rc = ops->write_emulated((unsigned long)dst.ptr,
1088 /* Commit shadow register state. */
1089 memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
1090 ctxt->eflags = _eflags;
1091 ctxt->vcpu->rip = _eip;
1094 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
				override_base ? *override_base : ctxt->ds_base,
				_regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.type = OP_REG;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	}
	goto writeback;
twobyte_insn:
	switch (b) {
	case 0x01:	/* lgdt, lidt, lmsw */
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2:	/* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3:	/* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4:	/* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6:	/* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7:	/* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21: /* mov from dr to reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23: /* mov from reg to dr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		d &= ~Mov;	/* default to no move */
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			d |= (_eflags & EFLG_OF) ? Mov : 0;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			d |= (_eflags & EFLG_CF) ? Mov : 0;
			break;
		case 2:	/* cmovz/cmove */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			break;
		case 3:	/* cmovbe/cmovna */
			d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
			break;
		case 4:	/* cmovs */
			d |= (_eflags & EFLG_SF) ? Mov : 0;
			break;
		case 5:	/* cmovp/cmovpe */
			d |= (_eflags & EFLG_PF) ? Mov : 0;
			break;
		case 7:	/* cmovle/cmovng */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			d |= (!(_eflags & EFLG_SF) !=
			      !(_eflags & EFLG_OF)) ? Mov : 0;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		d ^= (b & 1) ? Mov : 0;
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		/* Always write back. The question is: where to? */
		d |= Mov;
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xa3:
	      bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xb3:
	      btr:		/* btr */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xbb:
	      btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	dst.orig_val = dst.val;
	switch (b) {
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		break;
	case 0x20: /* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22: /* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
#if defined(__i386__)
		{
			unsigned long old_lo, old_hi;
			if (((rc = ops->read_emulated(cr2 + 0, &old_lo, 4,
						      ctxt)) != 0)
			    || ((rc = ops->read_emulated(cr2 + 4, &old_hi, 4,
							 ctxt)) != 0))
				goto done;
			if ((old_lo != _regs[VCPU_REGS_RAX])
			    || (old_hi != _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = old_lo;
				_regs[VCPU_REGS_RDX] = old_hi;
				_eflags &= ~EFLG_ZF;
			} else if (ops->cmpxchg8b_emulated == NULL) {
				rc = X86EMUL_UNHANDLEABLE;
				goto done;
			} else {
				if ((rc = ops->cmpxchg8b_emulated(cr2, old_lo,
							old_hi,
							_regs[VCPU_REGS_RBX],
							_regs[VCPU_REGS_RCX],
							ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#elif defined(CONFIG_X86_64)
		{
			unsigned long old, new;
			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = (_regs[VCPU_REGS_RCX] << 32) | (u32) _regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, old,
								new, 8, ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#endif
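	/*
	 * Worked example (illustrative, not in the original): cmpxchg8b
	 * compares EDX:EAX with the 8 bytes at cr2.  If they match,
	 * ECX:EBX is stored there and ZF is set; otherwise EDX:EAX is
	 * loaded with the memory value and ZF is cleared, exactly as
	 * both branches above implement.
	 */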
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
	return -1;
}
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
		     unsigned long *val,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	int rc;

	*val = 0;
	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, 0);	/* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}
int
x86_emulate_write_std(unsigned long addr,
		      unsigned long val,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	int rc;

	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}