/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#include "kvm.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
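/*
 * Example (editorial note, not in the original source): opcode 0x88,
 * "mov r/m8,r8", appears in opcode_table[] below as
 *
 *	ByteOp | DstMem | SrcReg | ModRM | Mov
 *
 * i.e. an 8-bit operation whose destination is decoded from the ModRM r/m
 * field, whose source is the ModRM reg field, and whose destination is
 * write-only, letting the emulator skip the read of the old value.
 */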
static u8 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x4F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x50 - 0x57 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x58 - 0x5F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x60 - 0x6F */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, 0,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/*
 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
 * are interested only in invlpg and not in any of the rest.
 *
 * invlpg is a special instruction in that the data it references may not
 * be mapped.
 */
void kvm_emulator_want_group7_invlpg(void)
{
	twobyte_table[1] &= ~SrcMem;
}
EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
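/*
 * Worked example (editorial, not in the original source): emulating the
 * byte add 0x7f + 0x01 = 0x80 must set OF (signed overflow), SF (bit 7 of
 * the result) and AF (carry out of bit 3), and clear ZF, CF and PF --
 * exactly the bits the inline-assembly fragments below pick up from the
 * host EFLAGS after running the real instruction.
 */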
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
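/*
 * Editorial note (not in the original source): the mask covers exactly the
 * six arithmetic status flags, so control bits such as DF, IF and TF are
 * never read from, or written back to, the guest's saved EFLAGS by the
 * helpers below.
 */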
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"push %"_sav"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pop %"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	/* _sav &= ~msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_LO32 _tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"b %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"w %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"l %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK) ); \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"q %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
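/*
 * Usage sketch (editorial, not in the original source):
 * "modrm = insn_fetch(u8, 1, _eip);" reads one byte at CS:_eip through
 * ops->read_std(), advances _eip past it, and bails out to the 'done'
 * label if the fetch fails.
 */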
/* Access/update address held in a register, based on addressing mode. */
#define register_address(base, reg) \
	((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
		   ((reg) & ((1UL << (ad_bytes << 3)) - 1))))
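/*
 * Worked example (editorial, not in the original source): with
 * ad_bytes == 2 (16-bit addressing), register_address(0x10000, 0xdead1234)
 * masks the register value down to 0x1234 and yields 0x11234; with
 * ad_bytes == sizeof(unsigned long) the register is used unmasked.
 */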
#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		int _inc = (inc); \
		if ( ad_bytes == sizeof(unsigned long) ) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
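/*
 * Worked example (editorial, not in the original source): with
 * ad_bytes == 2 and reg == 0x1000ffff, an increment of +1 wraps only the
 * low 16 bits, giving 0x10000000 -- 16-bit SI/DI arithmetic wraps the way
 * a real 8086 would, while the untouched high bits are preserved.
 */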
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
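/*
 * Example (editorial, not in the original source): with highbyte_regs set,
 * modrm_reg == 4 selects AH, i.e. byte 1 of the RAX slot
 * (regs[4 & 3]), matching the legacy encoding where reg fields 4-7 name
 * AH, CH, DH and BH.
 */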
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
	return rc;
}
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned d;
	u8 b, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int no_wb = 0;
	u64 msr_data;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;
	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			break;
		default:
			goto done_prefixes;
		}
	}

done_prefixes:
	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
		b = insn_fetch(u8, 1, _eip);
	}
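	/*
	 * Example (editorial, not in the original source): prefix byte
	 * 0x4c = 0100 1100b sets REX.W and REX.R, so op_bytes becomes 8 and
	 * the ModRM reg field gains a high bit (modrm_reg += 8), reaching
	 * r8-r15.
	 */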
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];
		}

		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}
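	/*
	 * ModRM layout reminder (editorial, not in the original source):
	 * the byte is mm rrr bbb, e.g. 0xd8 = 11 011 000b gives mod=3
	 * (register-direct), reg=3, rm=0.
	 */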
	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;

		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			goto modrm_done;
		}

		if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0:
				modrm_ea += bx + si;
				break;
			case 1:
				modrm_ea += bx + di;
				break;
			case 2:
				modrm_ea += bp + si;
				break;
			case 3:
				modrm_ea += bp + di;
				break;
			case 4:
				modrm_ea += si;
				break;
			case 5:
				modrm_ea += di;
				break;
			case 6:
				if (modrm_mod != 0)
					modrm_ea += bp;
				break;
			case 7:
				modrm_ea += bx;
				break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				if (!override_base)
					override_base = &ctxt->ss_base;
			modrm_ea = (u16)modrm_ea;
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(u8, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;

				switch (base_reg) {
				case 5:
					if (modrm_mod != 0)
						modrm_ea += _regs[base_reg];
					else
						modrm_ea += insn_fetch(s32, 4, _eip);
					break;
				default:
					modrm_ea += _regs[base_reg];
				}
				switch (index_reg) {
				case 4:
					break;
				default:
					modrm_ea += _regs[index_reg] << scale;
				}
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;
		if (rip_relative) {
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else
					if (op_bytes == 8)
						modrm_ea += 4;
					else
						modrm_ea += op_bytes;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (u32)modrm_ea;
modrm_done:
		;
	}
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		src.type = OP_REG;
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
srcmem_common:
		src.type = OP_MEM;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		dst.type = OP_REG;
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.type = OP_MEM;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (d & BitOp) {
			unsigned long mask = ~(dst.bytes * 8 - 1);

			dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
		}
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;

	if (twobyte)
		goto twobyte_insn;

	switch (b) {
	case 0x00 ... 0x05:
	add:			/* add */
		emulate_2op_SrcV("add", src, dst, _eflags);
		break;
	case 0x08 ... 0x0d:
	or:			/* or */
		emulate_2op_SrcV("or", src, dst, _eflags);
		break;
	case 0x10 ... 0x15:
	adc:			/* adc */
		emulate_2op_SrcV("adc", src, dst, _eflags);
		break;
	case 0x18 ... 0x1d:
	sbb:			/* sbb */
		emulate_2op_SrcV("sbb", src, dst, _eflags);
		break;
	case 0x20 ... 0x25:
	and:			/* and */
		emulate_2op_SrcV("and", src, dst, _eflags);
		break;
	case 0x28 ... 0x2d:
	sub:			/* sub */
		emulate_2op_SrcV("sub", src, dst, _eflags);
		break;
	case 0x30 ... 0x35:
	xor:			/* xor */
		emulate_2op_SrcV("xor", src, dst, _eflags);
		break;
	case 0x38 ... 0x3d:
	cmp:			/* cmp */
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		break;
	case 0x63:		/* movsxd */
		if (mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		dst.val = (s32) src.val;
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
	test:			/* test */
		emulate_2op_SrcV("test", src, dst, _eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (dst.bytes) {
		case 1:
			*(u8 *) src.ptr = (u8) dst.val;
			break;
		case 2:
			*(u16 *) src.ptr = (u16) dst.val;
			break;
		case 4:
			*src.ptr = (u32) dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*src.ptr = dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		dst.val = src.val;
		lock_prefix = 1;
		break;
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		dst.val = src.val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
	case 0xc0 ... 0xc1:
	grp2:			/* Grp2 */
		switch (modrm_reg) {
		case 0:	/* rol */
			emulate_2op_SrcB("rol", src, dst, _eflags);
			break;
		case 1:	/* ror */
			emulate_2op_SrcB("ror", src, dst, _eflags);
			break;
		case 2:	/* rcl */
			emulate_2op_SrcB("rcl", src, dst, _eflags);
			break;
		case 3:	/* rcr */
			emulate_2op_SrcB("rcr", src, dst, _eflags);
			break;
		case 4:	/* sal/shl */
		case 6:	/* sal/shl */
			emulate_2op_SrcB("sal", src, dst, _eflags);
			break;
		case 5:	/* shr */
			emulate_2op_SrcB("shr", src, dst, _eflags);
			break;
		case 7:	/* sar */
			emulate_2op_SrcB("sar", src, dst, _eflags);
			break;
		}
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		src.val = 1;
		goto grp2;
	case 0xd2 ... 0xd3:	/* Grp2 */
		src.val = _regs[VCPU_REGS_RCX];
		goto grp2;
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.type = OP_IMM;
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 3:	/* not */
			dst.val = ~dst.val;
			break;
		case 4:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     &dst.val, dst.bytes, ctxt)) != 0)
				goto done;
			no_wb = 1;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}

writeback:
	if (!no_wb) {
		switch (dst.type) {
		case OP_REG:
			/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
			switch (dst.bytes) {
			case 1:
				*(u8 *)dst.ptr = (u8)dst.val;
				break;
			case 2:
				*(u16 *)dst.ptr = (u16)dst.val;
				break;
			case 4:
				*dst.ptr = (u32)dst.val;
				break;	/* 64b: zero-ext */
			case 8:
				*dst.ptr = dst.val;
				break;
			}
			break;
		case OP_MEM:
			if (lock_prefix)
				rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
							   &dst.orig_val,
							   &dst.val, dst.bytes,
							   ctxt);
			else
				rc = ops->write_emulated((unsigned long)dst.ptr,
							 &dst.val, dst.bytes,
							 ctxt);
			if (rc != 0)
				goto done;
		default:
			break;
		}
	}
	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;

special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
			      override_base ? *override_base : ctxt->ds_base,
			      _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.type = OP_REG;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
					   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xf4:		/* hlt */
		ctxt->vcpu->halt_request = 1;
		goto done;
	case 0xc3:		/* ret */
		dst.ptr = &_eip;
		goto pop_instruction;
	case 0x58 ... 0x5f:	/* pop reg */
		dst.ptr = (unsigned long *)&_regs[b & 0x7];

pop_instruction:
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
			_regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt)) != 0)
			goto done;

		register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
		no_wb = 1;	/* Disable writeback. */
		break;
	}
	goto writeback;

twobyte_insn:
	switch (b) {
	case 0x01:	/* lgdt, lidt, lmsw */
		/* Disable writeback. */
		no_wb = 1;
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2:	/* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3:	/* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4:	/* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6:	/* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7:	/* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21:	/* mov from dr to reg */
		no_wb = 1;
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23:	/* mov from reg to dr */
		no_wb = 1;
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		d &= ~Mov;	/* default to no move */
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			d |= (_eflags & EFLG_OF) ? Mov : 0;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			d |= (_eflags & EFLG_CF) ? Mov : 0;
			break;
		case 2:	/* cmovz/cmove */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			break;
		case 3:	/* cmovbe/cmovna */
			d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
			break;
		case 4:	/* cmovs */
			d |= (_eflags & EFLG_SF) ? Mov : 0;
			break;
		case 5:	/* cmovp/cmovpe */
			d |= (_eflags & EFLG_PF) ? Mov : 0;
			break;
		case 7:	/* cmovle/cmovng */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			d |= (!(_eflags & EFLG_SF) !=
			      !(_eflags & EFLG_OF)) ? Mov : 0;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		d ^= (b & 1) ? Mov : 0;
		break;
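	/*
	 * Example (editorial, not in the original source): 0x44 (cmovz)
	 * sets Mov when ZF=1; its odd sibling 0x45 (cmovnz) decodes
	 * identically and then has its sense flipped by the XOR above.
	 */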
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		/* Always write back. The question is: where to? */
		d |= Mov;
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xa3:
	bt:			/* bt */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xb3:
	btr:			/* btr */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xab:
	bts:			/* bts */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xbb:
	btc:			/* btc */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	no_wb = 1;
	switch (b) {
	case 0x09:		/* wbinvd */
		break;
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		break;
	case 0x20:	/* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22:	/* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)_regs[VCPU_REGS_RAX]
			| ((u64)_regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		} else {
			_regs[VCPU_REGS_RAX] = (u32)msr_data;
			_regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		break;
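	/*
	 * Editorial note (not in the original source): cmpxchg8b compares
	 * EDX:EAX with the 64-bit memory operand; on a match it stores
	 * ECX:EBX there and sets ZF, otherwise it loads the memory value
	 * into EDX:EAX and clears ZF -- which is exactly what the two
	 * branches below implement.
	 */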
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		{
			u64 old, new;
			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = ((u64)_regs[VCPU_REGS_RCX] << 32)
					| (u32) _regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, &old,
								&new, 8, ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
	}
	goto writeback;
cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
	return -1;
}
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
		     unsigned long *val,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	*val = 0;

	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, 0);	/* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}
int
x86_emulate_write_std(unsigned long addr,
		      unsigned long val,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}