/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)	/* Memory operand is absolute displacement */
#define String      (1<<10)	/* String instruction (rep capable) */
#define Stack       (1<<11)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
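
/*
 * Worked example (illustrative): opcode 0x00 is "add r/m8, r8" and its
 * opcode_table entry below is ByteOp | DstMem | SrcReg | ModRM, which the
 * decoder reads as:
 *
 *	c->d & ByteOp              -> operands are one byte wide
 *	(c->d & DstMask) == DstMem -> destination comes from the ModRM r/m field
 *	(c->d & SrcMask) == SrcReg -> source comes from the ModRM reg field
 *	c->d & ModRM               -> a ModRM byte (plus SIB/disp) follows
 */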
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u16 opcode_table[256] = {
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcImm | Mov, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, Group | Group4, Group | Group5,
};
static u16 twobyte_table[256] = {
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, 0, 0,
	SrcMem | ModRM, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
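
/*
 * Sketch of the approach (illustrative): to emulate a flag-producing
 * instruction such as "add", the macros below execute a real ADD on the
 * decoded operands while the guest's arithmetic flags are live in EFLAGS:
 *
 *	emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
 *
 * The generated asm restores the guest flags, runs the instruction, and
 * writes the changed flag bits back into ctxt->eflags.
 */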
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */	\
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_sav"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)		\
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_LO32 _tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"w %"_wx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _wy ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"l %"_lx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _ly ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 8:							\
			__emulate_2op_8byte(_op, _src, _dst,		\
					    _eflags, _qx, _qy);		\
			break;						\
		}							\
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long __tmp;					\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"b %"_bx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (__tmp)				\
				: _by ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		default:						\
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op"q %"_qx"3,%1; "				\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)				\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op"q %1; "					\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				\
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)(_x);							\
})
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}
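
/*
 * Worked example (illustrative values): with a 16-bit address size
 * (c->ad_bytes == 2), ad_mask(c) is 0xffff.  For *reg == 0x1234ffff and
 * inc == 1, register_address_increment() computes
 *
 *	(0x1234ffff & ~0xffff) | ((0x1234ffff + 1) & 0xffff) == 0x12340000
 *
 * i.e. only the low 16 bits wrap, as a real 16-bit address register would.
 */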
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}

static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
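
/*
 * Example (illustrative): with highbyte_regs set, modrm_reg values 4-7
 * select AH/CH/DH/BH.  modrm_reg == 4 (AH) yields
 * (unsigned char *)&regs[4 & 3] + 1, i.e. byte 1 (bits 8-15) of RAX.
 */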
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
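
/*
 * Example (illustrative): "jne" encodes condition 5 in its low nibble, so
 * (5 & 15) >> 1 selects case 2 and rc = flags & EFLG_ZF; the odd low bit
 * then inverts the sense, making test_cc() true exactly when ZF is clear.
 */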
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}
	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
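
		/*
		 * Worked example (illustrative bytes): ModRM 0x46 followed
		 * by an 0x10 displacement has mod == 1 and rm == 6, so
		 * modrm_ea = bp + (s8)0x10, and because the base is BP the
		 * default segment becomes SS unless an override prefix was
		 * seen.
		 */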
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
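
/*
 * Worked example (illustrative bytes): ModRM 0x04 (mod == 0, rm == 4)
 * pulls in a SIB byte; SIB 0x88 encodes scale == 2, index == 1 (RCX) and
 * base == 0 (RAX), giving modrm_ea = regs[RAX] + (regs[RCX] << 2) with no
 * displacement, since mod == 0 and the base is not 5.
 */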
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = ctxt->vcpu->arch.rip;
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;
	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix & 8)
		c->op_bytes = 8;	/* REX.W */
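
	/*
	 * Note on the XOR trick above (illustrative arithmetic):
	 * def_op_bytes is either 2 or 4, and 2 ^ 6 == 4 while 4 ^ 6 == 2,
	 * so "^ 6" flips between the two legal sizes; likewise 4 ^ 12 == 8
	 * and 8 ^ 12 == 4 for the address size in long mode.  REX.W,
	 * handled just above, then forces a 64-bit operand size regardless
	 * of any 0x66 prefix.
	 */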
	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}
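
	/*
	 * Example (illustrative): opcode 0xf7 is entered in opcode_table[]
	 * as Group | Group3.  If the ModRM reg field is 3 ("neg"), the
	 * lookup index becomes (Group3 << 3) + 3, i.e. the fourth entry of
	 * the Group3 row in group_table[].
	 */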
	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(c, ss_base(ctxt),
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);
	return rc;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
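
/*
 * Semantics recap (illustrative): cmpxchg8b compares EDX:EAX with the
 * 64-bit memory operand; on a match it stores ECX:EBX there and sets ZF,
 * otherwise it loads the old value into EDX:EAX and clears ZF - exactly
 * the two branches above.
 */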
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val, &c->dst.val,
					c->dst.bytes, ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
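
/*
 * Example (illustrative): in 64-bit mode "mov $1, %eax" must clear bits
 * 63:32 of RAX, so the 4-byte register case above stores through the full
 * unsigned long (*c->dst.ptr = (u32)c->dst.val) rather than writing only
 * the low 32 bits in place.
 */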
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;
	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->arch.rip = c->eip;
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = ctxt->vcpu->arch.rip;
	}
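
	/*
	 * Example (illustrative): for "rep movsb" with RCX == 3 this block
	 * lets the movs case below run once per element, decrementing RCX
	 * and leaving rip on the same instruction so the emulator re-enters
	 * it; "repe cmpsb" additionally stops as soon as the compare clears
	 * ZF.
	 */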
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val, c->src.bytes, ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;
	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
1340 emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
1344 emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
1348 emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
1352 emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
1356 emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
1358 case 0x24: /* and al imm8 */
1359 c->dst.type = OP_REG;
1360 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1361 c->dst.val = *(u8 *)c->dst.ptr;
1363 c->dst.orig_val = c->dst.val;
1365 case 0x25: /* and ax imm16, or eax imm32 */
1366 c->dst.type = OP_REG;
1367 c->dst.bytes = c->op_bytes;
1368 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1369 if (c->op_bytes == 2)
1370 c->dst.val = *(u16 *)c->dst.ptr;
1372 c->dst.val = *(u32 *)c->dst.ptr;
1373 c->dst.orig_val = c->dst.val;
1377 emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
1381 emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
1385 emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		c->dst.type  = OP_MEM;
		c->dst.bytes = c->op_bytes;
		c->dst.val = c->src.val;
		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   -c->op_bytes);
		c->dst.ptr = (void *) register_address(
			c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
			c->regs[VCPU_REGS_RSP]), c->dst.ptr,
			c->op_bytes, ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   c->op_bytes);
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val =  (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
						&c->dst.val,
						c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
						   seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
							   : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb8: /* mov r, imm */
		goto mov;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	ctxt->vcpu->arch.rip = c->eip;

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = ctxt->vcpu->arch.rip;
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}