/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)	/* Memory operand is absolute displacement */
#define String      (1<<10)	/* String instruction (rep capable) */
#define Stack       (1<<11)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */

enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
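/*
 * Example of reading the tables below: entry 0x00 ('add r/m8, r8') decodes
 * as ByteOp | DstMem | SrcReg | ModRM, i.e. an 8-bit operation whose source
 * is the ModRM 'reg' field and whose destination is the ModRM-addressed
 * register or memory operand.
 */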
static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	0, 0, ImplicitOps | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, ModRM | DstReg, 0, Group | Group1A,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps, 0, SrcImmByte | ImplicitOps,
	0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, Group | Group4, Group | Group5,
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, 0, 0,
	SrcMem | ModRM, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};

static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* 32-bit operands are the default */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
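/*
 * For example, after emulating an 'add', the arithmetic flags in
 * EFLAGS_MASK (OF, SF, ZF, AF, PF, CF) are harvested from the host EFLAGS
 * produced by the inline assembly and merged into the guest's saved value;
 * bits outside the mask (IF, DF, ...) are never taken from the host.
 */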
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)	\
	/* _sav |= EFLAGS & _msk; */	\
	"pushf; "			\
	"pop %"_LO32 _tmp"; "		\
	"andl %"_msk",%"_LO32 _tmp"; "	\
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"w %"_wx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _wy ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "4", "2")		\
				_op"l %"_lx"3,%1; "			\
				_POST_EFLAGS("0", "4", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _ly ((_src).val), "i" (EFLAGS_MASK));	\
			break;						\
		case 8:							\
			__emulate_2op_8byte(_op, _src, _dst,		\
					    _eflags, _qx, _qy);		\
			break;						\
		}							\
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long __tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			__asm__ __volatile__ (				     \
				_PRE_EFLAGS("0", "4", "2")		     \
				_op"b %"_bx"3,%1; "			     \
				_POST_EFLAGS("0", "4", "2")		     \
				: "=m" (_eflags), "=m" ((_dst).val),	     \
				  "=&q" (__tmp)				     \
				: _by ((_src).val), "i" (EFLAGS_MASK));	     \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)		\
	__emulate_2op(_op, _src, _dst, _eflags,			\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)		\
	__emulate_2op(_op, _src, _dst, _eflags,			\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)	\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,		\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)	  \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "4", "2")			  \
			_op"q %"_qx"3,%1; "				  \
			_POST_EFLAGS("0", "4", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		  \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)				  \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "3", "2")			  \
			_op"q %1; "					  \
			_POST_EFLAGS("0", "3", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				  \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
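/*
 * Example: with a 16-bit address size (c->ad_bytes == 2), ad_mask() yields
 * 0xffff, so register_address_increment() on SI == 0xffff wraps to 0x0000
 * instead of carrying into the upper register bits, matching real-mode
 * semantics.
 */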
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	/* eip is relative to the code segment base. */
	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
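/*
 * Example: modrm_reg == 5 with highbyte_regs selects CH, i.e. byte 1 of
 * the RCX slot (regs[5 & 3]), whereas without highbyte_regs it selects
 * the RBP slot directly.
 */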
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
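/*
 * Example: 'je' (condition code 4) tests ZF via case 2 above; 'jne'
 * (condition code 5) runs the same test and the low bit inverts the
 * result.
 */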
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0: c->modrm_ea += bx + si; break;
		case 1: c->modrm_ea += bx + di; break;
		case 2: c->modrm_ea += bp + si; break;
		case 3: c->modrm_ea += bp + di; break;
		case 4: c->modrm_ea += si; break;
		case 5: c->modrm_ea += di; break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7: c->modrm_ea += bx; break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->override_base)
				c->override_base = &ctxt->ss_base;
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		switch (c->modrm_rm) {
		case 4:
		case 12:
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if (base_reg == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
			break;
		case 5:
			if (c->modrm_mod != 0)
				c->modrm_ea += c->regs[c->modrm_rm];
			else if (ctxt->mode == X86EMUL_MODE_PROT64)
				rip_relative = 1;
			break;
		default:
			c->modrm_ea += c->regs[c->modrm_rm];
			break;
		}
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
	if (rip_relative) {
		c->modrm_ea += c->eip;
		switch (c->d & SrcMask) {
		case SrcImmByte:
			c->modrm_ea += 1;
			break;
		case SrcImm:
			if (c->d & ByteOp)
				c->modrm_ea += 1;
			else if (c->op_bytes == 8)
				c->modrm_ea += 4;
			else
				c->modrm_ea += c->op_bytes;
		}
	}
done:
	return rc;
}
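/*
 * Worked example (32-bit address size): ModRM byte 0x44 decodes as mod=01,
 * reg=000, rm=100.  rm=100 requests a SIB byte and mod=01 appends an 8-bit
 * displacement, so the effective address computed above is
 * base + (index << scale) + disp8.
 */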
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */
	memset(c, 0, sizeof(struct decode_cache));
	c->eip = ctxt->vcpu->arch.rip;
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x2e:	/* CS override */
			c->override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			c->override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			c->override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			c->override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			c->override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			c->override_base = &ctxt->ss_base;
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */
		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix & 8)
		c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->override_base)
		c->override_base = &ctxt->ds_base;
	if (mode == X86EMUL_MODE_PROT64 &&
	    c->override_base != &ctxt->fs_base &&
	    c->override_base != &ctxt->gs_base)
		c->override_base = NULL;

	if (c->override_base)
		c->modrm_ea += *c->override_base;

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
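/*
 * On successful return, c->d holds the opcode's flag word and c->src/c->dst
 * describe the operands; x86_emulate_insn() below consumes this decode
 * cache to perform the actual operand fetch, execution and writeback.
 */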
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ctxt->ss_base,
					       c->regs[VCPU_REGS_RSP]);
}

static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(c, ctxt->ss_base,
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags); break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags); break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags); break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags); break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags); break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags); break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags); break;
	}
}

static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}

static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;
	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
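/*
 * Grp9 is cmpxchg8b: if EDX:EAX equals the 8 bytes at memop, ECX:EBX is
 * written there and ZF is set; otherwise the old value is loaded into
 * EDX:EAX and ZF is cleared.
 */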
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1: *(u8 *)c->dst.ptr = (u8)c->dst.val; break;
		case 2: *(u16 *)c->dst.ptr = (u16)c->dst.val; break;
		case 4: *c->dst.ptr = (u32)c->dst.val; break; /* 64b: zero-ext */
		case 8: *c->dst.ptr = c->dst.val; break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val, &c->dst.val,
					c->dst.bytes, ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val, c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
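/*
 * Note the OP_MEM path above: a write with the LOCK prefix (explicit, or
 * implied by xchg) goes through cmpxchg_emulated() against dst.orig_val,
 * so a concurrent guest write to the same location is detected instead of
 * being silently overwritten.
 */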
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->arch.rip = c->eip;
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = ctxt->vcpu->arch.rip;
	}
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val, c->src.bytes, ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val, c->dst.bytes,
					      ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x24:		/* and al imm8 */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		c->dst.val = *(u8 *)c->dst.ptr;
		c->dst.bytes = 1;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x25:		/* and ax imm16, or eax imm32 */
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		if (c->op_bytes == 2)
			c->dst.val = *(u16 *)c->dst.ptr;
		else
			c->dst.val = *(u32 *)c->dst.ptr;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		c->dst.type  = OP_MEM;
		c->dst.bytes = c->op_bytes;
		c->dst.val = c->src.val;
		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   -c->op_bytes);
		c->dst.ptr = (void *) register_address(
			c, ctxt->ss_base, c->regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		if ((rc = ops->read_std(register_address(c, ctxt->ss_base,
			c->regs[VCPU_REGS_RSP]), c->dst.ptr,
			c->op_bytes, ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   c->op_bytes);
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x6a: /* push imm8 */
		c->src.val = insn_fetch(s8, 1, c->eip);
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, ctxt->es_base,
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, c->override_base ?
							*c->override_base :
							ctxt->ds_base,
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:	goto add;
		case 1:	goto or;
		case 2:	goto adc;
		case 3:	goto sbb;
		case 4:	goto and;
		case 5:	goto sub;
		case 6:	goto xor;
		case 7:	goto cmp;
		}
		break;
	case 0x84 ... 0x85:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1: *(u8 *) c->src.ptr = (u8) c->dst.val; break;
		case 2: *(u16 *) c->src.ptr = (u16) c->dst.val; break;
		case 4: *c->src.ptr = (u32) c->dst.val; break; /* 64b reg: zero-extend */
		case 8: *c->src.ptr = c->dst.val; break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   ctxt->es_base,
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
				c->override_base ? *c->override_base :
						   ctxt->ds_base,
				c->regs[VCPU_REGS_RSI]),
				&c->dst.val,
				c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
				c->override_base ? *c->override_base :
						   ctxt->ds_base,
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   ctxt->es_base,
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
							   : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   ctxt->es_base,
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
				c->override_base ? *c->override_base :
						   ctxt->ds_base,
				c->regs[VCPU_REGS_RSI]),
				&c->dst.val, c->dst.bytes,
				ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xc0 ... 0xc1:	/* Grp2 */
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe8: /* call (near) */ {
		long int rel;

		switch (c->op_bytes) {
		case 2: rel = insn_fetch(s16, 2, c->eip); break;
		case 4: rel = insn_fetch(s32, 4, c->eip); break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
	case 0xeb: /* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	ctxt->vcpu->arch.rip = c->eip;

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = ctxt->vcpu->arch.rip;
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06: /* clts */
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc */ {
		long int rel;

		switch (c->op_bytes) {
		case 2: rel = insn_fetch(s16, 2, c->eip); break;
		case 4: rel = insn_fetch(s32, 4, c->eip); break;
		case 8: rel = insn_fetch(s64, 8, c->eip); break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:	goto bt;
		case 1:	goto bts;
		case 2:	goto btr;
		case 3:	goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val
					     : (s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val
						: (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}