/******************************************************************************
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)	/* Memory operand is absolute displacement */
#define String      (1<<10)	/* String instruction (rep capable) */
#define Stack       (1<<11)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
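/*
 * Example: the opcode 0x00 entry below, ByteOp | DstMem | SrcReg | ModRM,
 * describes 'add r/m8, r8' -- a byte operation whose destination is decoded
 * from the ModRM r/m field and whose source is the ModRM reg field.
 */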
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, 0, 0,
	SrcMem | ModRM, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
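/*
 * Note: these are exactly the six arithmetic status flags. IF, DF, TF and
 * the system flags stay outside the mask, so an emulated instruction can
 * never perturb them through the save/restore sequence below.
 */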
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)		\
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_LO32 _tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								    \
		unsigned long _tmp;					    \
									    \
		switch ((_dst).bytes) {					    \
		case 2:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"w %"_wx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _wy ((_src).val), "i" (EFLAGS_MASK));	    \
			break;						    \
		case 4:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"l %"_lx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _ly ((_src).val), "i" (EFLAGS_MASK));	    \
			break;						    \
		case 8:							    \
			__emulate_2op_8byte(_op, _src, _dst,		    \
					    _eflags, _qx, _qy);		    \
			break;						    \
		}							    \
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long __tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			__asm__ __volatile__ (				     \
				_PRE_EFLAGS("0", "4", "2")		     \
				_op"b %"_bx"3,%1; "			     \
				_POST_EFLAGS("0", "4", "2")		     \
				: "=m" (_eflags), "=m" ((_dst).val),	     \
				  "=&q" (__tmp)				     \
				: _by ((_src).val), "i" (EFLAGS_MASK));	     \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)	  \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "4", "2")			  \
			_op"q %"_qx"3,%1; "				  \
			_POST_EFLAGS("0", "4", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		  \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)				  \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "3", "2")			  \
			_op"q %1; "					  \
			_POST_EFLAGS("0", "3", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				  \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)(_x);							\
})
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
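/*
 * ad_mask() yields 0xffff for ad_bytes == 2 and 0xffffffff for 4; callers
 * test ad_bytes == sizeof(unsigned long) first, so the undefined
 * full-width shift is never evaluated.
 */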
/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}
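/*
 * The masked update means e.g. a 16-bit 'inc si' wraps at 0xffff while
 * leaving the upper bits of the underlying 64-bit register untouched.
 */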
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
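		/*
		 * Refill the cache: an x86 instruction is at most 15 bytes
		 * long, and the read is clipped so it never crosses a page
		 * boundary.
		 */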
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
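/*
 * Without a REX prefix, byte registers 4-7 select AH/CH/DH/BH, i.e.
 * byte 1 of RAX/RCX/RDX/RBX -- hence the "+ 1" above.
 */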
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
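/*
 * e.g. condition 0x5 (jnz) is condition 0x4 (jz) with the lsb set, so the
 * result of the ZF test above comes back inverted.
 */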
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}
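	/*
	 * ModRM byte layout: mod[7:6] reg[5:3] rm[2:0]; the REX bits above
	 * widen the reg, index and rm fields to four bits each.
	 */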
	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
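			/* SIB byte layout: scale[7:6] index[5:3] base[2:0]. */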
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */
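	/*
	 * e.g. a 0x48 prefix (REX with the W bit set) turns a 32-bit
	 * operation into its 64-bit form.
	 */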
	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;
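		/*
		 * Each group occupies eight consecutive group_table entries,
		 * one per value of the ModRM reg field, hence the
		 * (group << 3) index below.
		 */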
		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 :
						 c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
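/*
 * Queue a push: pre-decrement RSP by the operand size and aim the
 * destination at the new SS:RSP so that writeback() stores the value.
 */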
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}

static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(c, ss_base(ctxt),
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);
	return rc;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return rc;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;
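	/*
	 * cmpxchg8b: compare EDX:EAX with the 64-bit memory operand; if
	 * equal, store ECX:EBX there and set ZF, otherwise load the old
	 * value into EDX:EAX and clear ZF.
	 */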
	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}

	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
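			/*
			 * Adjust the operand address by the bit offset:
			 * e.g. 'bts [mem], 100' with a 4-byte operand
			 * actually touches the dword at mem + 12.
			 */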
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						(c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;
1353 emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
1357 emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
1361 emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
1365 emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
1369 emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
1371 case 0x24: /* and al imm8 */
1372 c->dst.type = OP_REG;
1373 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1374 c->dst.val = *(u8 *)c->dst.ptr;
1376 c->dst.orig_val = c->dst.val;
1378 case 0x25: /* and ax imm16, or eax imm32 */
1379 c->dst.type = OP_REG;
1380 c->dst.bytes = c->op_bytes;
1381 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1382 if (c->op_bytes == 2)
1383 c->dst.val = *(u16 *)c->dst.ptr;
1385 c->dst.val = *(u32 *)c->dst.ptr;
1386 c->dst.orig_val = c->dst.val;
1390 emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
1394 emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
1398 emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
1400 case 0x40 ... 0x47: /* inc r16/r32 */
1401 emulate_1op("inc", c->dst, ctxt->eflags);
1403 case 0x48 ... 0x4f: /* dec r16/r32 */
1404 emulate_1op("dec", c->dst, ctxt->eflags);
1406 case 0x50 ... 0x57: /* push reg */
1407 c->dst.type = OP_MEM;
1408 c->dst.bytes = c->op_bytes;
1409 c->dst.val = c->src.val;
1410 register_address_increment(c, &c->regs[VCPU_REGS_RSP],
1412 c->dst.ptr = (void *) register_address(
1413 c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
1415 case 0x58 ... 0x5f: /* pop reg */
1417 if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
1418 c->regs[VCPU_REGS_RSP]), c->dst.ptr,
1419 c->op_bytes, ctxt->vcpu)) != 0)
1422 register_address_increment(c, &c->regs[VCPU_REGS_RSP],
1424 c->dst.type = OP_NONE; /* Disable writeback. */
1426 case 0x63: /* movsxd */
1427 if (ctxt->mode != X86EMUL_MODE_PROT64)
1428 goto cannot_emulate;
1429 c->dst.val = (s32) c->src.val;
1431 case 0x68: /* push imm */
1432 case 0x6a: /* push imm8 */
1435 case 0x6c: /* insb */
1436 case 0x6d: /* insw/insd */
1437 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1439 (c->d & ByteOp) ? 1 : c->op_bytes,
1441 address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
1442 (ctxt->eflags & EFLG_DF),
1443 register_address(c, es_base(ctxt),
1444 c->regs[VCPU_REGS_RDI]),
1446 c->regs[VCPU_REGS_RDX]) == 0) {
1451 case 0x6e: /* outsb */
1452 case 0x6f: /* outsw/outsd */
1453 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1455 (c->d & ByteOp) ? 1 : c->op_bytes,
1457 address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
1458 (ctxt->eflags & EFLG_DF),
1460 seg_override_base(ctxt, c),
1461 c->regs[VCPU_REGS_RSI]),
1463 c->regs[VCPU_REGS_RDX]) == 0) {
1468 case 0x70 ... 0x7f: /* jcc (short) */ {
1469 int rel = insn_fetch(s8, 1, c->eip);
1471 if (test_cc(c->b, ctxt->eflags))
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
					seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
					es_base(ctxt),
					c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->src.bytes
							 : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
					seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				    (c->d & ByteOp) ? 1 : c->op_bytes,
				    port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:	/* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);
done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:		/* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
					       (s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
						  (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}