/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef __KERNEL__
#include <stdio.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcOne      (7<<4)	/* Implied '1' */
#define SrcMask     (7<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<8)
/* Destination is only written; never read. */
#define Mov         (1<<9)
#define BitOp       (1<<10)
#define MemAbs      (1<<11)	/* Memory operand is absolute displacement */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
/* Source 2 operand type */
#define Src2None    (0<<29)
#define Src2CL      (1<<29)
#define Src2ImmByte (2<<29)
#define Src2One     (3<<29)
#define Src2Mask    (7<<29)
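
/*
 * Worked example of how these bits compose: the two-byte SHLD entries in
 * twobyte_table[] below (0x0f 0xa4/0xa5) are built from
 * DstMem | SrcReg | Src2CL | ModRM -- a ModRM-decoded memory destination,
 * a register source, and CL as the second (shift-count) source operand.
 */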
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};

static u32 opcode_table[256] = {
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86-64) */ ,
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	0, 0, 0, ImplicitOps | Stack, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};

static u32 twobyte_table[256] = {
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

static u32 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};

static u32 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, SrcNone | ModRM,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};

/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; " \
	"push %"_tmp"; " \
	"push %"_tmp"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
	"pop  %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop  %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif

#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "4", "2") \
			_op _suffix " %"_x"3,%1; " \
			_POST_EFLAGS("0", "4", "2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			  "=&r" (_tmp) \
			: _y ((_src).val), "i" (EFLAGS_MASK)); \
	} while (0)

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 2: \
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w"); \
			break; \
		case 4: \
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l"); \
			break; \
		case 8: \
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break; \
		} \
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 1: \
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b"); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")

/* Instruction has three operands and one operand is stored in ECX register */
#define __emulate_2op_cl(_op, _cl, _src, _dst, _eflags, _suffix, _type) \
	do { \
		unsigned long _tmp; \
		_type _clv = (_cl).val; \
		_type _srcv = (_src).val; \
		_type _dstv = (_dst).val; \
		\
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "5", "2") \
			_op _suffix " %4,%1 \n" \
			_POST_EFLAGS("0", "5", "2") \
			: "=m" (_eflags), "+r" (_dstv), "=&r" (_tmp) \
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK) \
			); \
		\
		(_cl).val = (unsigned long) _clv; \
		(_src).val = (unsigned long) _srcv; \
		(_dst).val = (unsigned long) _dstv; \
	} while (0)

#define emulate_2op_cl(_op, _cl, _src, _dst, _eflags) \
	do { \
		switch ((_dst).bytes) { \
		case 2: \
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					 "w", unsigned short); \
			break; \
		case 4: \
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					 "l", unsigned int); \
			break; \
		case 8: \
			ON64(__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					      "q", unsigned long)); \
			break; \
		} \
	} while (0)

#define __emulate_1op(_op, _dst, _eflags, _suffix) \
	do { \
		unsigned long _tmp; \
		\
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "3", "2") \
			_op _suffix " %1; " \
			_POST_EFLAGS("0", "3", "2") \
			: "=m" (_eflags), "+m" ((_dst).val), \
			  "=&r" (_tmp) \
			: "i" (EFLAGS_MASK)); \
	} while (0)

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		switch ((_dst).bytes) { \
		case 1: __emulate_1op(_op, _dst, _eflags, "b"); break; \
		case 2: __emulate_1op(_op, _dst, _eflags, "w"); break; \
		case 4: __emulate_1op(_op, _dst, _eflags, "l"); break; \
		case 8: ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		} \
	} while (0)
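
/*
 * Example (see emulate_grp3() below): a one-operand NEG on the decoded
 * destination, again letting the hardware compute the flags:
 *
 *	emulate_1op("neg", c->dst, ctxt->eflags);
 */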
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size)); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)(_x); \
})

static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
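
/*
 * E.g. with 16-bit addressing (ad_bytes == 2) this yields
 * (1UL << 16) - 1 == 0xffff; with ad_bytes == 4 it yields 0xffffffff.
 */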
/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}
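
/*
 * The masking above preserves the untouched high bits of the register:
 * with 16-bit addressing, incrementing *reg == 0x1000ffff by 1 yields
 * 0x10000000 -- SI wraps to zero while bits 31:16 stay intact.
 */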
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}

static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}

static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}

/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
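
/*
 * Example: with highbyte_regs set, modrm_reg == 4 selects AH, i.e. the
 * second byte of the RAX slot (regs[4 & 3] == regs[0]); without it,
 * modrm_reg == 4 selects RSP/ESP/SP.
 */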
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}

static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
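
/*
 * Example: for JZ (0x74) and JNZ (0x75), (condition & 15) >> 1 == 2,
 * which selects the EFLG_ZF case; the low bit then leaves the sense
 * unchanged for 0x74 and inverts it for 0x75.
 */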
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}

static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
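
		/*
		 * Example 16-bit decode: mod == 1, rm == 2 is [bp+si+disp8];
		 * bp+si comes from the rm switch, the sign-extended byte
		 * displacement from the mod switch, and SS is the default
		 * segment per the override logic above.
		 */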
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
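
/*
 * Example SIB decode for the 32/64-bit branch above: sib == 0x98 gives
 * scale == 2, index == 3 (RBX) and base == 0 (RAX), so
 * modrm_ea = regs[RAX] + (regs[RBX] << 2), plus whatever displacement
 * modrm_mod selects.
 */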
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}

int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
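
		/*
		 * Example: opcode 0x80 carries Group | Group1_80 in
		 * opcode_table[]; with /7 in the ModRM reg field the lookup
		 * lands on the eighth Group1_80 entry, i.e. the
		 * ByteOp | DstMem | SrcImm | ModRM form used by CMP r/m8,imm8.
		 */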
969 DPRINTF("Cannot emulate %02x\n", c->b);
973 if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
976 /* ModRM and SIB bytes. */
978 rc = decode_modrm(ctxt, ops);
979 else if (c->d & MemAbs)
980 rc = decode_abs(ctxt, ops);
984 if (!c->has_seg_override)
985 set_seg_override(c, VCPU_SREG_DS);
987 if (!(!c->twobyte && c->b == 0x8d))
988 c->modrm_ea += seg_override_base(ctxt, c);
990 if (c->ad_bytes != 8)
991 c->modrm_ea = (u32)c->modrm_ea;
993 * Decode and fetch the source operand: register, memory
996 switch (c->d & SrcMask) {
1000 decode_register_operand(&c->src, c, 0);
1009 c->src.bytes = (c->d & ByteOp) ? 1 :
1011 /* Don't fetch the address for invlpg: it could be unmapped. */
1012 if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
1016 * For instructions with a ModR/M byte, switch to register
1017 * access if Mod = 3.
1019 if ((c->d & ModRM) && c->modrm_mod == 3) {
1020 c->src.type = OP_REG;
1021 c->src.val = c->modrm_val;
1022 c->src.ptr = c->modrm_ptr;
1025 c->src.type = OP_MEM;
1028 c->src.type = OP_IMM;
1029 c->src.ptr = (unsigned long *)c->eip;
1030 c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1031 if (c->src.bytes == 8)
1033 /* NB. Immediates are sign-extended as necessary. */
1034 switch (c->src.bytes) {
1036 c->src.val = insn_fetch(s8, 1, c->eip);
1039 c->src.val = insn_fetch(s16, 2, c->eip);
1042 c->src.val = insn_fetch(s32, 4, c->eip);
1047 c->src.type = OP_IMM;
1048 c->src.ptr = (unsigned long *)c->eip;
1050 c->src.val = insn_fetch(s8, 1, c->eip);
1059 * Decode and fetch the second source operand: register, memory
1062 switch (c->d & Src2Mask) {
1067 c->src2.val = c->regs[VCPU_REGS_RCX] & 0x8;
1070 c->src2.type = OP_IMM;
1071 c->src2.ptr = (unsigned long *)c->eip;
1073 c->src2.val = insn_fetch(u8, 1, c->eip);
1081 /* Decode and fetch the destination operand: register or memory. */
1082 switch (c->d & DstMask) {
1084 /* Special instructions do their own operand decoding. */
1087 decode_register_operand(&c->dst, c,
1088 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
1091 if ((c->d & ModRM) && c->modrm_mod == 3) {
1092 c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1093 c->dst.type = OP_REG;
1094 c->dst.val = c->dst.orig_val = c->modrm_val;
1095 c->dst.ptr = c->modrm_ptr;
1098 c->dst.type = OP_MEM;
1101 c->dst.type = OP_REG;
1102 c->dst.bytes = c->op_bytes;
1103 c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1104 switch (c->op_bytes) {
1106 c->dst.val = *(u8 *)c->dst.ptr;
1109 c->dst.val = *(u16 *)c->dst.ptr;
1112 c->dst.val = *(u32 *)c->dst.ptr;
1115 c->dst.orig_val = c->dst.val;
1119 if (c->rip_relative)
1120 c->modrm_ea += c->eip;
1123 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
1126 static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
1128 struct decode_cache *c = &ctxt->decode;
1130 c->dst.type = OP_MEM;
1131 c->dst.bytes = c->op_bytes;
1132 c->dst.val = c->src.val;
1133 register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
1134 c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
1135 c->regs[VCPU_REGS_RSP]);
1138 static int emulate_pop(struct x86_emulate_ctxt *ctxt,
1139 struct x86_emulate_ops *ops,
1140 void *dest, int len)
1142 struct decode_cache *c = &ctxt->decode;
1145 rc = ops->read_emulated(register_address(c, ss_base(ctxt),
1146 c->regs[VCPU_REGS_RSP]),
1147 dest, len, ctxt->vcpu);
1151 register_address_increment(c, &c->regs[VCPU_REGS_RSP], len);
1155 static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
1156 struct x86_emulate_ops *ops)
1158 struct decode_cache *c = &ctxt->decode;
1161 rc = emulate_pop(ctxt, ops, &c->dst.val, c->dst.bytes);
1167 static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
1169 struct decode_cache *c = &ctxt->decode;
1170 switch (c->modrm_reg) {
1172 emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
1175 emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
1178 emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
1181 emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
1183 case 4: /* sal/shl */
1184 case 6: /* sal/shl */
1185 emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
1188 emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
1191 emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
1196 static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
1197 struct x86_emulate_ops *ops)
1199 struct decode_cache *c = &ctxt->decode;
1202 switch (c->modrm_reg) {
1203 case 0 ... 1: /* test */
1204 emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
1207 c->dst.val = ~c->dst.val;
1210 emulate_1op("neg", c->dst, ctxt->eflags);
1213 DPRINTF("Cannot emulate %02x\n", c->b);
1214 rc = X86EMUL_UNHANDLEABLE;
1220 static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
1221 struct x86_emulate_ops *ops)
1223 struct decode_cache *c = &ctxt->decode;
1225 switch (c->modrm_reg) {
1227 emulate_1op("inc", c->dst, ctxt->eflags);
1230 emulate_1op("dec", c->dst, ctxt->eflags);
1232 case 2: /* call near abs */ {
1235 c->eip = c->src.val;
1236 c->src.val = old_eip;
1240 case 4: /* jmp abs */
1241 c->eip = c->src.val;
1250 static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
1251 struct x86_emulate_ops *ops,
1252 unsigned long memop)
1254 struct decode_cache *c = &ctxt->decode;
1258 rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
1262 if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
1263 ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
1265 c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1266 c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1267 ctxt->eflags &= ~EFLG_ZF;
1270 new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
1271 (u32) c->regs[VCPU_REGS_RBX];
1273 rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
1276 ctxt->eflags |= EFLG_ZF;
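
/*
 * The above mirrors CMPXCHG8B: if EDX:EAX matched the old 64-bit value,
 * the guest's ECX:EBX pair is written back and ZF is set; otherwise the
 * observed value is returned in EDX:EAX with ZF cleared.
 */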
static int emulate_ret_far(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;
	unsigned long cs;

	rc = emulate_pop(ctxt, ops, &c->eip, c->op_bytes);
	if (rc)
		return rc;
	if (c->op_bytes == 4)
		c->eip = (u32)c->eip;
	rc = emulate_pop(ctxt, ops, &cs, c->op_bytes);
	if (rc)
		return rc;
	rc = kvm_load_segment_descriptor(ctxt->vcpu, (u16)cs, 1, VCPU_SREG_CS);
	return rc;
}

static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}

int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test whether the repeat string operation prefix
		 * is REPE/REPZ or REPNE/REPNZ and, if so, check the
		 * corresponding termination condition:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}
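
	/*
	 * Example: "rep movsb" with RCX == 0 therefore retires immediately,
	 * while "repe cmpsb" (0xa6) stops as soon as a comparison clears ZF.
	 */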
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
				   /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					   &c->dst.val,
					  c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x25:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57: /* push reg */
		emulate_push(ctxt);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		rc = emulate_pop(ctxt, ops, &c->dst.val, c->op_bytes);
		if (rc != 0)
			goto done;
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
					seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					   seg_override_base(ctxt, c),
					   c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
				       seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
							   : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
						 &c->dst.val,
						 c->dst.bytes,
						 ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xcb:		/* ret far */
		rc = emulate_ret_far(ctxt, ops);
		if (rc)
			goto done;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		unsigned short sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				   (c->d & ByteOp) ? 1 : c->op_bytes,
				   port) == 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:	/* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3) {
				switch (c->modrm_rm) {
				case 1:
					rc = kvm_fix_hypercall(ctxt->vcpu);
					if (rc)
						goto done;
					break;
				default:
					goto cannot_emulate;
				}
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xa4: /* shld imm8, r, r/m */
	case 0xa5: /* shld cl, r, r/m */
		emulate_2op_cl("shld", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xac: /* shrd imm8, r, r/m */
	case 0xad: /* shrd cl, r, r/m */
		emulate_2op_cl("shrd", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:		/* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						     : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
						     (s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
						      (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}