/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
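/*
 * Illustrative reading of a table entry (not itself part of the decoder):
 * opcode 0x88 (mov r/m8, r8) appears below as
 *
 *	ByteOp | DstMem | SrcReg | ModRM | Mov
 *
 * i.e. an 8-bit operation whose destination comes from the ModRM r/m
 * field (register or memory), whose source is the ModRM reg field, and
 * which only writes the destination without reading its old value.
 */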
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcOne      (7<<4)	/* Implied '1' */
#define SrcMask     (7<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<8)
/* Destination is only written; never read. */
#define Mov         (1<<9)
#define MemAbs      (1<<10)	/* Memory operand is absolute displacement */
#define BitOp       (1<<11)	/* Source operand is a bit offset */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
/* Source 2 operand type */
#define Src2None    (0<<29)
#define Src2CL      (1<<29)
#define Src2ImmByte (2<<29)
#define Src2One     (3<<29)
#define Src2Mask    (7<<29)
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u32 opcode_table[256] = {
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u32 twobyte_table[256] = {
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u32 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u32 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
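/*
 * For illustration, a 32-bit "add" emulated through emulate_2op_SrcV()
 * expands to roughly this fragment (operand constraints elided):
 *
 *	_PRE_EFLAGS(...)	restore guest flags into host EFLAGS
 *	addl %src,%dst		perform the operation natively
 *	_POST_EFLAGS(...)	harvest the resulting arithmetic flags
 *
 * so the host CPU, not C code, computes the guest's flag results.
 */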
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  "_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)					\
	/* _sav |= EFLAGS & _msk; */					\
	"pushf; "							\
	"pop  %"_LO32 _tmp"; "						\
	"andl %"_msk",%"_LO32 _tmp"; "					\
	"orl  %"_LO32 _tmp",%"_sav"; "
#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif

#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: _y ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long _tmp;					     \
									     \
		switch ((_dst).bytes) {					     \
		case 2:							     \
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w");  \
			break;						     \
		case 4:							     \
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l");  \
			break;						     \
		case 8:							     \
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break;						     \
		}							     \
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long _tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b");  \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has three operands and one operand is stored in ECX register */
#define __emulate_2op_cl(_op, _cl, _src, _dst, _eflags, _suffix, _type)	\
	do {								\
		unsigned long _tmp;					\
		_type _clv  = (_cl).val;				\
		_type _srcv = (_src).val;				\
		_type _dstv = (_dst).val;				\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "5", "2")			\
			_op _suffix " %4,%1 \n"				\
			_POST_EFLAGS("0", "5", "2")			\
			: "=m" (_eflags), "+r" (_dstv), "=&r" (_tmp)	\
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK)	\
			);						\
									\
		(_cl).val  = (unsigned long) _clv;			\
		(_src).val = (unsigned long) _srcv;			\
		(_dst).val = (unsigned long) _dstv;			\
	} while (0)

#define emulate_2op_cl(_op, _cl, _src, _dst, _eflags)			\
	do {								\
		switch ((_dst).bytes) {					\
		case 2:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "w", unsigned short);		\
			break;						\
		case 4:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "l", unsigned int);		\
			break;						\
		case 8:							\
			ON64(__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					      "q", unsigned long));	\
			break;						\
		}							\
	} while (0)
#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "+m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		switch ((_dst).bytes) {					\
		case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;	\
		case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;	\
		case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;	\
		case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		}							\
	} while (0)
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)(_x);							\
})
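/*
 * Usage sketch, as seen throughout the decoder below: the statement
 * expression yields the fetched value and advances the instruction
 * pointer, while a fetch failure sets the caller's local "rc" and
 * jumps to its "done" label:
 *
 *	c->modrm = insn_fetch(u8, 1, c->eip);
 */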
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}

static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	/* In long mode only FS and GS have a non-zero segment base. */
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
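/*
 * Worked example (illustrative): with a 16-bit address size, c->ad_bytes
 * is 2 and ad_mask(c) is 0xffff, so register_address_increment() on a
 * register holding 0xffff with inc == 1 wraps the low 16 bits to 0x0000
 * while preserving the upper bits -- the 16-bit wraparound behaviour
 * real-mode string instructions rely on.
 */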
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
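/*
 * Example (illustrative): with no REX prefix, reg encodings 4-7 in a
 * byte operation select AH/CH/DH/BH, so decode_register(4, regs, 1)
 * returns a pointer to byte 1 of regs[0] (i.e. AH within RAX) rather
 * than to regs[4] (RSP).
 */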
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
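/*
 * Worked example (illustrative): for jnz (opcode 0x75), condition == 5,
 * so (condition & 15) >> 1 == 2 selects the ZF test, and the odd low
 * bit inverts the sense -- the condition holds exactly when ZF == 0.
 */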
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
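/*
 * Worked 16-bit example (illustrative): modrm == 0x47 decodes as
 * mod = 1, reg = 0, rm = 7, so the mod switch above fetches an 8-bit
 * displacement and the rm switch adds BX, giving
 * modrm_ea = (u16)(BX + disp8).
 */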
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 :
							   c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	case SrcOne:
		c->src.bytes = 1;
		c->src.val = 1;
		break;
	}
	/*
	 * Decode and fetch the second source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & Src2Mask) {
	case Src2None:
		break;
	case Src2CL:
		c->src2.bytes = 1;
		c->src2.val = c->regs[VCPU_REGS_RCX] & 0xff; /* CL is bits 7:0 of RCX */
		break;
	case Src2ImmByte:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 1;
		c->src2.val = insn_fetch(u8, 1, c->eip);
		break;
	case Src2One:
		c->src2.bytes = 1;
		c->src2.val = 1;
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	case DstAcc:
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
		case 1:
			c->dst.val = *(u8 *)c->dst.ptr;
			break;
		case 2:
			c->dst.val = *(u16 *)c->dst.ptr;
			break;
		case 4:
			c->dst.val = *(u32 *)c->dst.ptr;
			break;
		}
		c->dst.orig_val = c->dst.val;
		break;
	}
	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}

static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				&c->src.val, c->src.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->src.bytes);
	return rc;
}

static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	c->src.bytes = c->dst.bytes;
	rc = emulate_pop(ctxt, ops);
	if (rc != 0)
		return rc;
	c->dst.val = c->src.val;
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}

static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: /* call near abs */ {
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
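/*
 * This mirrors the architectural cmpxchg8b semantics: compare EDX:EAX
 * with the 64-bit memory operand; on mismatch load the old value into
 * EDX:EAX and clear ZF, on match store ECX:EBX and set ZF -- the two
 * arms of the if/else above.
 */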
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
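/*
 * Example of the OP_REG rule above (illustrative): a 4-byte register
 * write in 64-bit mode stores through the full unsigned long, clearing
 * bits 63:32 as hardware does, while 1- and 2-byte writes leave the
 * upper register bytes intact.
 */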
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}
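	/*
	 * Example (illustrative): "repe cmpsb" (f3 a6) therefore stops
	 * either when RCX reaches zero, or -- since 0xa6 is one of the
	 * compare/scan opcodes tested above -- as soon as a comparison
	 * clears ZF.
	 */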
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
1424 emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
1428 emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
1432 emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
1436 emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
1440 emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
1444 emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
1448 emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
1452 emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
1454 case 0x40 ... 0x47: /* inc r16/r32 */
1455 emulate_1op("inc", c->dst, ctxt->eflags);
1457 case 0x48 ... 0x4f: /* dec r16/r32 */
1458 emulate_1op("dec", c->dst, ctxt->eflags);
1460 case 0x50 ... 0x57: /* push reg */
1463 case 0x58 ... 0x5f: /* pop reg */
1465 c->src.bytes = c->op_bytes;
1466 rc = emulate_pop(ctxt, ops);
1469 c->dst.val = c->src.val;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
					c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) {	/* nop */
			c->dst.type = OP_NONE;  /* no writeback */
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					   seg_override_base(ctxt, c),
					   c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
				       seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
						 &c->dst.val,
						 c->dst.bytes,
						 ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf:  /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				    (c->d & ByteOp) ? 1 : c->op_bytes,
				    port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:		/* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
						  (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}