/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcOne      (7<<4)	/* Implied '1' */
#define SrcMask     (7<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<8)
/* Destination is only written; never read. */
#define Mov         (1<<9)
#define BitOp       (1<<10)
#define MemAbs      (1<<11)	/* Memory operand is absolute displacement */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
/* Source 2 operand type */
#define Src2None    (0<<29)
#define Src2CL      (1<<29)
#define Src2ImmByte (2<<29)
#define Src2One     (3<<29)
#define Src2Imm16   (4<<29)
#define Src2Mask    (7<<29)
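/*
 * Each opcode_table[] entry below simply ORs these bits together.  For
 * example, the entry for opcode 0x88 (mov r/m8, r8) is
 * ByteOp | DstMem | SrcReg | ModRM | Mov: an 8-bit move whose destination
 * is decoded from the ModRM r/m field, whose source is the ModRM reg
 * field, and whose destination is only written, never read.
 */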
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u32 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, SrcImm | Src2Imm16, 0,
	ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, ImplicitOps | Stack, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u32 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM,
	ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u32 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};

static u32 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, SrcNone | ModRM,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
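/*
 * Numerically, EFLAGS_MASK works out to 0x8d5:
 * OF(0x800) | SF(0x80) | ZF(0x40) | AF(0x10) | PF(0x4) | CF(0x1).
 */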
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */	\
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)					\
	/* _sav |= EFLAGS & _msk; */					\
	"pushf; "							\
	"pop  %"_LO32 _tmp"; "						\
	"andl %"_msk",%"_LO32 _tmp"; "					\
	"orl  %"_LO32 _tmp",%"_sav"; "
#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif

#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: _y ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w"); \
			break;						\
		case 4:							\
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l"); \
			break;						\
		case 8:							\
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break;						\
		}							\
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
		switch ((_dst).bytes) {					\
		case 1:							\
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b"); \
			break;						\
		default:						\
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has three operands and one operand is stored in ECX register */
#define __emulate_2op_cl(_op, _cl, _src, _dst, _eflags, _suffix, _type) \
	do {								\
		unsigned long _tmp;					\
		_type _clv  = (_cl).val;				\
		_type _srcv = (_src).val;				\
		_type _dstv = (_dst).val;				\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "5", "2")			\
			_op _suffix " %4,%1 \n"				\
			_POST_EFLAGS("0", "5", "2")			\
			: "=m" (_eflags), "+r" (_dstv), "=&r" (_tmp)	\
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK)	\
			);						\
									\
		(_cl).val  = (unsigned long) _clv;			\
		(_src).val = (unsigned long) _srcv;			\
		(_dst).val = (unsigned long) _dstv;			\
	} while (0)
#define emulate_2op_cl(_op, _cl, _src, _dst, _eflags)			\
	do {								\
		switch ((_dst).bytes) {					\
		case 2:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "w", unsigned short);		\
			break;						\
		case 4:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "l", unsigned int);		\
			break;						\
		case 8:							\
			ON64(__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					      "q", unsigned long));	\
			break;						\
		}							\
	} while (0)
#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "+m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		switch ((_dst).bytes) {					\
		case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;	\
		case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;	\
		case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;	\
		case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		}							\
	} while (0)
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})
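/*
 * insn_fetch() is a statement expression: it yields the fetched value,
 * bumps _eip past the bytes consumed, and on a failed fetch sets rc and
 * jumps to the enclosing function's "done" label.  Typical use, as in
 * decode_modrm() below:
 *
 *	c->modrm = insn_fetch(u8, 1, c->eip);
 */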
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}
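/*
 * Example: with 16-bit addressing (ad_bytes == 2), ad_mask() is 0xffff,
 * so address_mask(c, 0x12345678) yields 0x5678 and register_address()
 * adds that truncated offset to the segment base.
 */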
static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}

static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}
static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	/* In long mode, segment bases other than FS/GS are ignored. */
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
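/*
 * Example: for jz (opcode 0x74), (0x74 & 15) >> 1 is 2, so rc tests ZF;
 * jnz (0x75) hits the same case but its odd low bit inverts the result.
 */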
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
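/*
 * Worked example (16-bit addressing): modrm byte 0x46 has mod=1, rm=6,
 * so the effective address above becomes BP + disp8, and because rm == 6
 * with non-zero mod the effective segment defaults to SS unless an
 * explicit override prefix was seen.
 */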
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
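/*
 * decode_abs() serves the MemAbs entries in opcode_table[]: opcodes
 * 0xa0-0xa3 (mov between the accumulator and a memory offset encoded
 * directly in the instruction).
 */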
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;
	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */
	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	/* lea (0x8d) wants the raw effective address, without segment base. */
	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	case SrcOne:
		c->src.bytes = 1;
		c->src.val = 1;
		break;
	}
	/*
	 * Decode and fetch the second source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & Src2Mask) {
	case Src2None:
		break;
	case Src2CL:
		c->src2.bytes = 1;
		/* CL is the low byte of RCX. */
		c->src2.val = c->regs[VCPU_REGS_RCX] & 0xff;
		break;
	case Src2ImmByte:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 1;
		c->src2.val = insn_fetch(u8, 1, c->eip);
		break;
	case Src2Imm16:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 2;
		c->src2.val = insn_fetch(u16, 2, c->eip);
		break;
	case Src2One:
		c->src2.bytes = 1;
		c->src2.val = 1;
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	case DstAcc:
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
		case 1:
			c->dst.val = *(u8 *)c->dst.ptr;
			break;
		case 2:
			c->dst.val = *(u16 *)c->dst.ptr;
			break;
		case 4:
			c->dst.val = *(u32 *)c->dst.ptr;
			break;
		}
		c->dst.orig_val = c->dst.val;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
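/*
 * Decode walkthrough (a sketch, not an exhaustive trace): for the bytes
 * 88 48 05 in 32-bit mode, opcode_table[0x88] gives
 * ByteOp | DstMem | SrcReg | ModRM | Mov; decode_modrm() sees mod=1,
 * reg=1 (CL), rm=0 (EAX) and computes modrm_ea = EAX + 5, so the
 * instruction is decoded as mov %cl, 5(%eax).
 */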
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}
static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops,
		       void *dest, int len)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				dest, len, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], len);
	return rc;
}
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = emulate_pop(ctxt, ops, &c->dst.val, c->dst.bytes);
	if (rc != 0)
		return rc;
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: /* call near abs */ {
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return rc;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		      (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
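/*
 * In other words: cmpxchg8b compares EDX:EAX with the 64-bit memory
 * operand; on mismatch it loads the old value into EDX:EAX and clears
 * ZF, on a match it stores ECX:EBX through cmpxchg_emulated() and sets
 * ZF.
 */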
static int emulate_ret_far(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;
	unsigned long cs;

	rc = emulate_pop(ctxt, ops, &c->eip, c->op_bytes);
	if (rc)
		return rc;
	if (c->op_bytes == 4)
		c->eip = (u32)c->eip;
	rc = emulate_pop(ctxt, ops, &cs, c->op_bytes);
	if (rc)
		return rc;
	rc = kvm_load_segment_descriptor(ctxt->vcpu, (u16)cs, 1, VCPU_SREG_CS);
	return rc;
}
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
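/*
 * Note the OP_MEM case above: a locked instruction is committed with
 * cmpxchg_emulated() against dst.orig_val, so the read-modify-write is
 * atomic with respect to other vcpus rather than a plain write.
 */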
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;
	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
					(c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;
1455 emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
1459 emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
1463 emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
1467 emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
1471 emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
1475 emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
1479 emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
1483 emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		emulate_push(ctxt);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		rc = emulate_pop(ctxt, ops, &c->dst.val, c->op_bytes);
		if (rc != 0)
			goto done;
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) {	/* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
						&c->dst.val,
						c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->src.bytes
							 : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				(ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							 : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:	/* Grp2 */
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xcb:		/* ret far */
		rc = emulate_ret_far(ctxt, ops);
		if (rc)
			goto done;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				    (c->d & ByteOp) ? 1 : c->op_bytes,
				    port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}
writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;
twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3) {
				switch (c->modrm_rm) {
				case 1:
					rc = kvm_fix_hypercall(ctxt->vcpu);
					if (rc)
						goto done;
					break;
				default:
					goto cannot_emulate;
				}
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:		/* clts */
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
			realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc */ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xa4: /* shld imm8, r, r/m */
	case 0xa5: /* shld cl, r, r/m */
		emulate_2op_cl("shld", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xac: /* shrd imm8, r, r/m */
	case 0xad: /* shrd cl, r, r/m */
		emulate_2op_cl("shrd", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val
					     : (s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val
						: (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}