/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
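/*
 * Illustrative note: each entry in the tables below ORs these bits together,
 * so an entry such as ByteOp | DstMem | SrcReg | ModRM reads as "8-bit
 * operation, memory destination, register source, ModRM byte follows the
 * opcode".
 */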
static u8 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x4F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x50 - 0x57 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x58 - 0x5F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x60 - 0x6F */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/*
 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
 * are interested only in invlpg and not in any of the rest.
 *
 * invlpg is a special instruction in that the data it references may not
 * be mapped.
 */
void kvm_emulator_want_group7_invlpg(void)
{
	twobyte_table[1] &= ~SrcMem;
}
EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"push %"_sav"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pop  %"_tmp"; " \
	"orl  %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	/* _sav &= ~msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop  %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","4","2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0","4","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK) ); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		switch ( (_dst).bytes ) \
		{ \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"b %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"w %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0","3","2") \
				_op"l %1; " \
				_POST_EFLAGS("0","3","2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				  "=&r" (_tmp) \
				: "i" (EFLAGS_MASK) ); \
			break; \
		case 8: \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK) ); \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"q %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
	} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt); \
	if ( rc != 0 ) \
		goto done; \
	(_eip) += (_size); \
	(_type)(_x); \
})
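/*
 * Typical use in the decode loop below: b = insn_fetch(u8, 1, _eip);
 * reads one opcode byte from CS:_eip, advances _eip, and yields the byte.
 */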
/* Access/update address held in a register, based on addressing mode. */
#define register_address(base, reg) \
	((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
		   ((reg) & ((1UL << (ad_bytes << 3)) - 1))))

#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		int _inc = (inc); \
		if ( ad_bytes == sizeof(unsigned long) ) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
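/*
 * Worked example (illustrative): with ad_bytes == 2 the mask
 * (1UL << 16) - 1 == 0xffff, so only the low 16 bits of the register take
 * part in address formation and wrap around on increment, exactly as a
 * 16-bit %si/%di would; the upper register bits are preserved.
 */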
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
	return rc;
}
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned d;
	u8 b, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int no_wb = 0;
	u64 msr_data;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
	default:
		return -1;
	}
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			break;
		default:
			goto done_prefixes;
		}
	}

done_prefixes:
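	/*
	 * Note on the XOR trick above: op_bytes ^= 6 toggles between 2 and 4
	 * (2 ^ 6 == 4, 4 ^ 6 == 2), and ad_bytes ^= 12 toggles between 4 and
	 * 8, so each override prefix simply flips the default size.
	 */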
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = 1;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
		b = insn_fetch(u8, 1, _eip);
	}
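	/*
	 * REX layout recap: 0100WRXB. W selects 64-bit operands; R, X and B
	 * supply the fourth bit of the ModRM reg, SIB index and base/rm
	 * fields respectively (hence the shifts above).
	 */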
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];
		}

		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}
	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;
		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			use_modrm_ea = 0;
		} else if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0:
				modrm_ea += bx + si;
				break;
			case 1:
				modrm_ea += bx + di;
				break;
			case 2:
				modrm_ea += bp + si;
				break;
			case 3:
				modrm_ea += bp + di;
				break;
			case 4:
				modrm_ea += si;
				break;
			case 5:
				modrm_ea += di;
				break;
			case 6:
				if (modrm_mod != 0)
					modrm_ea += bp;
				break;
			case 7:
				modrm_ea += bx;
				break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				if (!override_base)
					override_base = &ctxt->ss_base;
			modrm_ea = (u16)modrm_ea;
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(u8, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;

				switch (base_reg) {
				case 5:
					if (modrm_mod != 0)
						modrm_ea += _regs[base_reg];
					else
						modrm_ea += insn_fetch(s32, 4, _eip);
					break;
				default:
					modrm_ea += _regs[base_reg];
				}
				switch (index_reg) {
				case 4:
					break;
				default:
					modrm_ea += _regs[index_reg] << scale;
				}
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;
		/* Special case: rip-relative addressing. */
		if (rip_relative) {
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else
					modrm_ea += op_bytes;
				break;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (u32)modrm_ea;
		cr2 = modrm_ea;
	}
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		src.type = OP_REG;
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
	      srcmem_common:
		src.type = OP_MEM;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		dst.type = OP_REG;
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.type = OP_MEM;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (d & BitOp) {
			unsigned long mask = ~(dst.bytes * 8 - 1);

			dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
		}
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
860 emulate_2op_SrcV("add", src
, dst
, _eflags
);
864 emulate_2op_SrcV("or", src
, dst
, _eflags
);
868 emulate_2op_SrcV("adc", src
, dst
, _eflags
);
872 emulate_2op_SrcV("sbb", src
, dst
, _eflags
);
876 emulate_2op_SrcV("and", src
, dst
, _eflags
);
880 emulate_2op_SrcV("sub", src
, dst
, _eflags
);
884 emulate_2op_SrcV("xor", src
, dst
, _eflags
);
888 emulate_2op_SrcV("cmp", src
, dst
, _eflags
);
890 case 0x63: /* movsxd */
891 if (mode
!= X86EMUL_MODE_PROT64
)
893 dst
.val
= (s32
) src
.val
;
895 case 0x80 ... 0x83: /* Grp1 */
917 emulate_2op_SrcV("test", src
, dst
, _eflags
);
919 case 0x86 ... 0x87: /* xchg */
920 /* Write back the register source. */
923 *(u8
*) src
.ptr
= (u8
) dst
.val
;
926 *(u16
*) src
.ptr
= (u16
) dst
.val
;
929 *src
.ptr
= (u32
) dst
.val
;
930 break; /* 64b reg: zero-extend */
936 * Write back the memory destination with implicit LOCK
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		dst.val = src.val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
969 emulate_2op_SrcB("rol", src
, dst
, _eflags
);
972 emulate_2op_SrcB("ror", src
, dst
, _eflags
);
975 emulate_2op_SrcB("rcl", src
, dst
, _eflags
);
978 emulate_2op_SrcB("rcr", src
, dst
, _eflags
);
980 case 4: /* sal/shl */
981 case 6: /* sal/shl */
982 emulate_2op_SrcB("sal", src
, dst
, _eflags
);
985 emulate_2op_SrcB("shr", src
, dst
, _eflags
);
988 emulate_2op_SrcB("sar", src
, dst
, _eflags
);
992 case 0xd0 ... 0xd1: /* Grp2 */
995 case 0xd2 ... 0xd3: /* Grp2 */
996 src
.val
= _regs
[VCPU_REGS_RCX
];
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.type = OP_IMM;
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 2:	/* not */
			dst.val = ~dst.val;
			break;
		case 3:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     &dst.val, dst.bytes, ctxt)) != 0)
				goto done;
			no_wb = 1;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}
writeback:
	if (!no_wb) {
		switch (dst.type) {
		case OP_REG:
			/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
			switch (dst.bytes) {
			case 1:
				*(u8 *)dst.ptr = (u8)dst.val;
				break;
			case 2:
				*(u16 *)dst.ptr = (u16)dst.val;
				break;
			case 4:
				*dst.ptr = (u32)dst.val;
				break;	/* 64b: zero-ext */
			case 8:
				*dst.ptr = dst.val;
				break;
			}
			break;
		case OP_MEM:
			if (lock_prefix)
				rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
							   &dst.orig_val,
							   &dst.val, dst.bytes,
							   ctxt);
			else
				rc = ops->write_emulated((unsigned long)dst.ptr,
							 &dst.val, dst.bytes,
							 ctxt);
			if (rc != 0)
				goto done;
			break;
		default:
			break;
		}
	}
	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
				override_base ? *override_base : ctxt->ds_base,
				_regs[VCPU_REGS_RSI]),
				&dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.type = OP_REG;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
				(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xf4:		/* hlt */
		ctxt->vcpu->halt_request = 1;
		goto done;
	case 0xc3:		/* ret */
		dst.ptr = &_eip;
		goto pop_instruction;
	case 0x58 ... 0x5f:	/* pop reg */
		dst.ptr = (unsigned long *)&_regs[b & 0x7];

pop_instruction:
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					dst.ptr, op_bytes, ctxt)) != 0)
			goto done;

		register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
		no_wb = 1;	/* Disable writeback. */
		break;
	}
	goto writeback;

twobyte_insn:
	switch (b) {
	case 0x01:	/* lgdt, lidt, lmsw */
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3: /* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4: /* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21: /* mov from dr to reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23: /* mov from reg to dr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		d &= ~Mov;	/* default to no move */
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			d |= (_eflags & EFLG_OF) ? Mov : 0;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			d |= (_eflags & EFLG_CF) ? Mov : 0;
			break;
		case 2:	/* cmovz/cmove */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			break;
		case 3:	/* cmovbe/cmovna */
			d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
			break;
		case 4:	/* cmovs */
			d |= (_eflags & EFLG_SF) ? Mov : 0;
			break;
		case 5:	/* cmovp/cmovpe */
			d |= (_eflags & EFLG_PF) ? Mov : 0;
			break;
		case 7:	/* cmovle/cmovng */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			d |= (!(_eflags & EFLG_SF) !=
			      !(_eflags & EFLG_OF)) ? Mov : 0;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		d ^= (b & 1) ? Mov : 0;
		break;
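		/*
		 * Example: b == 0x44 (cmove) sets Mov iff ZF is set; its odd
		 * twin 0x45 (cmovne) takes the same path and then has its
		 * sense flipped by the XOR above.
		 */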
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		/* Always write back. The question is: where to? */
		d |= Mov;
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xa3:
	      bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xb3:
	      btr:		/* btr */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xbb:
	      btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	no_wb = 1;
	switch (b) {
	case 0x09:		/* wbinvd */
		break;
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		break;
	case 0x06:		/* clts */
		emulate_clts(ctxt->vcpu);
		break;
	case 0x20: /* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22: /* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0x30:		/* wrmsr */
		msr_data = (u32)_regs[VCPU_REGS_RAX]
			| ((u64)_regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0x32:		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_arch_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		} else {
			_regs[VCPU_REGS_RAX] = (u32)msr_data;
			_regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		{
			u64 old, new;
			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = ((u64)_regs[VCPU_REGS_RCX] << 32)
					| (u32) _regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, &old,
								&new, 8, ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
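	/*
	 * Semantics recap (illustrative): cmpxchg8b compares EDX:EAX with
	 * the 64-bit memory operand; on a match it stores ECX:EBX and sets
	 * ZF, otherwise it loads the old value into EDX:EAX and clears ZF.
	 */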
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
	return -1;
}
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
		     unsigned long *val,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	int rc;

	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, 0);	/* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}

int
x86_emulate_write_std(unsigned long addr,
		      unsigned long val,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	int rc;

	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}