/******************************************************************************
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#include "kvm.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
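
/*
 * Worked example of how the flags combine (an illustration, not extra
 * table state): opcode 0x88 is "mov r/m8,reg8", so its entry below is
 * ByteOp | DstMem | SrcReg | ModRM | Mov -- a byte-sized operation with
 * a memory destination, a register source, a ModRM byte to decode, and
 * a destination that is only written, never read.
 */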
static u8 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x4F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, DstMem | SrcNone | ModRM | Mov,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM, 0, 0,
	0, 0, ByteOp | DstMem | SrcImm | ModRM | Mov,
	DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/*
 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
 * are interested only in invlpg and not in any of the rest.
 *
 * invlpg is a special instruction in that the data it references may not
 * be mapped.
 */
void kvm_emulator_want_group7_invlpg(void)
{
	twobyte_table[1] &= ~SrcMem;
}
EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg);
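
/*
 * Note: twobyte_table[1] is the Group 7 entry (opcode 0x0f 0x01).
 * Clearing SrcMem stops the generic decode below from fetching the
 * memory operand up front, which matters because the page that invlpg
 * names may itself be unmapped.
 */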
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */	\
	"push %"_sav"; "					\
	"movl %"_msk",%"_LO32 _tmp"; "				\
	"andl %"_LO32 _tmp",("_STK"); "				\
	"pushf; "						\
	"notl %"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "				\
	"pop  %"_tmp"; "					\
	"orl  %"_LO32 _tmp",("_STK"); "				\
	"popf; "						\
	/* _sav &= ~msk; */					\
	"movl %"_msk",%"_LO32 _tmp"; "				\
	"notl %"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",%"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
do { \
	unsigned long _tmp; \
	\
	switch ((_dst).bytes) { \
	case 2: \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"w %"_wx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			  "=&r" (_tmp) \
			: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
		break; \
	case 4: \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"l %"_lx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			  "=&r" (_tmp) \
			: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
		break; \
	case 8: \
		__emulate_2op_8byte(_op, _src, _dst, \
				    _eflags, _qx, _qy); \
		break; \
	} \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
do { \
	unsigned long _tmp; \
	switch ( (_dst).bytes ) \
	{ \
	case 1: \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","4","2") \
			_op"b %"_bx"3,%1; " \
			_POST_EFLAGS("0","4","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			  "=&r" (_tmp) \
			: _by ((_src).val), "i" (EFLAGS_MASK) ); \
		break; \
	default: \
		__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
				     _wx, _wy, _lx, _ly, _qx, _qy); \
		break; \
	} \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
do { \
	unsigned long _tmp; \
	\
	switch ( (_dst).bytes ) \
	{ \
	case 1: \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"b %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			  "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
		break; \
	case 2: \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"w %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			  "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
		break; \
	case 4: \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0","3","2") \
			_op"l %1; " \
			_POST_EFLAGS("0","3","2") \
			: "=m" (_eflags), "=m" ((_dst).val), \
			  "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) ); \
		break; \
	case 8: \
		__emulate_1op_8byte(_op, _dst, _eflags); \
		break; \
	} \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
do { \
	__asm__ __volatile__ ( \
		_PRE_EFLAGS("0","4","2") \
		_op"q %"_qx"3,%1; " \
		_POST_EFLAGS("0","4","2") \
		: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
		: _qy ((_src).val), "i" (EFLAGS_MASK) ); \
} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
do { \
	__asm__ __volatile__ ( \
		_PRE_EFLAGS("0","3","2") \
		_op"q %1; " \
		_POST_EFLAGS("0","3","2") \
		: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
		: "i" (EFLAGS_MASK) ); \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
			   (_size), ctxt); \
	if ( rc != 0 ) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
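
/*
 * Note: insn_fetch() is a statement expression that yields the fetched
 * value, e.g. "b = insn_fetch(u8, 1, _eip);" reads one byte at
 * cs_base + _eip and advances _eip. It relies on the caller's local
 * "rc", "ops", "ctxt" and a "done" label for error exits.
 */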
/* Access/update address held in a register, based on addressing mode. */
#define register_address(base, reg) \
	((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) : \
		   ((reg) & ((1UL << (ad_bytes << 3)) - 1))))

#define register_address_increment(reg, inc) \
do { \
	/* signed type ensures sign extension to long */ \
	int _inc = (inc); \
	if ( ad_bytes == sizeof(unsigned long) ) \
		(reg) += _inc; \
	else \
		(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
			(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
} while (0)
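
/*
 * Example: with ad_bytes == 2 only the low 16 bits of the register
 * take part in addressing, so incrementing SI == 0xffff by 1 wraps it
 * to 0x0000 while the upper bits of the full register are preserved.
 */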
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
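
/*
 * Example: with no REX prefix, register numbers 4-7 in byte-sized
 * operations name AH/CH/DH/BH, i.e. byte 1 of RAX/RCX/RDX/RBX, which
 * is why decode_register() points into regs[modrm_reg & 3] + 1 there.
 */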
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2, ctxt);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes, ctxt);
	return rc;
}
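
/*
 * For reference: a descriptor-table operand is a 16-bit limit followed
 * by the base address; with a 16-bit operand size only 24 bits of base
 * are architecturally defined, hence the op_bytes == 2 -> 3 adjustment
 * above.
 */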
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned d;
	u8 b, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}
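
	/*
	 * Note: even in 64-bit mode the default operand size stays 4
	 * bytes (only REX.W promotes it to 8 below), while the default
	 * address size becomes 8 bytes.
	 */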
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			break;
		default:
			goto done_prefixes;
		}
	}

done_prefixes:

	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REG.B */
		b = insn_fetch(u8, 1, _eip);
	}
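
	/*
	 * Example: a REX prefix has the form 0100WRXB, so 0x48 sets only
	 * REX.W (8-byte operands). The R/X/B bits are pre-shifted into
	 * bit 3 of modrm_reg/index_reg/modrm_rm here, so the ModRM and
	 * SIB decode below just ORs in its 3-bit fields.
	 */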
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];
		}

		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}

	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;

		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			goto modrm_done;
		}
		if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0:
				modrm_ea += bx + si;
				break;
			case 1:
				modrm_ea += bx + di;
				break;
			case 2:
				modrm_ea += bp + si;
				break;
			case 3:
				modrm_ea += bp + di;
				break;
			case 4:
				modrm_ea += si;
				break;
			case 5:
				modrm_ea += di;
				break;
			case 6:
				if (modrm_mod != 0)
					modrm_ea += bp;
				break;
			case 7:
				modrm_ea += bx;
				break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				if (!override_base)
					override_base = &ctxt->ss_base;
			modrm_ea = (u16)modrm_ea;
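
			/*
			 * The eight rm encodings above are the classic
			 * 16-bit bases: BX+SI, BX+DI, BP+SI, BP+DI, SI,
			 * DI, BP (or disp16 when mod == 0) and BX; the
			 * BP-based forms default to the stack segment.
			 */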
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(u8, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;

				switch (base_reg) {
				case 5:
					if (modrm_mod != 0)
						modrm_ea += _regs[base_reg];
					else
						modrm_ea += insn_fetch(s32, 4, _eip);
					break;
				default:
					modrm_ea += _regs[base_reg];
				}
				switch (index_reg) {
				case 4:
					break;
				default:
					modrm_ea += _regs[index_reg] << scale;
				}
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;
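
		/*
		 * SIB example: the effective address is
		 * base + index * 2^scale + displacement; index 4 (no
		 * index) and the mod == 0, base 5 disp32-only form are
		 * the special cases handled above.
		 */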
		if (rip_relative) {
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else if (op_bytes == 8)
					modrm_ea += 4;
				else
					modrm_ea += op_bytes;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (u32)modrm_ea;
		cr2 = modrm_ea;
	modrm_done:
		;
	}
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		src.type = OP_REG;
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
	      srcmem_common:
		src.type = OP_MEM;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes, ctxt)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		dst.type = OP_REG;
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.type = OP_MEM;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (d & BitOp) {
			dst.ptr += src.val / BITS_PER_LONG;
			dst.bytes = sizeof(long);
		}
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes, ctxt)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
853 emulate_2op_SrcV("add", src
, dst
, _eflags
);
857 emulate_2op_SrcV("or", src
, dst
, _eflags
);
861 emulate_2op_SrcV("adc", src
, dst
, _eflags
);
865 emulate_2op_SrcV("sbb", src
, dst
, _eflags
);
869 emulate_2op_SrcV("and", src
, dst
, _eflags
);
873 emulate_2op_SrcV("sub", src
, dst
, _eflags
);
877 emulate_2op_SrcV("xor", src
, dst
, _eflags
);
881 emulate_2op_SrcV("cmp", src
, dst
, _eflags
);
883 case 0x63: /* movsxd */
884 if (mode
!= X86EMUL_MODE_PROT64
)
886 dst
.val
= (s32
) src
.val
;
888 case 0x80 ... 0x83: /* Grp1 */
910 emulate_2op_SrcV("test", src
, dst
, _eflags
);
912 case 0x86 ... 0x87: /* xchg */
913 /* Write back the register source. */
916 *(u8
*) src
.ptr
= (u8
) dst
.val
;
919 *(u16
*) src
.ptr
= (u16
) dst
.val
;
922 *src
.ptr
= (u32
) dst
.val
;
923 break; /* 64b reg: zero-extend */
929 * Write back the memory destination with implicit LOCK
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0x88 ... 0x8b:	/* mov */
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		dst.val = src.val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
962 emulate_2op_SrcB("rol", src
, dst
, _eflags
);
965 emulate_2op_SrcB("ror", src
, dst
, _eflags
);
968 emulate_2op_SrcB("rcl", src
, dst
, _eflags
);
971 emulate_2op_SrcB("rcr", src
, dst
, _eflags
);
973 case 4: /* sal/shl */
974 case 6: /* sal/shl */
975 emulate_2op_SrcB("sal", src
, dst
, _eflags
);
978 emulate_2op_SrcB("shr", src
, dst
, _eflags
);
981 emulate_2op_SrcB("sar", src
, dst
, _eflags
);
985 case 0xd0 ... 0xd1: /* Grp2 */
988 case 0xd2 ... 0xd3: /* Grp2 */
989 src
.val
= _regs
[VCPU_REGS_RCX
];
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.type = OP_IMM;
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 3:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     dst.val, dst.bytes, ctxt)) != 0)
				goto done;
			dst.val = dst.orig_val;	/* skanky: disable writeback */
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}
writeback:
	if ((d & Mov) || (dst.orig_val != dst.val)) {
		switch (dst.type) {
		case OP_REG:
			/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
			switch (dst.bytes) {
			case 1:
				*(u8 *)dst.ptr = (u8)dst.val;
				break;
			case 2:
				*(u16 *)dst.ptr = (u16)dst.val;
				break;
			case 4:
				*dst.ptr = (u32)dst.val;
				break;	/* 64b: zero-ext */
			case 8:
				*dst.ptr = dst.val;
				break;
			}
			break;
		case OP_MEM:
			if (lock_prefix)
				rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
							   dst.orig_val,
							   dst.val, dst.bytes,
							   ctxt);
			else
				rc = ops->write_emulated((unsigned long)dst.ptr,
							 dst.val, dst.bytes,
							 ctxt);
			if (rc != 0)
				goto done;
			break;
		default:
			break;
		}
	}
	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							_regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
			     override_base ? *override_base : ctxt->ds_base,
			     _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
			   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
			   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
			   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.type = OP_REG;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
			   (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	}
	goto writeback;
twobyte_insn:
	switch (b) {
	case 0x01:	/* lgdt, lidt, lmsw */
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2:	/* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3:	/* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4:	/* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6:	/* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7:	/* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21: /* mov from dr to reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23: /* mov from reg to dr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		d &= ~Mov;	/* default to no move */
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			d |= (_eflags & EFLG_OF) ? Mov : 0;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			d |= (_eflags & EFLG_CF) ? Mov : 0;
			break;
		case 2:	/* cmovz/cmove */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			break;
		case 3:	/* cmovbe/cmovna */
			d |= (_eflags & (EFLG_CF | EFLG_ZF)) ? Mov : 0;
			break;
		case 4:	/* cmovs */
			d |= (_eflags & EFLG_SF) ? Mov : 0;
			break;
		case 5:	/* cmovp/cmovpe */
			d |= (_eflags & EFLG_PF) ? Mov : 0;
			break;
		case 7:	/* cmovle/cmovng */
			d |= (_eflags & EFLG_ZF) ? Mov : 0;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			d |= (!(_eflags & EFLG_SF) !=
			      !(_eflags & EFLG_OF)) ? Mov : 0;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		d ^= (b & 1) ? Mov : 0;
		break;
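
	/*
	 * Example: 0x0f 0x44 is cmove (move when ZF is set) and
	 * 0x0f 0x45 is cmovne; both land in "case 2" above, with the
	 * trailing "d ^= (b & 1) ? Mov : 0" flipping the sense for the
	 * odd opcode.
	 */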
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		/* Always write back. The question is: where to? */
		d |= Mov;
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xa3:
	      bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xb3:
	      btr:		/* btr */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xbb:
	      btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1;	/* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	dst.orig_val = dst.val;
	switch (b) {
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		break;
	case 0x20: /* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22: /* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
#if defined(__i386__)
		{
			unsigned long old_lo, old_hi;
			if (((rc = ops->read_emulated(cr2 + 0, &old_lo, 4,
						      ctxt)) != 0)
			    || ((rc = ops->read_emulated(cr2 + 4, &old_hi, 4,
							 ctxt)) != 0))
				goto done;
			if ((old_lo != _regs[VCPU_REGS_RAX])
			    || (old_hi != _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = old_lo;
				_regs[VCPU_REGS_RDX] = old_hi;
				_eflags &= ~EFLG_ZF;
			} else if (ops->cmpxchg8b_emulated == NULL) {
				rc = X86EMUL_UNHANDLEABLE;
				goto done;
			} else {
				if ((rc = ops->cmpxchg8b_emulated(cr2, old_lo,
							old_hi,
							_regs[VCPU_REGS_RBX],
							_regs[VCPU_REGS_RCX],
							ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#elif defined(CONFIG_X86_64)
		{
			unsigned long old, new;
			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = (_regs[VCPU_REGS_RCX] << 32) | (u32) _regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, old,
								new, 8, ctxt)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
#endif
	}
	goto writeback;
);
1381 #include <asm/uaccess.h>
1384 x86_emulate_read_std(unsigned long addr
,
1386 unsigned int bytes
, struct x86_emulate_ctxt
*ctxt
)
1392 if ((rc
= copy_from_user((void *)val
, (void *)addr
, bytes
)) != 0) {
1393 propagate_page_fault(addr
+ bytes
- rc
, 0); /* read fault */
1394 return X86EMUL_PROPAGATE_FAULT
;
1397 return X86EMUL_CONTINUE
;
1401 x86_emulate_write_std(unsigned long addr
,
1403 unsigned int bytes
, struct x86_emulate_ctxt
*ctxt
)
1407 if ((rc
= copy_to_user((void *)addr
, (void *)&val
, bytes
)) != 0) {
1408 propagate_page_fault(addr
+ bytes
- rc
, PGERR_write_access
);
1409 return X86EMUL_PROPAGATE_FAULT
;
1412 return X86EMUL_CONTINUE
;