1 /******************************************************************************
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
6 * Copyright (c) 2005 Keir Fraser
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
11 * Copyright (C) 2006 Qumranet
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
25 #include <public/xen.h>
26 #define DPRINTF(_f, _a ...) printf( _f , ## _a )
29 #define DPRINTF(x...) do {} while (0)
31 #include "x86_emulate.h"
32 #include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
43 /* Operand sizes: 8-bit operands or specified/overridden size. */
44 #define ByteOp (1<<0) /* 8-bit operands. */
45 /* Destination operand type. */
46 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
47 #define DstReg (2<<1) /* Register operand. */
48 #define DstMem (3<<1) /* Memory operand. */
49 #define DstMask (3<<1)
50 /* Source operand type. */
51 #define SrcNone (0<<3) /* No source operand. */
52 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
53 #define SrcReg (1<<3) /* Register operand. */
54 #define SrcMem (2<<3) /* Memory operand. */
55 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
56 #define SrcMem32 (4<<3) /* Memory operand (32-bit). */
57 #define SrcImm (5<<3) /* Immediate operand. */
58 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
59 #define SrcMask (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/* Source operand is a bit offset (bt/bts/btr/btc family). */
#define BitOp       (1<<8)
66 static u8 opcode_table
[256] = {
68 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
69 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
72 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
73 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
76 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
77 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
80 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
81 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
84 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
85 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
88 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
89 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
92 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
93 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
96 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
97 ByteOp
| DstReg
| SrcMem
| ModRM
, DstReg
| SrcMem
| ModRM
,
100 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
102 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
104 0, 0, 0, DstReg
| SrcMem32
| ModRM
| Mov
/* movsxd (x86/64) */ ,
105 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
107 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
109 ByteOp
| DstMem
| SrcImm
| ModRM
, DstMem
| SrcImm
| ModRM
,
110 ByteOp
| DstMem
| SrcImm
| ModRM
, DstMem
| SrcImmByte
| ModRM
,
111 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
112 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
,
114 ByteOp
| DstMem
| SrcReg
| ModRM
| Mov
, DstMem
| SrcReg
| ModRM
| Mov
,
115 ByteOp
| DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
116 0, 0, 0, DstMem
| SrcNone
| ModRM
| Mov
,
118 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
120 ByteOp
| DstReg
| SrcMem
| Mov
, DstReg
| SrcMem
| Mov
,
121 ByteOp
| DstMem
| SrcReg
| Mov
, DstMem
| SrcReg
| Mov
,
122 ByteOp
| ImplicitOps
| Mov
, ImplicitOps
| Mov
,
123 ByteOp
| ImplicitOps
, ImplicitOps
,
125 0, 0, ByteOp
| ImplicitOps
| Mov
, ImplicitOps
| Mov
,
126 ByteOp
| ImplicitOps
| Mov
, ImplicitOps
| Mov
,
127 ByteOp
| ImplicitOps
, ImplicitOps
,
129 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
131 ByteOp
| DstMem
| SrcImm
| ModRM
, DstMem
| SrcImmByte
| ModRM
, 0, 0,
132 0, 0, ByteOp
| DstMem
| SrcImm
| ModRM
| Mov
,
133 DstMem
| SrcImm
| ModRM
| Mov
,
135 0, 0, 0, 0, 0, 0, 0, 0,
137 ByteOp
| DstMem
| SrcImplicit
| ModRM
, DstMem
| SrcImplicit
| ModRM
,
138 ByteOp
| DstMem
| SrcImplicit
| ModRM
, DstMem
| SrcImplicit
| ModRM
,
141 0, 0, 0, 0, 0, 0, 0, 0,
143 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
147 ByteOp
| DstMem
| SrcNone
| ModRM
, DstMem
| SrcNone
| ModRM
,
150 0, 0, ByteOp
| DstMem
| SrcNone
| ModRM
, DstMem
| SrcNone
| ModRM
153 static u16 twobyte_table
[256] = {
155 0, SrcMem
| ModRM
| DstReg
, 0, 0, 0, 0, ImplicitOps
, 0,
156 0, ImplicitOps
, 0, 0, 0, ImplicitOps
| ModRM
, 0, 0,
158 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps
| ModRM
, 0, 0, 0, 0, 0, 0, 0,
160 ModRM
| ImplicitOps
, ModRM
, ModRM
| ImplicitOps
, ModRM
, 0, 0, 0, 0,
161 0, 0, 0, 0, 0, 0, 0, 0,
163 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
165 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
166 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
167 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
168 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
170 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
171 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
172 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
173 DstReg
| SrcMem
| ModRM
| Mov
, DstReg
| SrcMem
| ModRM
| Mov
,
175 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
177 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
179 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
181 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
183 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
185 0, 0, 0, DstMem
| SrcReg
| ModRM
| BitOp
, 0, 0, 0, 0,
187 0, 0, 0, DstMem
| SrcReg
| ModRM
| BitOp
, 0, 0, 0, 0,
189 ByteOp
| DstMem
| SrcReg
| ModRM
, DstMem
| SrcReg
| ModRM
, 0,
190 DstMem
| SrcReg
| ModRM
| BitOp
,
191 0, 0, ByteOp
| DstReg
| SrcMem
| ModRM
| Mov
,
192 DstReg
| SrcMem16
| ModRM
| Mov
,
194 0, 0, DstMem
| SrcImmByte
| ModRM
, DstMem
| SrcReg
| ModRM
| BitOp
,
195 0, 0, ByteOp
| DstReg
| SrcMem
| ModRM
| Mov
,
196 DstReg
| SrcMem16
| ModRM
| Mov
,
198 0, 0, 0, 0, 0, 0, 0, ImplicitOps
| ModRM
, 0, 0, 0, 0, 0, 0, 0, 0,
200 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
202 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
204 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
208 * Tell the emulator that of the Group 7 instructions (sgdt, lidt, etc.) we
209 * are interested only in invlpg and not in any of the rest.
211 * invlpg is a special instruction in that the data it references may not
214 void kvm_emulator_want_group7_invlpg(void)
216 twobyte_table
[1] &= ~SrcMem
;
218 EXPORT_SYMBOL_GPL(kvm_emulator_want_group7_invlpg
);
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	/* OP_REG: general-purpose register; OP_MEM: memory; OP_IMM: immediate */
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;	/* operand width: 1, 2, 4 or 8 bytes */
	/* val: current value; orig_val: value before emulation (used to
	 * suppress needless writeback); ptr: where the operand lives */
	unsigned long val, orig_val, *ptr;
};
227 /* EFLAGS bit definitions. */
228 #define EFLG_OF (1<<11)
229 #define EFLG_DF (1<<10)
230 #define EFLG_SF (1<<7)
231 #define EFLG_ZF (1<<6)
232 #define EFLG_AF (1<<4)
233 #define EFLG_PF (1<<2)
234 #define EFLG_CF (1<<0)
237 * Instruction emulation:
238 * Most instructions are emulated directly via a fragment of inline assembly
239 * code. This allows us to save/restore EFLAGS and thus very easily pick up
240 * any modified flags.
/* Width suffix / stack-pointer spellings for the inline-asm fragments.
 * The closing #endif was dropped by the extraction and is restored. */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
252 * These EFLAGS bits are restored from saved value during emulation, and
253 * any changes are written back to the saved value after emulation.
255 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
257 /* Before executing instruction: restore necessary bits in EFLAGS. */
258 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
259 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
261 "movl %"_msk",%"_LO32 _tmp"; " \
262 "andl %"_LO32 _tmp",("_STK"); " \
264 "notl %"_LO32 _tmp"; " \
265 "andl %"_LO32 _tmp",("_STK"); " \
267 "orl %"_LO32 _tmp",("_STK"); " \
269 /* _sav &= ~msk; */ \
270 "movl %"_msk",%"_LO32 _tmp"; " \
271 "notl %"_LO32 _tmp"; " \
272 "andl %"_LO32 _tmp",%"_sav"; "
274 /* After executing instruction: write-back necessary bits in EFLAGS. */
275 #define _POST_EFLAGS(_sav, _msk, _tmp) \
276 /* _sav |= EFLAGS & _msk; */ \
279 "andl %"_msk",%"_LO32 _tmp"; " \
280 "orl %"_LO32 _tmp",%"_sav"; "
282 /* Raw emulation: instruction has two explicit operands. */
283 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
285 unsigned long _tmp; \
287 switch ((_dst).bytes) { \
289 __asm__ __volatile__ ( \
290 _PRE_EFLAGS("0","4","2") \
291 _op"w %"_wx"3,%1; " \
292 _POST_EFLAGS("0","4","2") \
293 : "=m" (_eflags), "=m" ((_dst).val), \
295 : _wy ((_src).val), "i" (EFLAGS_MASK) ); \
298 __asm__ __volatile__ ( \
299 _PRE_EFLAGS("0","4","2") \
300 _op"l %"_lx"3,%1; " \
301 _POST_EFLAGS("0","4","2") \
302 : "=m" (_eflags), "=m" ((_dst).val), \
304 : _ly ((_src).val), "i" (EFLAGS_MASK) ); \
307 __emulate_2op_8byte(_op, _src, _dst, \
308 _eflags, _qx, _qy); \
313 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
315 unsigned long _tmp; \
316 switch ( (_dst).bytes ) \
319 __asm__ __volatile__ ( \
320 _PRE_EFLAGS("0","4","2") \
321 _op"b %"_bx"3,%1; " \
322 _POST_EFLAGS("0","4","2") \
323 : "=m" (_eflags), "=m" ((_dst).val), \
325 : _by ((_src).val), "i" (EFLAGS_MASK) ); \
328 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
329 _wx, _wy, _lx, _ly, _qx, _qy); \
334 /* Source operand is byte-sized and may be restricted to just %cl. */
335 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
336 __emulate_2op(_op, _src, _dst, _eflags, \
337 "b", "c", "b", "c", "b", "c", "b", "c")
339 /* Source operand is byte, word, long or quad sized. */
340 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
341 __emulate_2op(_op, _src, _dst, _eflags, \
342 "b", "q", "w", "r", _LO32, "r", "", "r")
344 /* Source operand is word, long or quad sized. */
345 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
346 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
347 "w", "r", _LO32, "r", "", "r")
349 /* Instruction has only one explicit operand (no source operand). */
350 #define emulate_1op(_op, _dst, _eflags) \
352 unsigned long _tmp; \
354 switch ( (_dst).bytes ) \
357 __asm__ __volatile__ ( \
358 _PRE_EFLAGS("0","3","2") \
360 _POST_EFLAGS("0","3","2") \
361 : "=m" (_eflags), "=m" ((_dst).val), \
363 : "i" (EFLAGS_MASK) ); \
366 __asm__ __volatile__ ( \
367 _PRE_EFLAGS("0","3","2") \
369 _POST_EFLAGS("0","3","2") \
370 : "=m" (_eflags), "=m" ((_dst).val), \
372 : "i" (EFLAGS_MASK) ); \
375 __asm__ __volatile__ ( \
376 _PRE_EFLAGS("0","3","2") \
378 _POST_EFLAGS("0","3","2") \
379 : "=m" (_eflags), "=m" ((_dst).val), \
381 : "i" (EFLAGS_MASK) ); \
384 __emulate_1op_8byte(_op, _dst, _eflags); \
389 /* Emulate an instruction with quadword operands (x86/64 only). */
390 #if defined(CONFIG_X86_64)
391 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
393 __asm__ __volatile__ ( \
394 _PRE_EFLAGS("0","4","2") \
395 _op"q %"_qx"3,%1; " \
396 _POST_EFLAGS("0","4","2") \
397 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
398 : _qy ((_src).val), "i" (EFLAGS_MASK) ); \
401 #define __emulate_1op_8byte(_op, _dst, _eflags) \
403 __asm__ __volatile__ ( \
404 _PRE_EFLAGS("0","3","2") \
406 _POST_EFLAGS("0","3","2") \
407 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
408 : "i" (EFLAGS_MASK) ); \
411 #elif defined(__i386__)
412 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
413 #define __emulate_1op_8byte(_op, _dst, _eflags)
414 #endif /* __i386__ */
/* Fetch next part of the instruction being emulated.  Evaluates to the
 * fetched value cast to _type; on read failure sets rc and jumps to the
 * 'done' label of the enclosing function.  Relies on rc, ops, ctxt being
 * in scope at the expansion site. */
#define insn_fetch(_type, _size, _eip)                                  \
({	unsigned long _x;                                               \
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x,  \
			   (_size), ctxt);                              \
	if ( rc != 0 )                                                  \
		goto done;                                              \
	(_eip) += (_size);                                              \
	(_type)_x;                                                      \
})
/* Access/update address held in a register, based on addressing mode.
 * With sub-long ad_bytes the register value is truncated to the address
 * width; increments then wrap within that width, leaving the upper bits
 * of the register untouched.  Relies on ad_bytes being in scope. */
#define register_address(base, reg)                                     \
	((base) + ((ad_bytes == sizeof(unsigned long)) ? (reg) :        \
		   ((reg) & ((1UL << (ad_bytes << 3)) - 1))))

#define register_address_increment(reg, inc)                            \
	do {                                                            \
		/* signed type ensures sign extension to long */        \
		int _inc = (int)(inc);                                  \
		if ( ad_bytes == sizeof(unsigned long) )                \
			(reg) += _inc;                                  \
		else                                                    \
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
443 void *decode_register(u8 modrm_reg
, unsigned long *regs
,
448 p
= ®s
[modrm_reg
];
449 if (highbyte_regs
&& modrm_reg
>= 4 && modrm_reg
< 8)
450 p
= (unsigned char *)®s
[modrm_reg
& 3] + 1;
454 static int read_descriptor(struct x86_emulate_ctxt
*ctxt
,
455 struct x86_emulate_ops
*ops
,
457 u16
*size
, unsigned long *address
, int op_bytes
)
464 rc
= ops
->read_std((unsigned long)ptr
, (unsigned long *)size
, 2, ctxt
);
467 rc
= ops
->read_std((unsigned long)ptr
+ 2, address
, op_bytes
, ctxt
);
472 x86_emulate_memop(struct x86_emulate_ctxt
*ctxt
, struct x86_emulate_ops
*ops
)
475 u8 b
, sib
, twobyte
= 0, rex_prefix
= 0;
476 u8 modrm
, modrm_mod
= 0, modrm_reg
= 0, modrm_rm
= 0;
477 unsigned long *override_base
= NULL
;
478 unsigned int op_bytes
, ad_bytes
, lock_prefix
= 0, rep_prefix
= 0, i
;
480 struct operand src
, dst
;
481 unsigned long cr2
= ctxt
->cr2
;
482 int mode
= ctxt
->mode
;
483 unsigned long modrm_ea
;
484 int use_modrm_ea
, index_reg
= 0, base_reg
= 0, scale
, rip_relative
= 0;
486 /* Shadow copy of register state. Committed on successful emulation. */
487 unsigned long _regs
[NR_VCPU_REGS
];
488 unsigned long _eip
= ctxt
->vcpu
->rip
, _eflags
= ctxt
->eflags
;
489 unsigned long modrm_val
= 0;
491 memcpy(_regs
, ctxt
->vcpu
->regs
, sizeof _regs
);
494 case X86EMUL_MODE_REAL
:
495 case X86EMUL_MODE_PROT16
:
496 op_bytes
= ad_bytes
= 2;
498 case X86EMUL_MODE_PROT32
:
499 op_bytes
= ad_bytes
= 4;
502 case X86EMUL_MODE_PROT64
:
511 /* Legacy prefixes. */
512 for (i
= 0; i
< 8; i
++) {
513 switch (b
= insn_fetch(u8
, 1, _eip
)) {
514 case 0x66: /* operand-size override */
515 op_bytes
^= 6; /* switch between 2/4 bytes */
517 case 0x67: /* address-size override */
518 if (mode
== X86EMUL_MODE_PROT64
)
519 ad_bytes
^= 12; /* switch between 4/8 bytes */
521 ad_bytes
^= 6; /* switch between 2/4 bytes */
523 case 0x2e: /* CS override */
524 override_base
= &ctxt
->cs_base
;
526 case 0x3e: /* DS override */
527 override_base
= &ctxt
->ds_base
;
529 case 0x26: /* ES override */
530 override_base
= &ctxt
->es_base
;
532 case 0x64: /* FS override */
533 override_base
= &ctxt
->fs_base
;
535 case 0x65: /* GS override */
536 override_base
= &ctxt
->gs_base
;
538 case 0x36: /* SS override */
539 override_base
= &ctxt
->ss_base
;
541 case 0xf0: /* LOCK */
544 case 0xf3: /* REP/REPE/REPZ */
547 case 0xf2: /* REPNE/REPNZ */
557 if ((mode
== X86EMUL_MODE_PROT64
) && ((b
& 0xf0) == 0x40)) {
560 op_bytes
= 8; /* REX.W */
561 modrm_reg
= (b
& 4) << 1; /* REX.R */
562 index_reg
= (b
& 2) << 2; /* REX.X */
563 modrm_rm
= base_reg
= (b
& 1) << 3; /* REG.B */
564 b
= insn_fetch(u8
, 1, _eip
);
567 /* Opcode byte(s). */
570 /* Two-byte opcode? */
573 b
= insn_fetch(u8
, 1, _eip
);
574 d
= twobyte_table
[b
];
582 /* ModRM and SIB bytes. */
584 modrm
= insn_fetch(u8
, 1, _eip
);
585 modrm_mod
|= (modrm
& 0xc0) >> 6;
586 modrm_reg
|= (modrm
& 0x38) >> 3;
587 modrm_rm
|= (modrm
& 0x07);
591 if (modrm_mod
== 3) {
592 modrm_val
= *(unsigned long *)
593 decode_register(modrm_rm
, _regs
, d
& ByteOp
);
598 unsigned bx
= _regs
[VCPU_REGS_RBX
];
599 unsigned bp
= _regs
[VCPU_REGS_RBP
];
600 unsigned si
= _regs
[VCPU_REGS_RSI
];
601 unsigned di
= _regs
[VCPU_REGS_RDI
];
603 /* 16-bit ModR/M decode. */
607 modrm_ea
+= insn_fetch(u16
, 2, _eip
);
610 modrm_ea
+= insn_fetch(s8
, 1, _eip
);
613 modrm_ea
+= insn_fetch(u16
, 2, _eip
);
643 if (modrm_rm
== 2 || modrm_rm
== 3 ||
644 (modrm_rm
== 6 && modrm_mod
!= 0))
646 override_base
= &ctxt
->ss_base
;
647 modrm_ea
= (u16
)modrm_ea
;
649 /* 32/64-bit ModR/M decode. */
653 sib
= insn_fetch(u8
, 1, _eip
);
654 index_reg
|= (sib
>> 3) & 7;
661 modrm_ea
+= _regs
[base_reg
];
663 modrm_ea
+= insn_fetch(s32
, 4, _eip
);
666 modrm_ea
+= _regs
[base_reg
];
672 modrm_ea
+= _regs
[index_reg
] << scale
;
678 modrm_ea
+= _regs
[modrm_rm
];
679 else if (mode
== X86EMUL_MODE_PROT64
)
683 modrm_ea
+= _regs
[modrm_rm
];
689 modrm_ea
+= insn_fetch(s32
, 4, _eip
);
692 modrm_ea
+= insn_fetch(s8
, 1, _eip
);
695 modrm_ea
+= insn_fetch(s32
, 4, _eip
);
700 override_base
= &ctxt
->ds_base
;
701 if (mode
== X86EMUL_MODE_PROT64
&&
702 override_base
!= &ctxt
->fs_base
&&
703 override_base
!= &ctxt
->gs_base
)
704 override_base
= NULL
;
707 modrm_ea
+= *override_base
;
711 switch (d
& SrcMask
) {
722 modrm_ea
+= op_bytes
;
726 modrm_ea
= (u32
)modrm_ea
;
733 * Decode and fetch the source operand: register, memory
736 switch (d
& SrcMask
) {
742 src
.ptr
= decode_register(modrm_reg
, _regs
,
744 src
.val
= src
.orig_val
= *(u8
*) src
.ptr
;
747 src
.ptr
= decode_register(modrm_reg
, _regs
, 0);
748 switch ((src
.bytes
= op_bytes
)) {
750 src
.val
= src
.orig_val
= *(u16
*) src
.ptr
;
753 src
.val
= src
.orig_val
= *(u32
*) src
.ptr
;
756 src
.val
= src
.orig_val
= *(u64
*) src
.ptr
;
768 src
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
771 src
.ptr
= (unsigned long *)cr2
;
772 if ((rc
= ops
->read_emulated((unsigned long)src
.ptr
,
773 &src
.val
, src
.bytes
, ctxt
)) != 0)
775 src
.orig_val
= src
.val
;
779 src
.ptr
= (unsigned long *)_eip
;
780 src
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
783 /* NB. Immediates are sign-extended as necessary. */
786 src
.val
= insn_fetch(s8
, 1, _eip
);
789 src
.val
= insn_fetch(s16
, 2, _eip
);
792 src
.val
= insn_fetch(s32
, 4, _eip
);
798 src
.ptr
= (unsigned long *)_eip
;
800 src
.val
= insn_fetch(s8
, 1, _eip
);
804 /* Decode and fetch the destination operand: register or memory. */
805 switch (d
& DstMask
) {
807 /* Special instructions do their own operand decoding. */
812 && !(twobyte_table
&& (b
== 0xb6 || b
== 0xb7))) {
813 dst
.ptr
= decode_register(modrm_reg
, _regs
,
815 dst
.val
= *(u8
*) dst
.ptr
;
818 dst
.ptr
= decode_register(modrm_reg
, _regs
, 0);
819 switch ((dst
.bytes
= op_bytes
)) {
821 dst
.val
= *(u16
*)dst
.ptr
;
824 dst
.val
= *(u32
*)dst
.ptr
;
827 dst
.val
= *(u64
*)dst
.ptr
;
834 dst
.ptr
= (unsigned long *)cr2
;
835 dst
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
837 unsigned long mask
= ~(dst
.bytes
* 8 - 1);
839 dst
.ptr
= (void *)dst
.ptr
+ (src
.val
& mask
) / 8;
841 if (!(d
& Mov
) && /* optimisation - avoid slow emulated read */
842 ((rc
= ops
->read_emulated((unsigned long)dst
.ptr
,
843 &dst
.val
, dst
.bytes
, ctxt
)) != 0))
847 dst
.orig_val
= dst
.val
;
855 emulate_2op_SrcV("add", src
, dst
, _eflags
);
859 emulate_2op_SrcV("or", src
, dst
, _eflags
);
863 emulate_2op_SrcV("adc", src
, dst
, _eflags
);
867 emulate_2op_SrcV("sbb", src
, dst
, _eflags
);
871 emulate_2op_SrcV("and", src
, dst
, _eflags
);
875 emulate_2op_SrcV("sub", src
, dst
, _eflags
);
879 emulate_2op_SrcV("xor", src
, dst
, _eflags
);
883 emulate_2op_SrcV("cmp", src
, dst
, _eflags
);
885 case 0x63: /* movsxd */
886 if (mode
!= X86EMUL_MODE_PROT64
)
888 dst
.val
= (s32
) src
.val
;
890 case 0x80 ... 0x83: /* Grp1 */
912 emulate_2op_SrcV("test", src
, dst
, _eflags
);
914 case 0x86 ... 0x87: /* xchg */
915 /* Write back the register source. */
918 *(u8
*) src
.ptr
= (u8
) dst
.val
;
921 *(u16
*) src
.ptr
= (u16
) dst
.val
;
924 *src
.ptr
= (u32
) dst
.val
;
925 break; /* 64b reg: zero-extend */
931 * Write back the memory destination with implicit LOCK
937 case 0xa0 ... 0xa1: /* mov */
938 dst
.ptr
= (unsigned long *)&_regs
[VCPU_REGS_RAX
];
940 _eip
+= ad_bytes
; /* skip src displacement */
942 case 0xa2 ... 0xa3: /* mov */
943 dst
.val
= (unsigned long)_regs
[VCPU_REGS_RAX
];
944 _eip
+= ad_bytes
; /* skip dst displacement */
946 case 0x88 ... 0x8b: /* mov */
947 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
950 case 0x8f: /* pop (sole member of Grp1a) */
951 /* 64-bit mode: POP always pops a 64-bit operand. */
952 if (mode
== X86EMUL_MODE_PROT64
)
954 if ((rc
= ops
->read_std(register_address(ctxt
->ss_base
,
955 _regs
[VCPU_REGS_RSP
]),
956 &dst
.val
, dst
.bytes
, ctxt
)) != 0)
958 register_address_increment(_regs
[VCPU_REGS_RSP
], dst
.bytes
);
964 emulate_2op_SrcB("rol", src
, dst
, _eflags
);
967 emulate_2op_SrcB("ror", src
, dst
, _eflags
);
970 emulate_2op_SrcB("rcl", src
, dst
, _eflags
);
973 emulate_2op_SrcB("rcr", src
, dst
, _eflags
);
975 case 4: /* sal/shl */
976 case 6: /* sal/shl */
977 emulate_2op_SrcB("sal", src
, dst
, _eflags
);
980 emulate_2op_SrcB("shr", src
, dst
, _eflags
);
983 emulate_2op_SrcB("sar", src
, dst
, _eflags
);
987 case 0xd0 ... 0xd1: /* Grp2 */
990 case 0xd2 ... 0xd3: /* Grp2 */
991 src
.val
= _regs
[VCPU_REGS_RCX
];
993 case 0xf6 ... 0xf7: /* Grp3 */
995 case 0 ... 1: /* test */
997 * Special case in Grp3: test has an immediate
1001 src
.ptr
= (unsigned long *)_eip
;
1002 src
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
1005 switch (src
.bytes
) {
1007 src
.val
= insn_fetch(s8
, 1, _eip
);
1010 src
.val
= insn_fetch(s16
, 2, _eip
);
1013 src
.val
= insn_fetch(s32
, 4, _eip
);
1021 emulate_1op("neg", dst
, _eflags
);
1024 goto cannot_emulate
;
1027 case 0xfe ... 0xff: /* Grp4/Grp5 */
1028 switch (modrm_reg
) {
1030 emulate_1op("inc", dst
, _eflags
);
1033 emulate_1op("dec", dst
, _eflags
);
1036 /* 64-bit mode: PUSH always pushes a 64-bit operand. */
1037 if (mode
== X86EMUL_MODE_PROT64
) {
1039 if ((rc
= ops
->read_std((unsigned long)dst
.ptr
,
1044 register_address_increment(_regs
[VCPU_REGS_RSP
],
1046 if ((rc
= ops
->write_std(
1047 register_address(ctxt
->ss_base
,
1048 _regs
[VCPU_REGS_RSP
]),
1049 &dst
.val
, dst
.bytes
, ctxt
)) != 0)
1051 dst
.val
= dst
.orig_val
; /* skanky: disable writeback */
1054 goto cannot_emulate
;
1060 if ((d
& Mov
) || (dst
.orig_val
!= dst
.val
)) {
1063 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1064 switch (dst
.bytes
) {
1066 *(u8
*)dst
.ptr
= (u8
)dst
.val
;
1069 *(u16
*)dst
.ptr
= (u16
)dst
.val
;
1072 *dst
.ptr
= (u32
)dst
.val
;
1073 break; /* 64b: zero-ext */
1081 rc
= ops
->cmpxchg_emulated((unsigned long)dst
.
1083 &dst
.val
, dst
.bytes
,
1086 rc
= ops
->write_emulated((unsigned long)dst
.ptr
,
1087 &dst
.val
, dst
.bytes
,
1096 /* Commit shadow register state. */
1097 memcpy(ctxt
->vcpu
->regs
, _regs
, sizeof _regs
);
1098 ctxt
->eflags
= _eflags
;
1099 ctxt
->vcpu
->rip
= _eip
;
1102 return (rc
== X86EMUL_UNHANDLEABLE
) ? -1 : 0;
1106 goto twobyte_special_insn
;
1108 if (_regs
[VCPU_REGS_RCX
] == 0) {
1109 ctxt
->vcpu
->rip
= _eip
;
1112 _regs
[VCPU_REGS_RCX
]--;
1113 _eip
= ctxt
->vcpu
->rip
;
1116 case 0xa4 ... 0xa5: /* movs */
1118 dst
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
1119 dst
.ptr
= (unsigned long *)register_address(ctxt
->es_base
,
1120 _regs
[VCPU_REGS_RDI
]);
1121 if ((rc
= ops
->read_emulated(register_address(
1122 override_base
? *override_base
: ctxt
->ds_base
,
1123 _regs
[VCPU_REGS_RSI
]), &dst
.val
, dst
.bytes
, ctxt
)) != 0)
1125 register_address_increment(_regs
[VCPU_REGS_RSI
],
1126 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1127 register_address_increment(_regs
[VCPU_REGS_RDI
],
1128 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1130 case 0xa6 ... 0xa7: /* cmps */
1131 DPRINTF("Urk! I don't handle CMPS.\n");
1132 goto cannot_emulate
;
1133 case 0xaa ... 0xab: /* stos */
1135 dst
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
1136 dst
.ptr
= (unsigned long *)cr2
;
1137 dst
.val
= _regs
[VCPU_REGS_RAX
];
1138 register_address_increment(_regs
[VCPU_REGS_RDI
],
1139 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1141 case 0xac ... 0xad: /* lods */
1143 dst
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
1144 dst
.ptr
= (unsigned long *)&_regs
[VCPU_REGS_RAX
];
1145 if ((rc
= ops
->read_emulated(cr2
, &dst
.val
, dst
.bytes
, ctxt
)) != 0)
1147 register_address_increment(_regs
[VCPU_REGS_RSI
],
1148 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1150 case 0xae ... 0xaf: /* scas */
1151 DPRINTF("Urk! I don't handle SCAS.\n");
1152 goto cannot_emulate
;
1153 case 0xf4: /* hlt */
1154 ctxt
->vcpu
->halt_request
= 1;
1161 case 0x01: /* lgdt, lidt, lmsw */
1162 switch (modrm_reg
) {
1164 unsigned long address
;
1167 rc
= read_descriptor(ctxt
, ops
, src
.ptr
,
1168 &size
, &address
, op_bytes
);
1171 realmode_lgdt(ctxt
->vcpu
, size
, address
);
1174 rc
= read_descriptor(ctxt
, ops
, src
.ptr
,
1175 &size
, &address
, op_bytes
);
1178 realmode_lidt(ctxt
->vcpu
, size
, address
);
1182 goto cannot_emulate
;
1183 *(u16
*)&_regs
[modrm_rm
]
1184 = realmode_get_cr(ctxt
->vcpu
, 0);
1188 goto cannot_emulate
;
1189 realmode_lmsw(ctxt
->vcpu
, (u16
)modrm_val
, &_eflags
);
1192 emulate_invlpg(ctxt
->vcpu
, cr2
);
1195 goto cannot_emulate
;
1198 case 0x21: /* mov from dr to reg */
1200 goto cannot_emulate
;
1201 rc
= emulator_get_dr(ctxt
, modrm_reg
, &_regs
[modrm_rm
]);
1203 case 0x23: /* mov from reg to dr */
1205 goto cannot_emulate
;
1206 rc
= emulator_set_dr(ctxt
, modrm_reg
, _regs
[modrm_rm
]);
1208 case 0x40 ... 0x4f: /* cmov */
1209 dst
.val
= dst
.orig_val
= src
.val
;
1210 d
&= ~Mov
; /* default to no move */
1212 * First, assume we're decoding an even cmov opcode
1215 switch ((b
& 15) >> 1) {
1217 d
|= (_eflags
& EFLG_OF
) ? Mov
: 0;
1219 case 1: /* cmovb/cmovc/cmovnae */
1220 d
|= (_eflags
& EFLG_CF
) ? Mov
: 0;
1222 case 2: /* cmovz/cmove */
1223 d
|= (_eflags
& EFLG_ZF
) ? Mov
: 0;
1225 case 3: /* cmovbe/cmovna */
1226 d
|= (_eflags
& (EFLG_CF
| EFLG_ZF
)) ? Mov
: 0;
1229 d
|= (_eflags
& EFLG_SF
) ? Mov
: 0;
1231 case 5: /* cmovp/cmovpe */
1232 d
|= (_eflags
& EFLG_PF
) ? Mov
: 0;
1234 case 7: /* cmovle/cmovng */
1235 d
|= (_eflags
& EFLG_ZF
) ? Mov
: 0;
1237 case 6: /* cmovl/cmovnge */
1238 d
|= (!(_eflags
& EFLG_SF
) !=
1239 !(_eflags
& EFLG_OF
)) ? Mov
: 0;
1242 /* Odd cmov opcodes (lsb == 1) have inverted sense. */
1243 d
^= (b
& 1) ? Mov
: 0;
1245 case 0xb0 ... 0xb1: /* cmpxchg */
1247 * Save real source value, then compare EAX against
1250 src
.orig_val
= src
.val
;
1251 src
.val
= _regs
[VCPU_REGS_RAX
];
1252 emulate_2op_SrcV("cmp", src
, dst
, _eflags
);
1253 /* Always write back. The question is: where to? */
1255 if (_eflags
& EFLG_ZF
) {
1256 /* Success: write back to memory. */
1257 dst
.val
= src
.orig_val
;
1259 /* Failure: write the value we saw to EAX. */
1261 dst
.ptr
= (unsigned long *)&_regs
[VCPU_REGS_RAX
];
1266 src
.val
&= (dst
.bytes
<< 3) - 1; /* only subword offset */
1267 emulate_2op_SrcV_nobyte("bt", src
, dst
, _eflags
);
1271 src
.val
&= (dst
.bytes
<< 3) - 1; /* only subword offset */
1272 emulate_2op_SrcV_nobyte("btr", src
, dst
, _eflags
);
1276 src
.val
&= (dst
.bytes
<< 3) - 1; /* only subword offset */
1277 emulate_2op_SrcV_nobyte("bts", src
, dst
, _eflags
);
1279 case 0xb6 ... 0xb7: /* movzx */
1280 dst
.bytes
= op_bytes
;
1281 dst
.val
= (d
& ByteOp
) ? (u8
) src
.val
: (u16
) src
.val
;
1285 src
.val
&= (dst
.bytes
<< 3) - 1; /* only subword offset */
1286 emulate_2op_SrcV_nobyte("btc", src
, dst
, _eflags
);
1288 case 0xba: /* Grp8 */
1289 switch (modrm_reg
& 3) {
1300 case 0xbe ... 0xbf: /* movsx */
1301 dst
.bytes
= op_bytes
;
1302 dst
.val
= (d
& ByteOp
) ? (s8
) src
.val
: (s16
) src
.val
;
1307 twobyte_special_insn
:
1308 /* Disable writeback. */
1309 dst
.orig_val
= dst
.val
;
1311 case 0x09: /* wbinvd */
1313 case 0x0d: /* GrpP (prefetch) */
1314 case 0x18: /* Grp16 (prefetch/nop) */
1317 emulate_clts(ctxt
->vcpu
);
1319 case 0x20: /* mov cr, reg */
1321 goto cannot_emulate
;
1322 _regs
[modrm_rm
] = realmode_get_cr(ctxt
->vcpu
, modrm_reg
);
1324 case 0x22: /* mov reg, cr */
1326 goto cannot_emulate
;
1327 realmode_set_cr(ctxt
->vcpu
, modrm_reg
, modrm_val
, &_eflags
);
1329 case 0xc7: /* Grp9 (cmpxchg8b) */
1332 if ((rc
= ops
->read_emulated(cr2
, &old
, 8, ctxt
)) != 0)
1334 if (((u32
) (old
>> 0) != (u32
) _regs
[VCPU_REGS_RAX
]) ||
1335 ((u32
) (old
>> 32) != (u32
) _regs
[VCPU_REGS_RDX
])) {
1336 _regs
[VCPU_REGS_RAX
] = (u32
) (old
>> 0);
1337 _regs
[VCPU_REGS_RDX
] = (u32
) (old
>> 32);
1338 _eflags
&= ~EFLG_ZF
;
1340 new = ((u64
)_regs
[VCPU_REGS_RCX
] << 32)
1341 | (u32
) _regs
[VCPU_REGS_RBX
];
1342 if ((rc
= ops
->cmpxchg_emulated(cr2
, &old
,
1343 &new, 8, ctxt
)) != 0)
1353 DPRINTF("Cannot emulate %02x\n", b
);
1360 #include <asm/uaccess.h>
1363 x86_emulate_read_std(unsigned long addr
,
1365 unsigned int bytes
, struct x86_emulate_ctxt
*ctxt
)
1371 if ((rc
= copy_from_user((void *)val
, (void *)addr
, bytes
)) != 0) {
1372 propagate_page_fault(addr
+ bytes
- rc
, 0); /* read fault */
1373 return X86EMUL_PROPAGATE_FAULT
;
1376 return X86EMUL_CONTINUE
;
1380 x86_emulate_write_std(unsigned long addr
,
1382 unsigned int bytes
, struct x86_emulate_ctxt
*ctxt
)
1386 if ((rc
= copy_to_user((void *)addr
, (void *)&val
, bytes
)) != 0) {
1387 propagate_page_fault(addr
+ bytes
- rc
, PGERR_write_access
);
1388 return X86EMUL_PROPAGATE_FAULT
;
1391 return X86EMUL_CONTINUE
;