/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#include "kvm.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
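/*
 * Worked example (added for illustration; not in the original source):
 * opcode 0x00 is "add r/m8, r8". Its entry below is
 * ByteOp | DstMem | SrcReg | ModRM: an 8-bit operation whose destination
 * comes from the ModRM r/m field (register or memory) and whose source is
 * the ModRM reg field. The decoder masks an entry with DstMask and
 * SrcMask to drive the generic operand-fetch code.
 */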
static u8 opcode_table[256] = {
        /* 0x00 - 0x07 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x08 - 0x0F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x10 - 0x17 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x18 - 0x1F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x20 - 0x27 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        SrcImmByte, SrcImm, 0, 0,
        /* 0x28 - 0x2F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x30 - 0x37 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x38 - 0x3F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x40 - 0x4F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x50 - 0x57 */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x58 - 0x5F */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x60 - 0x67 */
        0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
        0, 0, 0, 0,
        /* 0x68 - 0x6F */
        0, 0, ImplicitOps | Mov, 0,
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
        /* 0x70 - 0x77 */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x78 - 0x7F */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x80 - 0x87 */
        ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
        ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        /* 0x88 - 0x8F */
        ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
        ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov,
        /* 0x90 - 0x9F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps, ImplicitOps, 0, 0,
        /* 0xA0 - 0xA7 */
        ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
        ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
        ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
        ByteOp | ImplicitOps, ImplicitOps,
        /* 0xA8 - 0xAF */
        0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
        ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
        ByteOp | ImplicitOps, ImplicitOps,
        /* 0xB0 - 0xBF */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xC0 - 0xC7 */
        ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
        0, ImplicitOps, 0, 0,
        ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
        /* 0xC8 - 0xCF */
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xD0 - 0xD7 */
        ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
        ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
        0, 0, 0, 0,
        /* 0xD8 - 0xDF */
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xE0 - 0xE7 */
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xE8 - 0xEF */
        ImplicitOps, SrcImm | ImplicitOps, 0, SrcImmByte | ImplicitOps,
        0, 0, 0, 0,
        /* 0xF0 - 0xF7 */
        0, 0, 0, 0, ImplicitOps, 0,
        ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
        /* 0xF8 - 0xFF */
        0, 0, 0, 0,
        0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
        /* 0x00 - 0x0F */
        0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
        ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
        /* 0x10 - 0x1F */
        0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
        /* 0x20 - 0x2F */
        ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x30 - 0x3F */
        ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x40 - 0x47 */
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        /* 0x48 - 0x4F */
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        /* 0x50 - 0x5F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x60 - 0x6F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x70 - 0x7F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x80 - 0x8F */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x90 - 0x9F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xA0 - 0xA7 */
        0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
        /* 0xA8 - 0xAF */
        0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
        /* 0xB0 - 0xB7 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
        DstMem | SrcReg | ModRM | BitOp,
        0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem16 | ModRM | Mov,
        /* 0xB8 - 0xBF */
        0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
        0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem16 | ModRM | Mov,
        /* 0xC0 - 0xCF */
        0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xD0 - 0xDF */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xE0 - 0xEF */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xF0 - 0xFF */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
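/*
 * How the tables are consumed (summary added for clarity; it mirrors the
 * lookup in x86_emulate_memop() below): the primary opcode byte indexes
 * opcode_table[]; a 0x0f escape byte switches to twobyte_table[]. A zero
 * attribute word means the instruction is not emulated and decoding bails
 * out to cannot_emulate.
 */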
/* Type, address-of, and value of an instruction's operand. */
struct operand {
        enum { OP_REG, OP_MEM, OP_IMM } type;
        unsigned int bytes;
        unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
        /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
        "push %"_sav"; " \
        "movl %"_msk",%"_LO32 _tmp"; " \
        "andl %"_LO32 _tmp",("_STK"); " \
        "pushf; " \
        "notl %"_LO32 _tmp"; " \
        "andl %"_LO32 _tmp",("_STK"); " \
        "pop  %"_tmp"; " \
        "orl  %"_LO32 _tmp",("_STK"); " \
        "popf; " \
        /* _sav &= ~msk; */ \
        "movl %"_msk",%"_LO32 _tmp"; " \
        "notl %"_LO32 _tmp"; " \
        "andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
        /* _sav |= EFLAGS & _msk; */ \
        "pushf; " \
        "pop  %"_tmp"; " \
        "andl %"_msk",%"_LO32 _tmp"; " \
        "orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
        do { \
                unsigned long _tmp; \
                switch ((_dst).bytes) { \
                case 2: \
                        __asm__ __volatile__ ( \
                                _PRE_EFLAGS("0","4","2") \
                                _op"w %"_wx"3,%1; " \
                                _POST_EFLAGS("0","4","2") \
                                : "=m" (_eflags), "=m" ((_dst).val), \
                                  "=&r" (_tmp) \
                                : _wy ((_src).val), "i" (EFLAGS_MASK) ); \
                        break; \
                case 4: \
                        __asm__ __volatile__ ( \
                                _PRE_EFLAGS("0","4","2") \
                                _op"l %"_lx"3,%1; " \
                                _POST_EFLAGS("0","4","2") \
                                : "=m" (_eflags), "=m" ((_dst).val), \
                                  "=&r" (_tmp) \
                                : _ly ((_src).val), "i" (EFLAGS_MASK) ); \
                        break; \
                case 8: \
                        __emulate_2op_8byte(_op, _src, _dst, \
                                            _eflags, _qx, _qy); \
                        break; \
                } \
        } while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
        do { \
                unsigned long _tmp; \
                switch ( (_dst).bytes ) \
                { \
                case 1: \
                        __asm__ __volatile__ ( \
                                _PRE_EFLAGS("0","4","2") \
                                _op"b %"_bx"3,%1; " \
                                _POST_EFLAGS("0","4","2") \
                                : "=m" (_eflags), "=m" ((_dst).val), \
                                  "=&r" (_tmp) \
                                : _by ((_src).val), "i" (EFLAGS_MASK) ); \
                        break; \
                default: \
                        __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
                                             _wx, _wy, _lx, _ly, _qx, _qy); \
                        break; \
                } \
        } while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
        __emulate_2op(_op, _src, _dst, _eflags, \
                      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
        __emulate_2op(_op, _src, _dst, _eflags, \
                      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
        __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
                             "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
        do { \
                unsigned long _tmp; \
                switch ( (_dst).bytes ) \
                { \
                case 1: \
                        __asm__ __volatile__ ( \
                                _PRE_EFLAGS("0","3","2") \
                                _op"b %1; " \
                                _POST_EFLAGS("0","3","2") \
                                : "=m" (_eflags), "=m" ((_dst).val), \
                                  "=&r" (_tmp) \
                                : "i" (EFLAGS_MASK) ); \
                        break; \
                case 2: \
                        __asm__ __volatile__ ( \
                                _PRE_EFLAGS("0","3","2") \
                                _op"w %1; " \
                                _POST_EFLAGS("0","3","2") \
                                : "=m" (_eflags), "=m" ((_dst).val), \
                                  "=&r" (_tmp) \
                                : "i" (EFLAGS_MASK) ); \
                        break; \
                case 4: \
                        __asm__ __volatile__ ( \
                                _PRE_EFLAGS("0","3","2") \
                                _op"l %1; " \
                                _POST_EFLAGS("0","3","2") \
                                : "=m" (_eflags), "=m" ((_dst).val), \
                                  "=&r" (_tmp) \
                                : "i" (EFLAGS_MASK) ); \
                        break; \
                case 8: \
                        __emulate_1op_8byte(_op, _dst, _eflags); \
                        break; \
                } \
        } while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
        do { \
                __asm__ __volatile__ ( \
                        _PRE_EFLAGS("0","4","2") \
                        _op"q %"_qx"3,%1; " \
                        _POST_EFLAGS("0","4","2") \
                        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
                        : _qy ((_src).val), "i" (EFLAGS_MASK) ); \
        } while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags) \
        do { \
                __asm__ __volatile__ ( \
                        _PRE_EFLAGS("0","3","2") \
                        _op"q %1; " \
                        _POST_EFLAGS("0","3","2") \
                        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
                        : "i" (EFLAGS_MASK) ); \
        } while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip) \
({      unsigned long _x; \
        rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x, \
                           (_size), ctxt->vcpu); \
        if (rc != 0) \
                goto done; \
        (_eip) += (_size); \
        (_type)_x; \
})
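/*
 * Note (added): insn_fetch() is a GCC statement expression. It evaluates
 * to the fetched value cast to _type, advances _eip past the bytes read,
 * and on a failed read_std() jumps to the enclosing function's 'done'
 * label through the local 'rc'. So 'b = insn_fetch(u8, 1, _eip);' both
 * reads one opcode byte and steps the shadow instruction pointer.
 */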
/* Access/update address held in a register, based on addressing mode. */
#define address_mask(reg) \
        ((ad_bytes == sizeof(unsigned long)) ? \
         (reg) : ((reg) & ((1UL << (ad_bytes << 3)) - 1)))
#define register_address(base, reg) \
        ((base) + address_mask(reg))
#define register_address_increment(reg, inc) \
        do { \
                /* signed type ensures sign extension to long */ \
                int _inc = (inc); \
                if ( ad_bytes == sizeof(unsigned long) ) \
                        (reg) += _inc; \
                else \
                        (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
                                (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
        } while (0)

#define JMP_REL(rel) \
        do { \
                register_address_increment(_eip, rel); \
        } while (0)
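/*
 * Worked example (added): with ad_bytes == 2 and reg == 0x12345678,
 * address_mask(reg) keeps only the low 16 bits, giving 0x5678.
 * register_address_increment(reg, 1) then yields 0x12345679: the
 * increment wraps inside the low 16 bits while the upper bits of the
 * shadow register are preserved, matching 16-bit SI/DI/SP semantics.
 */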
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
                             int highbyte_regs)
{
        void *p;

        p = &regs[modrm_reg];
        if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
                p = (unsigned char *)&regs[modrm_reg & 3] + 1;
        return p;
}
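/*
 * Example (added): without a REX prefix, byte-register encodings 4-7
 * name AH/CH/DH/BH, i.e. the second byte of the first four GPRs, hence
 * the '(unsigned char *)&regs[modrm_reg & 3] + 1' path on a
 * little-endian layout. With a REX prefix the same encodings name
 * SPL/BPL/SIL/DIL instead and the plain &regs[modrm_reg] path applies,
 * which is why callers pass (rex_prefix == 0) as highbyte_regs.
 */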
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
                           struct x86_emulate_ops *ops,
                           void *ptr,
                           u16 *size, unsigned long *address, int op_bytes)
{
        int rc;

        if (op_bytes == 2)
                op_bytes = 3;
        *address = 0;
        rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
                           ctxt->vcpu);
        if (rc)
                return rc;
        rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
                           ctxt->vcpu);
        return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
        int rc = 0;

        switch ((condition & 15) >> 1) {
        case 0: /* o */
                rc |= (flags & EFLG_OF);
                break;
        case 1: /* b/c/nae */
                rc |= (flags & EFLG_CF);
                break;
        case 2: /* z/e */
                rc |= (flags & EFLG_ZF);
                break;
        case 3: /* be/na */
                rc |= (flags & (EFLG_CF|EFLG_ZF));
                break;
        case 4: /* s */
                rc |= (flags & EFLG_SF);
                break;
        case 5: /* p/pe */
                rc |= (flags & EFLG_PF);
                break;
        case 7: /* le/ng */
                rc |= (flags & EFLG_ZF);
                /* fall through */
        case 6: /* l/nge */
                rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
                break;
        }

        /* Odd condition identifiers (lsb == 1) have inverted sense. */
        return (!!rc ^ (condition & 1));
}
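/*
 * Example (added): for 'jne' the condition nibble is 5, so
 * (5 & 15) >> 1 == 2 selects the ZF test and the odd low bit inverts it;
 * test_cc(5, flags) is therefore true exactly when ZF is clear. Both the
 * short (0x70-0x7f) and near (0x0f 0x80-0x8f) jcc paths below pass the
 * opcode byte here, since the condition sits in its low nibble.
 */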
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
        unsigned d;
        u8 b, sib, twobyte = 0, rex_prefix = 0;
        u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
        unsigned long *override_base = NULL;
        unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
        int rc = 0;
        struct operand src, dst;
        unsigned long cr2 = ctxt->cr2;
        int mode = ctxt->mode;
        unsigned long modrm_ea;
        int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
        int no_wb = 0;
        u64 msr_data;

        /* Shadow copy of register state. Committed on successful emulation. */
        unsigned long _regs[NR_VCPU_REGS];
        unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
        unsigned long modrm_val = 0;

        memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
        switch (mode) {
        case X86EMUL_MODE_REAL:
        case X86EMUL_MODE_PROT16:
                op_bytes = ad_bytes = 2;
                break;
        case X86EMUL_MODE_PROT32:
                op_bytes = ad_bytes = 4;
                break;
#ifdef CONFIG_X86_64
        case X86EMUL_MODE_PROT64:
                op_bytes = 4;
                ad_bytes = 8;
                break;
#endif
        default:
                return -1;
        }
        /* Legacy prefixes. */
        for (i = 0; i < 8; i++) {
                switch (b = insn_fetch(u8, 1, _eip)) {
                case 0x66:	/* operand-size override */
                        op_bytes ^= 6;	/* switch between 2/4 bytes */
                        break;
                case 0x67:	/* address-size override */
                        if (mode == X86EMUL_MODE_PROT64)
                                ad_bytes ^= 12;	/* switch between 4/8 bytes */
                        else
                                ad_bytes ^= 6;	/* switch between 2/4 bytes */
                        break;
                case 0x2e:	/* CS override */
                        override_base = &ctxt->cs_base;
                        break;
                case 0x3e:	/* DS override */
                        override_base = &ctxt->ds_base;
                        break;
                case 0x26:	/* ES override */
                        override_base = &ctxt->es_base;
                        break;
                case 0x64:	/* FS override */
                        override_base = &ctxt->fs_base;
                        break;
                case 0x65:	/* GS override */
                        override_base = &ctxt->gs_base;
                        break;
                case 0x36:	/* SS override */
                        override_base = &ctxt->ss_base;
                        break;
                case 0xf0:	/* LOCK */
                        lock_prefix = 1;
                        break;
                case 0xf2:	/* REPNE/REPNZ */
                case 0xf3:	/* REP/REPE/REPZ */
                        rep_prefix = 1;
                        break;
                default:
                        goto done_prefixes;
                }
        }
done_prefixes:
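        /*
         * Note (added): the '^= 6' and '^= 12' updates work because the
         * only legal operand sizes at this point are 2 and 4 (2 ^ 6 == 4,
         * 4 ^ 6 == 2) and the only legal 64-bit address sizes are 4 and 8
         * (4 ^ 12 == 8, 8 ^ 12 == 4), so a repeated prefix toggles back
         * instead of compounding.
         */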
        /* REX prefix. */
        if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
                rex_prefix = 1;
                if (b & 8)
                        op_bytes = 8;	/* REX.W */
                modrm_reg = (b & 4) << 1;	/* REX.R */
                index_reg = (b & 2) << 2;	/* REX.X */
                modrm_rm = base_reg = (b & 1) << 3;	/* REX.B */
                b = insn_fetch(u8, 1, _eip);
        }
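        /*
         * Example (added): REX byte 0x4e (0100.1110b) has W=1, R=1, X=1,
         * B=0, so op_bytes becomes 8 and bit 3 of the ModRM reg and SIB
         * index numbers is pre-loaded ((b & 4) << 1 == 8, (b & 2) << 2 == 8),
         * extending them to r8-r15 before ModRM/SIB are even read.
         */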
        /* Opcode byte(s). */
        d = opcode_table[b];
        if (d == 0) {
                /* Two-byte opcode? */
                if (b == 0x0f) {
                        twobyte = 1;
                        b = insn_fetch(u8, 1, _eip);
                        d = twobyte_table[b];
                }

                /* Unrecognised? */
                if (d == 0)
                        goto cannot_emulate;
        }
        /* ModRM and SIB bytes. */
        if (d & ModRM) {
                modrm = insn_fetch(u8, 1, _eip);
                modrm_mod |= (modrm & 0xc0) >> 6;
                modrm_reg |= (modrm & 0x38) >> 3;
                modrm_rm |= (modrm & 0x07);
                modrm_ea = 0;
                use_modrm_ea = 1;

                if (modrm_mod == 3) {
                        modrm_val = *(unsigned long *)
                                decode_register(modrm_rm, _regs, d & ByteOp);
                        goto modrm_done;
                }
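                /*
                 * Example (added): ModRM byte 0xd8 decodes as mod = 3,
                 * reg = 3, rm = 0, a register-direct form; modrm_val was
                 * just sampled straight from the shadow register block and
                 * no effective address is computed.
                 */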
                if (ad_bytes == 2) {
                        unsigned bx = _regs[VCPU_REGS_RBX];
                        unsigned bp = _regs[VCPU_REGS_RBP];
                        unsigned si = _regs[VCPU_REGS_RSI];
                        unsigned di = _regs[VCPU_REGS_RDI];

                        /* 16-bit ModR/M decode. */
                        switch (modrm_mod) {
                        case 0:
                                if (modrm_rm == 6)
                                        modrm_ea += insn_fetch(u16, 2, _eip);
                                break;
                        case 1:
                                modrm_ea += insn_fetch(s8, 1, _eip);
                                break;
                        case 2:
                                modrm_ea += insn_fetch(u16, 2, _eip);
                                break;
                        }
                        switch (modrm_rm) {
                        case 0: modrm_ea += bx + si; break;
                        case 1: modrm_ea += bx + di; break;
                        case 2: modrm_ea += bp + si; break;
                        case 3: modrm_ea += bp + di; break;
                        case 4: modrm_ea += si; break;
                        case 5: modrm_ea += di; break;
                        case 6: if (modrm_mod != 0) modrm_ea += bp; break;
                        case 7: modrm_ea += bx; break;
                        }
                        if (modrm_rm == 2 || modrm_rm == 3 ||
                            (modrm_rm == 6 && modrm_mod != 0))
                                if (!override_base)
                                        override_base = &ctxt->ss_base;
                        modrm_ea = (u16)modrm_ea;
                } else {
707 sib
= insn_fetch(u8
, 1, _eip
);
708 index_reg
|= (sib
>> 3) & 7;
715 modrm_ea
+= _regs
[base_reg
];
717 modrm_ea
+= insn_fetch(s32
, 4, _eip
);
720 modrm_ea
+= _regs
[base_reg
];
726 modrm_ea
+= _regs
[index_reg
] << scale
;
732 modrm_ea
+= _regs
[modrm_rm
];
733 else if (mode
== X86EMUL_MODE_PROT64
)
737 modrm_ea
+= _regs
[modrm_rm
];
743 modrm_ea
+= insn_fetch(s32
, 4, _eip
);
746 modrm_ea
+= insn_fetch(s8
, 1, _eip
);
749 modrm_ea
+= insn_fetch(s32
, 4, _eip
);
754 override_base
= &ctxt
->ds_base
;
755 if (mode
== X86EMUL_MODE_PROT64
&&
756 override_base
!= &ctxt
->fs_base
&&
757 override_base
!= &ctxt
->gs_base
)
758 override_base
= NULL
;
761 modrm_ea
+= *override_base
;
765 switch (d
& SrcMask
) {
776 modrm_ea
+= op_bytes
;
780 modrm_ea
= (u32
)modrm_ea
;
787 * Decode and fetch the source operand: register, memory
790 switch (d
& SrcMask
) {
796 src
.ptr
= decode_register(modrm_reg
, _regs
,
798 src
.val
= src
.orig_val
= *(u8
*) src
.ptr
;
801 src
.ptr
= decode_register(modrm_reg
, _regs
, 0);
802 switch ((src
.bytes
= op_bytes
)) {
804 src
.val
= src
.orig_val
= *(u16
*) src
.ptr
;
807 src
.val
= src
.orig_val
= *(u32
*) src
.ptr
;
810 src
.val
= src
.orig_val
= *(u64
*) src
.ptr
;
822 src
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
823 /* Don't fetch the address for invlpg: it could be unmapped. */
824 if (twobyte
&& b
== 0x01 && modrm_reg
== 7)
828 * For instructions with a ModR/M byte, switch to register
831 if ((d
& ModRM
) && modrm_mod
== 3) {
836 src
.ptr
= (unsigned long *)cr2
;
838 if ((rc
= ops
->read_emulated((unsigned long)src
.ptr
,
839 &src
.val
, src
.bytes
, ctxt
->vcpu
)) != 0)
841 src
.orig_val
= src
.val
;
845 src
.ptr
= (unsigned long *)_eip
;
846 src
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
849 /* NB. Immediates are sign-extended as necessary. */
852 src
.val
= insn_fetch(s8
, 1, _eip
);
855 src
.val
= insn_fetch(s16
, 2, _eip
);
858 src
.val
= insn_fetch(s32
, 4, _eip
);
864 src
.ptr
= (unsigned long *)_eip
;
866 src
.val
= insn_fetch(s8
, 1, _eip
);
        /* Decode and fetch the destination operand: register or memory. */
        switch (d & DstMask) {
        case ImplicitOps:
                /* Special instructions do their own operand decoding. */
                goto special_insn;
        case DstReg:
                dst.type = OP_REG;
                if ((d & ByteOp)
                    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
                        dst.ptr = decode_register(modrm_reg, _regs,
                                                  (rex_prefix == 0));
                        dst.val = *(u8 *) dst.ptr;
                        dst.bytes = 1;
                } else {
                        dst.ptr = decode_register(modrm_reg, _regs, 0);
                        switch ((dst.bytes = op_bytes)) {
                        case 2:
                                dst.val = *(u16 *)dst.ptr;
                                break;
                        case 4:
                                dst.val = *(u32 *)dst.ptr;
                                break;
                        case 8:
                                dst.val = *(u64 *)dst.ptr;
                                break;
                        }
                }
                break;
        case DstMem:
                dst.type = OP_MEM;
                dst.ptr = (unsigned long *)cr2;
                dst.bytes = (d & ByteOp) ? 1 : op_bytes;
                /*
                 * For instructions with a ModR/M byte, switch to register
                 * access if modrm_mod == 3.
                 */
                if ((d & ModRM) && modrm_mod == 3) {
                        dst.type = OP_REG;
                        break;
                }
                if (d & BitOp) {
                        unsigned long mask = ~(dst.bytes * 8 - 1);

                        dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
                }
                if (!(d & Mov) && /* optimisation - avoid slow emulated read */
                    ((rc = ops->read_emulated((unsigned long)dst.ptr,
                                              &dst.val, dst.bytes, ctxt->vcpu)) != 0))
                        goto done;
                break;
        }
        dst.orig_val = dst.val;
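        /*
         * Note (added): for bit instructions (BitOp) the dst.ptr
         * adjustment above lets a bit offset reach beyond the r/m operand:
         * with dst.bytes == 2, mask is ~15, so src.val == 35 advances
         * dst.ptr by (35 & ~15) / 8 == 4 bytes; the residual subword
         * offset 35 & 15 == 3 is applied later by the bt-family cases.
         */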
930 emulate_2op_SrcV("add", src
, dst
, _eflags
);
934 emulate_2op_SrcV("or", src
, dst
, _eflags
);
938 emulate_2op_SrcV("adc", src
, dst
, _eflags
);
942 emulate_2op_SrcV("sbb", src
, dst
, _eflags
);
946 emulate_2op_SrcV("and", src
, dst
, _eflags
);
948 case 0x24: /* and al imm8 */
950 dst
.ptr
= &_regs
[VCPU_REGS_RAX
];
951 dst
.val
= *(u8
*)dst
.ptr
;
953 dst
.orig_val
= dst
.val
;
955 case 0x25: /* and ax imm16, or eax imm32 */
957 dst
.bytes
= op_bytes
;
958 dst
.ptr
= &_regs
[VCPU_REGS_RAX
];
960 dst
.val
= *(u16
*)dst
.ptr
;
962 dst
.val
= *(u32
*)dst
.ptr
;
963 dst
.orig_val
= dst
.val
;
967 emulate_2op_SrcV("sub", src
, dst
, _eflags
);
971 emulate_2op_SrcV("xor", src
, dst
, _eflags
);
975 emulate_2op_SrcV("cmp", src
, dst
, _eflags
);
977 case 0x63: /* movsxd */
978 if (mode
!= X86EMUL_MODE_PROT64
)
980 dst
.val
= (s32
) src
.val
;
        case 0x80 ... 0x83:	/* Grp1 */
                switch (modrm_reg) {
                case 0:
                        goto add;
                case 1:
                        goto or;
                case 2:
                        goto adc;
                case 3:
                        goto sbb;
                case 4:
                        goto and;
                case 5:
                        goto sub;
                case 6:
                        goto xor;
                case 7:
                        goto cmp;
                }
                break;
        case 0x84 ... 0x85:
              test:		/* test */
                emulate_2op_SrcV("test", src, dst, _eflags);
                break;
        case 0x86 ... 0x87:	/* xchg */
                /* Write back the register source. */
                switch (dst.bytes) {
                case 1:
                        *(u8 *) src.ptr = (u8) dst.val;
                        break;
                case 2:
                        *(u16 *) src.ptr = (u16) dst.val;
                        break;
                case 4:
                        *src.ptr = (u32) dst.val;
                        break;	/* 64b reg: zero-extend */
                case 8:
                        *src.ptr = dst.val;
                        break;
                }
                /*
                 * Write back the memory destination with implicit LOCK
                 * prefix.
                 */
                dst.val = src.val;
                lock_prefix = 1;
                break;
        case 0x88 ... 0x8b:	/* mov */
                goto mov;
        case 0x8d: /* lea r16/r32, m */
                dst.val = modrm_val;
                break;
        case 0x8f:		/* pop (sole member of Grp1a) */
                /* 64-bit mode: POP always pops a 64-bit operand. */
                if (mode == X86EMUL_MODE_PROT64)
                        dst.bytes = 8;
                if ((rc = ops->read_std(register_address(ctxt->ss_base,
                                                         _regs[VCPU_REGS_RSP]),
                                        &dst.val, dst.bytes, ctxt->vcpu)) != 0)
                        goto done;
                register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
                break;
        case 0xa0 ... 0xa1:	/* mov */
                dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
                dst.val = src.val;
                _eip += ad_bytes;	/* skip src displacement */
                break;
        case 0xa2 ... 0xa3:	/* mov */
                dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
                _eip += ad_bytes;	/* skip dst displacement */
                break;
        case 0xc0 ... 0xc1:
              grp2:		/* Grp2 */
                switch (modrm_reg) {
                case 0:	/* rol */
                        emulate_2op_SrcB("rol", src, dst, _eflags);
                        break;
                case 1:	/* ror */
                        emulate_2op_SrcB("ror", src, dst, _eflags);
                        break;
                case 2:	/* rcl */
                        emulate_2op_SrcB("rcl", src, dst, _eflags);
                        break;
                case 3:	/* rcr */
                        emulate_2op_SrcB("rcr", src, dst, _eflags);
                        break;
                case 4:	/* sal/shl */
                case 6:	/* sal/shl */
                        emulate_2op_SrcB("sal", src, dst, _eflags);
                        break;
                case 5:	/* shr */
                        emulate_2op_SrcB("shr", src, dst, _eflags);
                        break;
                case 7:	/* sar */
                        emulate_2op_SrcB("sar", src, dst, _eflags);
                        break;
                }
                break;
        case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
              mov:
                dst.val = src.val;
                break;
        case 0xd0 ... 0xd1:	/* Grp2 */
                src.val = 1;
                goto grp2;
        case 0xd2 ... 0xd3:	/* Grp2 */
                src.val = _regs[VCPU_REGS_RCX];
                goto grp2;
        case 0xf6 ... 0xf7:	/* Grp3 */
                switch (modrm_reg) {
                case 0 ... 1:	/* test */
                        /*
                         * Special case in Grp3: test has an immediate
                         * source operand.
                         */
                        src.type = OP_IMM;
                        src.ptr = (unsigned long *)_eip;
                        src.bytes = (d & ByteOp) ? 1 : op_bytes;
                        if (src.bytes == 8)
                                src.bytes = 4;
                        switch (src.bytes) {
                        case 1:
                                src.val = insn_fetch(s8, 1, _eip);
                                break;
                        case 2:
                                src.val = insn_fetch(s16, 2, _eip);
                                break;
                        case 4:
                                src.val = insn_fetch(s32, 4, _eip);
                                break;
                        }
                        goto test;
                case 3:	/* neg */
                        emulate_1op("neg", dst, _eflags);
                        break;
                default:
                        goto cannot_emulate;
                }
                break;
        case 0xfe ... 0xff:	/* Grp4/Grp5 */
                switch (modrm_reg) {
                case 0:	/* inc */
                        emulate_1op("inc", dst, _eflags);
                        break;
                case 1:	/* dec */
                        emulate_1op("dec", dst, _eflags);
                        break;
                case 4: /* jmp abs */
                        if (b == 0xff)
                                _eip = dst.val;
                        else
                                goto cannot_emulate;
                        break;
                case 6:	/* push */
                        /* 64-bit mode: PUSH always pushes a 64-bit operand. */
                        if (mode == X86EMUL_MODE_PROT64) {
                                dst.bytes = 8;
                                if ((rc = ops->read_std((unsigned long)dst.ptr,
                                                        &dst.val, 8,
                                                        ctxt->vcpu)) != 0)
                                        goto done;
                        }
                        register_address_increment(_regs[VCPU_REGS_RSP],
                                                   -dst.bytes);
                        if ((rc = ops->write_emulated(
                                     register_address(ctxt->ss_base,
                                                      _regs[VCPU_REGS_RSP]),
                                     &dst.val, dst.bytes, ctxt->vcpu)) != 0)
                                goto done;
                        no_wb = 1;
                        break;
                default:
                        goto cannot_emulate;
                }
                break;
        }
writeback:
        if (!no_wb) {
                switch (dst.type) {
                case OP_REG:
                        /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
                        switch (dst.bytes) {
                        case 1:
                                *(u8 *)dst.ptr = (u8)dst.val;
                                break;
                        case 2:
                                *(u16 *)dst.ptr = (u16)dst.val;
                                break;
                        case 4:
                                *dst.ptr = (u32)dst.val;
                                break;	/* 64b: zero-ext */
                        case 8:
                                *dst.ptr = dst.val;
                                break;
                        }
                        break;
                case OP_MEM:
                        if (lock_prefix)
                                rc = ops->cmpxchg_emulated((unsigned long)dst.
                                                           ptr, &dst.orig_val,
                                                           &dst.val, dst.bytes,
                                                           ctxt->vcpu);
                        else
                                rc = ops->write_emulated((unsigned long)dst.ptr,
                                                         &dst.val, dst.bytes,
                                                         ctxt->vcpu);
                        if (rc != 0)
                                goto done;
                default:
                        break;
                }
        }

        /* Commit shadow register state. */
        memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
        ctxt->eflags = _eflags;
        ctxt->vcpu->rip = _eip;

done:
        return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;

special_insn:
        if (twobyte)
                goto twobyte_special_insn;
        switch (b) {
        case 0x50 ... 0x57: /* push reg */
                if (op_bytes == 2)
                        src.val = (u16) _regs[b & 0x7];
                else
                        src.val = (u32) _regs[b & 0x7];
                dst.type = OP_MEM;
                dst.bytes = op_bytes;
                dst.val = src.val;
                register_address_increment(_regs[VCPU_REGS_RSP], -op_bytes);
                dst.ptr = (void *) register_address(
                        ctxt->ss_base, _regs[VCPU_REGS_RSP]);
                break;
        case 0x58 ... 0x5f: /* pop reg */
                dst.ptr = (unsigned long *)&_regs[b & 0x7];
              pop_instruction:
                if ((rc = ops->read_std(register_address(ctxt->ss_base,
                        _regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt->vcpu))
                    != 0)
                        goto done;

                register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
                no_wb = 1; /* Disable writeback. */
                break;
        case 0x6a: /* push imm8 */
                src.val = insn_fetch(s8, 1, _eip);
              push:
                dst.type = OP_MEM;
                dst.bytes = op_bytes;
                dst.val = src.val;
                register_address_increment(_regs[VCPU_REGS_RSP], -op_bytes);
                dst.ptr = (void *) register_address(ctxt->ss_base,
                                                    _regs[VCPU_REGS_RSP]);
                break;
        case 0x6c:		/* insb */
        case 0x6d:		/* insw/insd */
                if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
                                1,				/* in */
                                (d & ByteOp) ? 1 : op_bytes,	/* size */
                                rep_prefix ?
                                address_mask(_regs[VCPU_REGS_RCX]) : 1, /* count */
                                (_eflags & EFLG_DF),		/* down */
                                register_address(ctxt->es_base,
                                                 _regs[VCPU_REGS_RDI]), /* address */
                                rep_prefix,
                                _regs[VCPU_REGS_RDX]		/* port */
                                ) == 0)
                        return -1;
                return 0;
        case 0x6e:		/* outsb */
        case 0x6f:		/* outsw/outsd */
                if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
                                0,				/* in */
                                (d & ByteOp) ? 1 : op_bytes,	/* size */
                                rep_prefix ?
                                address_mask(_regs[VCPU_REGS_RCX]) : 1, /* count */
                                (_eflags & EFLG_DF),		/* down */
                                register_address(override_base ?
                                                 *override_base : ctxt->ds_base,
                                                 _regs[VCPU_REGS_RSI]), /* address */
                                rep_prefix,
                                _regs[VCPU_REGS_RDX]		/* port */
                                ) == 0)
                        return -1;
                return 0;
        case 0x70 ... 0x7f: /* jcc (short) */ {
                int rel = insn_fetch(s8, 1, _eip);

                if (test_cc(b, _eflags))
                        JMP_REL(rel);
                break;
        }
        case 0x9c: /* pushf */
                src.val = (unsigned long) _eflags;
                goto push;
        case 0x9d: /* popf */
                dst.ptr = (unsigned long *) &_eflags;
                goto pop_instruction;
        case 0xc3: /* ret */
                dst.ptr = &_eip;
                goto pop_instruction;
        case 0xf4: /* hlt */
                ctxt->vcpu->halt_request = 1;
                goto done;
        }
        if (rep_prefix) {
                if (_regs[VCPU_REGS_RCX] == 0) {
                        ctxt->vcpu->rip = _eip;
                        goto done;
                }
                _regs[VCPU_REGS_RCX]--;
                _eip = ctxt->vcpu->rip;
        }
1307 dst
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
1308 dst
.ptr
= (unsigned long *)register_address(ctxt
->es_base
,
1309 _regs
[VCPU_REGS_RDI
]);
1310 if ((rc
= ops
->read_emulated(register_address(
1311 override_base
? *override_base
: ctxt
->ds_base
,
1312 _regs
[VCPU_REGS_RSI
]), &dst
.val
, dst
.bytes
, ctxt
->vcpu
)) != 0)
1314 register_address_increment(_regs
[VCPU_REGS_RSI
],
1315 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1316 register_address_increment(_regs
[VCPU_REGS_RDI
],
1317 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1319 case 0xa6 ... 0xa7: /* cmps */
1320 DPRINTF("Urk! I don't handle CMPS.\n");
1321 goto cannot_emulate
;
1322 case 0xaa ... 0xab: /* stos */
1324 dst
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
1325 dst
.ptr
= (unsigned long *)cr2
;
1326 dst
.val
= _regs
[VCPU_REGS_RAX
];
1327 register_address_increment(_regs
[VCPU_REGS_RDI
],
1328 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1330 case 0xac ... 0xad: /* lods */
1332 dst
.bytes
= (d
& ByteOp
) ? 1 : op_bytes
;
1333 dst
.ptr
= (unsigned long *)&_regs
[VCPU_REGS_RAX
];
1334 if ((rc
= ops
->read_emulated(cr2
, &dst
.val
, dst
.bytes
,
1337 register_address_increment(_regs
[VCPU_REGS_RSI
],
1338 (_eflags
& EFLG_DF
) ? -dst
.bytes
: dst
.bytes
);
1340 case 0xae ... 0xaf: /* scas */
1341 DPRINTF("Urk! I don't handle SCAS.\n");
1342 goto cannot_emulate
;
        case 0xe8: /* call (near) */ {
                long int rel;

                switch (op_bytes) {
                case 2:
                        rel = insn_fetch(s16, 2, _eip);
                        break;
                case 4:
                        rel = insn_fetch(s32, 4, _eip);
                        break;
                case 8:
                        rel = insn_fetch(s64, 8, _eip);
                        break;
                default:
                        DPRINTF("Call: Invalid op_bytes\n");
                        goto cannot_emulate;
                }
                src.val = (unsigned long) _eip;
                JMP_REL(rel);
                op_bytes = ad_bytes;
                goto push;
        }
        case 0xe9: /* jmp rel */
        case 0xeb: /* jmp rel short */
                JMP_REL(src.val);
                no_wb = 1; /* Disable writeback. */
                break;
        }
        goto writeback;

twobyte_insn:
        switch (b) {
        case 0x01: /* lgdt, lidt, lmsw */
                /* Disable writeback. */
                no_wb = 1;
                switch (modrm_reg) {
                        u16 size;
                        unsigned long address;

                case 2: /* lgdt */
                        rc = read_descriptor(ctxt, ops, src.ptr,
                                             &size, &address, op_bytes);
                        if (rc)
                                goto done;
                        realmode_lgdt(ctxt->vcpu, size, address);
                        break;
                case 3: /* lidt */
                        rc = read_descriptor(ctxt, ops, src.ptr,
                                             &size, &address, op_bytes);
                        if (rc)
                                goto done;
                        realmode_lidt(ctxt->vcpu, size, address);
                        break;
                case 4: /* smsw */
                        if (modrm_mod != 3)
                                goto cannot_emulate;
                        *(u16 *)&_regs[modrm_rm]
                                = realmode_get_cr(ctxt->vcpu, 0);
                        break;
                case 6: /* lmsw */
                        if (modrm_mod != 3)
                                goto cannot_emulate;
                        realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
                        break;
                case 7: /* invlpg */
                        emulate_invlpg(ctxt->vcpu, cr2);
                        break;
                default:
                        goto cannot_emulate;
                }
                break;
        case 0x21: /* mov from dr to reg */
                no_wb = 1;
                if (modrm_mod != 3)
                        goto cannot_emulate;
                rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
                break;
        case 0x23: /* mov from reg to dr */
                no_wb = 1;
                if (modrm_mod != 3)
                        goto cannot_emulate;
                rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
                break;
        case 0x40 ... 0x4f:	/* cmov */
                dst.val = dst.orig_val = src.val;
                no_wb = 1;
                /*
                 * First, assume we're decoding an even cmov opcode
                 * (lsb == 0).
                 */
                switch ((b & 15) >> 1) {
                case 0:	/* cmovo */
                        no_wb = (_eflags & EFLG_OF) ? 0 : 1;
                        break;
                case 1:	/* cmovb/cmovc/cmovnae */
                        no_wb = (_eflags & EFLG_CF) ? 0 : 1;
                        break;
                case 2:	/* cmovz/cmove */
                        no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
                        break;
                case 3:	/* cmovbe/cmovna */
                        no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
                        break;
                case 4:	/* cmovs */
                        no_wb = (_eflags & EFLG_SF) ? 0 : 1;
                        break;
                case 5:	/* cmovp/cmovpe */
                        no_wb = (_eflags & EFLG_PF) ? 0 : 1;
                        break;
                case 7:	/* cmovle/cmovng */
                        no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
                        /* fall through */
                case 6:	/* cmovl/cmovnge */
                        no_wb &= (!(_eflags & EFLG_SF) !=
                                  !(_eflags & EFLG_OF)) ? 0 : 1;
                        break;
                }
                /* Odd cmov opcodes (lsb == 1) have inverted sense. */
                no_wb ^= b & 1;
                break;
        case 0xa3:
              bt:		/* bt */
                src.val &= (dst.bytes << 3) - 1; /* only subword offset */
                emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
                break;
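        /*
         * Example (added): continuing the BitOp note earlier, a bit offset
         * of 35 on a word operand was already redirected to the word 4
         * bytes further on; masking with (dst.bytes << 3) - 1 == 15 here
         * leaves offset 3, and the host 'bt' instruction tests bit 3 of
         * that word while CF lands in _eflags.
         */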
        case 0xab:
              bts:		/* bts */
                src.val &= (dst.bytes << 3) - 1; /* only subword offset */
                emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
                break;
        case 0xb0 ... 0xb1:	/* cmpxchg */
                /*
                 * Save real source value, then compare EAX against
                 * destination.
                 */
                src.orig_val = src.val;
                src.val = _regs[VCPU_REGS_RAX];
                emulate_2op_SrcV("cmp", src, dst, _eflags);
                if (_eflags & EFLG_ZF) {
                        /* Success: write back to memory. */
                        dst.val = src.orig_val;
                } else {
                        /* Failure: write the value we saw to EAX. */
                        dst.type = OP_REG;
                        dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
                }
                break;
        case 0xb3:
              btr:		/* btr */
                src.val &= (dst.bytes << 3) - 1; /* only subword offset */
                emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
                break;
        case 0xb6 ... 0xb7:	/* movzx */
                dst.bytes = op_bytes;
                dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
                break;
        case 0xba:		/* Grp8 */
                switch (modrm_reg & 3) {
                case 0:
                        goto bt;
                case 1:
                        goto bts;
                case 2:
                        goto btr;
                case 3:
                        goto btc;
                }
                break;
        case 0xbb:
              btc:		/* btc */
                src.val &= (dst.bytes << 3) - 1; /* only subword offset */
                emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
                break;
        case 0xbe ... 0xbf:	/* movsx */
                dst.bytes = op_bytes;
                dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
                break;
        case 0xc3:		/* movnti */
                dst.bytes = op_bytes;
                dst.val = (op_bytes == 4) ? (u32) src.val : (u64) src.val;
                break;
        }
        goto writeback;
twobyte_special_insn:
        /* Disable writeback. */
        no_wb = 1;
        switch (b) {
        case 0x06:
                emulate_clts(ctxt->vcpu);
                break;
        case 0x08:		/* invd */
                break;
        case 0x09:		/* wbinvd */
                break;
        case 0x0d:		/* GrpP (prefetch) */
        case 0x18:		/* Grp16 (prefetch/nop) */
                break;
        case 0x20: /* mov cr, reg */
                if (modrm_mod != 3)
                        goto cannot_emulate;
                _regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
                break;
        case 0x22: /* mov reg, cr */
                if (modrm_mod != 3)
                        goto cannot_emulate;
                realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
                break;
        case 0x30:
                /* wrmsr */
                msr_data = (u32)_regs[VCPU_REGS_RAX]
                        | ((u64)_regs[VCPU_REGS_RDX] << 32);
                rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
                if (rc) {
                        kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
                        _eip = ctxt->vcpu->rip;
                }
                rc = X86EMUL_CONTINUE;
                break;
        case 0x32:
                /* rdmsr */
                rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
                if (rc) {
                        kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
                        _eip = ctxt->vcpu->rip;
                } else {
                        _regs[VCPU_REGS_RAX] = (u32)msr_data;
                        _regs[VCPU_REGS_RDX] = msr_data >> 32;
                }
                rc = X86EMUL_CONTINUE;
                break;
        case 0x80 ... 0x8f: /* jnz rel, etc */ {
                long int rel;

                switch (op_bytes) {
                case 2:
                        rel = insn_fetch(s16, 2, _eip);
                        break;
                case 4:
                        rel = insn_fetch(s32, 4, _eip);
                        break;
                case 8:
                        rel = insn_fetch(s64, 8, _eip);
                        break;
                default:
                        DPRINTF("jnz: Invalid op_bytes\n");
                        goto cannot_emulate;
                }
                if (test_cc(b, _eflags))
                        JMP_REL(rel);
                break;
        }
        case 0xc7:		/* Grp9 (cmpxchg8b) */
                {
                        u64 old, new;

                        if ((rc = ops->read_emulated(cr2, &old, 8,
                                                     ctxt->vcpu)) != 0)
                                goto done;
                        if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
                            ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
                                _regs[VCPU_REGS_RAX] = (u32) (old >> 0);
                                _regs[VCPU_REGS_RDX] = (u32) (old >> 32);
                                _eflags &= ~EFLG_ZF;
                        } else {
                                new = ((u64)_regs[VCPU_REGS_RCX] << 32)
                                        | (u32) _regs[VCPU_REGS_RBX];
                                if ((rc = ops->cmpxchg_emulated(cr2, &old,
                                                        &new, 8,
                                                        ctxt->vcpu)) != 0)
                                        goto done;
                                _eflags |= EFLG_ZF;
                        }
                        break;
                }
        }
        goto writeback;

cannot_emulate:
        DPRINTF("Cannot emulate %02x\n", b);
        return -1;
}
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
                     unsigned long *val,
                     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
        unsigned int rc;

        *val = 0;

        if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
                propagate_page_fault(addr + bytes - rc, 0); /* read fault */
                return X86EMUL_PROPAGATE_FAULT;
        }

        return X86EMUL_CONTINUE;
}

int
x86_emulate_write_std(unsigned long addr,
                      unsigned long val,
                      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
        unsigned int rc;

        if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
                propagate_page_fault(addr + bytes - rc, PGERR_write_access);
                return X86EMUL_PROPAGATE_FAULT;
        }

        return X86EMUL_CONTINUE;
}