/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)      /* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)      /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)      /* Register operand. */
#define DstMem      (3<<1)      /* Memory operand. */
#define DstAcc      (4<<1)      /* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)      /* No source operand. */
#define SrcImplicit (0<<4)      /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)      /* Register operand. */
#define SrcMem      (2<<4)      /* Memory operand. */
#define SrcMem16    (3<<4)      /* Memory operand (16-bit). */
#define SrcMem32    (4<<4)      /* Memory operand (32-bit). */
#define SrcImm      (5<<4)      /* Immediate operand. */
#define SrcImmByte  (6<<4)      /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<8)
/* Destination is only written; never read. */
#define Mov         (1<<9)
#define BitOp       (1<<10)
#define MemAbs      (1<<11)     /* Memory operand is absolute displacement */
#define String      (1<<12)     /* String instruction (rep capable) */
#define Stack       (1<<13)     /* Stack instruction (push/pop) */
#define Group       (1<<14)     /* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)     /* Alternate decoding of mod == 3 */
#define GroupMask   0xff        /* Group number stored in bits 0:7 */
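
/*
 * Reading an entry (illustrative): opcode 0x88 is "mov r/m8, r8", so its
 * opcode_table entry below is ByteOp | DstMem | SrcReg | ModRM | Mov --
 * an 8-bit, move-only operation whose destination is named by the ModRM
 * r/m field and whose source is the ModRM reg field.
 */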
enum {
        Group1_80, Group1_81, Group1_82, Group1_83,
        Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
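
/*
 * A Group entry stores one of the enum values above in its low byte;
 * x86_decode_insn() later folds in bits 3:5 of the ModRM byte to index
 * group_table[] (or group2_table[] for the GroupDual case with mod == 3).
 */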
static u16 opcode_table[256] = {
        /* 0x00 - 0x07 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x08 - 0x0F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x10 - 0x17 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x18 - 0x1F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x20 - 0x27 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
        /* 0x28 - 0x2F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x30 - 0x37 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        0, 0, 0, 0,
        /* 0x38 - 0x3F */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
        ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
        0, 0,
        /* 0x40 - 0x47 */
        DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
        /* 0x48 - 0x4F */
        DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
        /* 0x50 - 0x57 */
        SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
        SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
        /* 0x58 - 0x5F */
        DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
        DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
        /* 0x60 - 0x67 */
        0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
        0, 0, 0, 0,
        /* 0x68 - 0x6F */
        SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
        /* 0x70 - 0x77 */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x78 - 0x7F */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x80 - 0x87 */
        Group | Group1_80, Group | Group1_81,
        Group | Group1_82, Group | Group1_83,
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
        /* 0x88 - 0x8F */
        ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
        ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
        DstReg | SrcMem | ModRM | Mov, Group | Group1A,
        /* 0x90 - 0x97 */
        DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
        /* 0x98 - 0x9F */
        0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
        /* 0xA0 - 0xA7 */
        ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
        ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
        ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
        ByteOp | ImplicitOps | String, ImplicitOps | String,
        /* 0xA8 - 0xAF */
        0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
        ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
        ByteOp | ImplicitOps | String, ImplicitOps | String,
        /* 0xB0 - 0xB7 */
        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
        /* 0xB8 - 0xBF */
        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
        /* 0xC0 - 0xC7 */
        ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
        0, ImplicitOps | Stack, 0, 0,
        ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
        /* 0xC8 - 0xCF */
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xD0 - 0xD7 */
        ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
        ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
        0, 0, 0, 0,
        /* 0xD8 - 0xDF */
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xE0 - 0xE7 */
        0, 0, 0, 0,
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
        /* 0xE8 - 0xEF */
        ImplicitOps | Stack, SrcImm | ImplicitOps,
        ImplicitOps, SrcImmByte | ImplicitOps,
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
        /* 0xF0 - 0xF7 */
        0, 0, 0, 0,
        ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
        /* 0xF8 - 0xFF */
        ImplicitOps, 0, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u16 twobyte_table[256] = {
        /* 0x00 - 0x0F */
        0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
        ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
        /* 0x10 - 0x1F */
        0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
        /* 0x20 - 0x2F */
        ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x30 - 0x3F */
        ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x40 - 0x47 */
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        /* 0x48 - 0x4F */
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
        /* 0x50 - 0x5F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x60 - 0x6F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x70 - 0x7F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0x80 - 0x8F */
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
        /* 0x90 - 0x9F */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xA0 - 0xA7 */
        0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
        /* 0xA8 - 0xAF */
        0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
        /* 0xB0 - 0xB7 */
        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
            DstMem | SrcReg | ModRM | BitOp,
        0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
            DstReg | SrcMem16 | ModRM | Mov,
        /* 0xB8 - 0xBF */
        0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
        0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
            DstReg | SrcMem16 | ModRM | Mov,
        /* 0xC0 - 0xCF */
        0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
        0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xD0 - 0xDF */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xE0 - 0xEF */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        /* 0xF0 - 0xFF */
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u16 group_table[] = {
        [Group1_80*8] =
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        [Group1_81*8] =
        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
        [Group1_82*8] =
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
        [Group1_83*8] =
        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
        [Group1A*8] =
        DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
        [Group3_Byte*8] =
        ByteOp | SrcImm | DstMem | ModRM, 0,
        ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
        0, 0, 0, 0,
        [Group3*8] =
        DstMem | SrcImm | ModRM, 0,
        DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
        0, 0, 0, 0,
        [Group4*8] =
        ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
        0, 0, 0, 0, 0, 0,
        [Group5*8] =
        DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
        SrcMem | ModRM | Stack, 0,
        SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
        [Group7*8] =
        0, 0, ModRM | SrcMem, ModRM | SrcMem,
        SrcNone | ModRM | DstMem | Mov, 0,
        SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u16 group2_table[] = {
        [Group7*8] =
        SrcNone | ModRM, 0, 0, 0,
        SrcNone | ModRM | DstMem | Mov, 0,
        SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"               /* force 32-bit operand */
#define _STK  "%%rsp"           /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""                /* force 32-bit operand */
#define _STK  "%%esp"           /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
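
/*
 * Worked example: emulating an 8-bit "add" of 0xff + 1 makes the host's
 * add instruction set CF, ZF and AF; EFLAGS_MASK is what lets exactly
 * those host-computed arithmetic flags flow into the guest's saved EFLAGS
 * while everything else (IF, DF, ...) stays untouched.
 */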
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
        /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
        "movl %"_sav",%"_LO32 _tmp"; " \
        "push %"_tmp"; " \
        "push %"_tmp"; " \
        "movl %"_msk",%"_LO32 _tmp"; " \
        "andl %"_LO32 _tmp",("_STK"); " \
        "pushf; " \
        "notl %"_LO32 _tmp"; " \
        "andl %"_LO32 _tmp",("_STK"); " \
        "andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
        "pop  %"_tmp"; " \
        "orl  %"_LO32 _tmp",("_STK"); " \
        "popf; " \
        "pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
        /* _sav |= EFLAGS & _msk; */ \
        "pushf; " \
        "pop  %"_LO32 _tmp"; " \
        "andl %"_msk",%"_LO32 _tmp"; " \
        "orl  %"_LO32 _tmp",%"_sav"; "
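
/*
 * How the sequence above works (roughly): the saved guest EFLAGS is
 * pushed twice; one stack copy is masked down to the EFLAGS_MASK bits,
 * the host flags (pushf) supply everything outside the mask, the merged
 * word is installed with popf before the emulated arithmetic runs, and
 * _sav pops back with its masked bits cleared so _POST_EFLAGS can simply
 * OR the result flags in.
 */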
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
        do {                                                            \
                unsigned long _tmp;                                     \
                                                                        \
                switch ((_dst).bytes) {                                 \
                case 2:                                                 \
                        __asm__ __volatile__ (                          \
                                _PRE_EFLAGS("0", "4", "2")              \
                                _op"w %"_wx"3,%1; "                     \
                                _POST_EFLAGS("0", "4", "2")             \
                                : "=m" (_eflags), "=m" ((_dst).val),    \
                                  "=&r" (_tmp)                          \
                                : _wy ((_src).val), "i" (EFLAGS_MASK)); \
                        break;                                          \
                case 4:                                                 \
                        __asm__ __volatile__ (                          \
                                _PRE_EFLAGS("0", "4", "2")              \
                                _op"l %"_lx"3,%1; "                     \
                                _POST_EFLAGS("0", "4", "2")             \
                                : "=m" (_eflags), "=m" ((_dst).val),    \
                                  "=&r" (_tmp)                          \
                                : _ly ((_src).val), "i" (EFLAGS_MASK)); \
                        break;                                          \
                case 8:                                                 \
                        __emulate_2op_8byte(_op, _src, _dst,            \
                                            _eflags, _qx, _qy);         \
                        break;                                          \
                }                                                       \
        } while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
        do {                                                            \
                unsigned long _tmp;                                     \
                switch ((_dst).bytes) {                                 \
                case 1:                                                 \
                        __asm__ __volatile__ (                          \
                                _PRE_EFLAGS("0", "4", "2")              \
                                _op"b %"_bx"3,%1; "                     \
                                _POST_EFLAGS("0", "4", "2")             \
                                : "=m" (_eflags), "=m" ((_dst).val),    \
                                  "=&r" (_tmp)                          \
                                : _by ((_src).val), "i" (EFLAGS_MASK)); \
                        break;                                          \
                default:                                                \
                        __emulate_2op_nobyte(_op, _src, _dst, _eflags,  \
                                             _wx, _wy, _lx, _ly, _qx, _qy); \
                        break;                                          \
                }                                                       \
        } while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                      \
        __emulate_2op(_op, _src, _dst, _eflags,                         \
                      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                      \
        __emulate_2op(_op, _src, _dst, _eflags,                         \
                      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)               \
        __emulate_2op_nobyte(_op, _src, _dst, _eflags,                  \
                             "w", "r", _LO32, "r", "", "r")
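
/*
 * The paired strings are gcc operand-modifier/constraint letters per
 * width: e.g. emulate_2op_SrcB() passes constraint "c" so the shift or
 * rotate count is forced into the RCX family (%cl), which is the only
 * count register the hardware instruction accepts.
 */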
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)                                 \
        do {                                                            \
                unsigned long _tmp;                                     \
                                                                        \
                switch ((_dst).bytes) {                                 \
                case 1:                                                 \
                        __asm__ __volatile__ (                          \
                                _PRE_EFLAGS("0", "3", "2")              \
                                _op"b %1; "                             \
                                _POST_EFLAGS("0", "3", "2")             \
                                : "=m" (_eflags), "=m" ((_dst).val),    \
                                  "=&r" (_tmp)                          \
                                : "i" (EFLAGS_MASK));                   \
                        break;                                          \
                case 2:                                                 \
                        __asm__ __volatile__ (                          \
                                _PRE_EFLAGS("0", "3", "2")              \
                                _op"w %1; "                             \
                                _POST_EFLAGS("0", "3", "2")             \
                                : "=m" (_eflags), "=m" ((_dst).val),    \
                                  "=&r" (_tmp)                          \
                                : "i" (EFLAGS_MASK));                   \
                        break;                                          \
                case 4:                                                 \
                        __asm__ __volatile__ (                          \
                                _PRE_EFLAGS("0", "3", "2")              \
                                _op"l %1; "                             \
                                _POST_EFLAGS("0", "3", "2")             \
                                : "=m" (_eflags), "=m" ((_dst).val),    \
                                  "=&r" (_tmp)                          \
                                : "i" (EFLAGS_MASK));                   \
                        break;                                          \
                case 8:                                                 \
                        __emulate_1op_8byte(_op, _dst, _eflags);        \
                        break;                                          \
                }                                                       \
        } while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)          \
        do {                                                              \
                __asm__ __volatile__ (                                    \
                        _PRE_EFLAGS("0", "4", "2")                        \
                        _op"q %"_qx"3,%1; "                               \
                        _POST_EFLAGS("0", "4", "2")                       \
                        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
                        : _qy ((_src).val), "i" (EFLAGS_MASK));           \
        } while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)                           \
        do {                                                              \
                __asm__ __volatile__ (                                    \
                        _PRE_EFLAGS("0", "3", "2")                        \
                        _op"q %1; "                                       \
                        _POST_EFLAGS("0", "3", "2")                       \
                        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
                        : "i" (EFLAGS_MASK));                             \
        } while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif                          /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)                                  \
({      unsigned long _x;                                               \
        rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));            \
        if (rc != 0)                                                    \
                goto done;                                              \
        (_eip) += (_size);                                              \
        (_type)_x;                                                      \
})
static inline unsigned long ad_mask(struct decode_cache *c)
{
        return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
        if (c->ad_bytes == sizeof(unsigned long))
                return reg;
        else
                return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
        return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
        if (c->ad_bytes == sizeof(unsigned long))
                *reg += inc;
        else
                *reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
        register_address_increment(c, &c->eip, rel);
}
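
/*
 * Example: with 16-bit addressing (ad_bytes == 2), ad_mask() is 0xffff,
 * so register_address_increment() wraps only the low 16 bits -- SI at
 * 0xffff stepping by one becomes 0x0000 while the upper bits survive.
 * The ad_bytes == sizeof(unsigned long) fast path also avoids the
 * undefined full-width shift in ad_mask() for native-sized addressing.
 */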
static void set_seg_override(struct decode_cache *c, int seg)
{
        c->has_seg_override = true;
        c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
        if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
                return 0;

        return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
                                       struct decode_cache *c)
{
        if (!c->has_seg_override)
                return 0;

        return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
        return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
        return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
                              struct x86_emulate_ops *ops,
                              unsigned long linear, u8 *dest)
{
        struct fetch_cache *fc = &ctxt->decode.fetch;
        int rc;
        int size;

        if (linear < fc->start || linear >= fc->end) {
                size = min(15UL, PAGE_SIZE - offset_in_page(linear));
                rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
                if (rc)
                        return rc;
                fc->start = linear;
                fc->end = linear + size;
        }
        *dest = fc->data[linear - fc->start];
        return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
                         struct x86_emulate_ops *ops,
                         unsigned long eip, void *dest, unsigned size)
{
        int rc = 0;

        eip += ctxt->cs_base;
        while (size--) {
                rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
                if (rc)
                        return rc;
        }
        return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
                             int highbyte_regs)
{
        void *p;

        p = &regs[modrm_reg];
        if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
                p = (unsigned char *)&regs[modrm_reg & 3] + 1;
        return p;
}
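
/*
 * Example: without a REX prefix, byte register 5 is CH, i.e. byte 1 of
 * the RCX slot, so this returns (unsigned char *)&regs[5 & 3] + 1.
 */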
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
                           struct x86_emulate_ops *ops,
                           void *ptr,
                           u16 *size, unsigned long *address, int op_bytes)
{
        int rc;

        if (op_bytes == 2)
                op_bytes = 3;
        *address = 0;
        rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
                           ctxt->vcpu);
        if (rc)
                return rc;
        rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
                           ctxt->vcpu);
        return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
        int rc = 0;

        switch ((condition & 15) >> 1) {
        case 0: /* o */
                rc |= (flags & EFLG_OF);
                break;
        case 1: /* b/c/nae */
                rc |= (flags & EFLG_CF);
                break;
        case 2: /* z/e */
                rc |= (flags & EFLG_ZF);
                break;
        case 3: /* be/na */
                rc |= (flags & (EFLG_CF|EFLG_ZF));
                break;
        case 4: /* s */
                rc |= (flags & EFLG_SF);
                break;
        case 5: /* p/pe */
                rc |= (flags & EFLG_PF);
                break;
        case 7: /* le/ng */
                rc |= (flags & EFLG_ZF);
                /* fall through */
        case 6: /* l/nge */
                rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
                break;
        }

        /* Odd condition identifiers (lsb == 1) have inverted sense. */
        return (!!rc ^ (condition & 1));
}
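
/*
 * Example: jz (0x74) carries condition code 4, so (4 & 15) >> 1 == 2
 * selects the ZF test; jnz (0x75) lands in the same case and the final
 * "condition & 1" xor inverts the result.
 */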
static void decode_register_operand(struct operand *op,
                                    struct decode_cache *c,
                                    int inhibit_bytereg)
{
        unsigned reg = c->modrm_reg;
        int highbyte_regs = c->rex_prefix == 0;

        if (!(c->d & ModRM))
                reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
        op->type = OP_REG;
        if ((c->d & ByteOp) && !inhibit_bytereg) {
                op->ptr = decode_register(reg, c->regs, highbyte_regs);
                op->val = *(u8 *)op->ptr;
                op->bytes = 1;
        } else {
                op->ptr = decode_register(reg, c->regs, 0);
                op->bytes = c->op_bytes;
                switch (op->bytes) {
                case 2:
                        op->val = *(u16 *)op->ptr;
                        break;
                case 4:
                        op->val = *(u32 *)op->ptr;
                        break;
                case 8:
                        op->val = *(u64 *) op->ptr;
                        break;
                }
        }
        op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
                        struct x86_emulate_ops *ops)
{
        struct decode_cache *c = &ctxt->decode;
        u8 sib;
        int index_reg = 0, base_reg = 0, scale;
        int rc = 0;

        if (c->rex_prefix) {
                c->modrm_reg = (c->rex_prefix & 4) << 1;        /* REX.R */
                index_reg = (c->rex_prefix & 2) << 2; /* REX.X */
                c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REG.B */
        }

        c->modrm = insn_fetch(u8, 1, c->eip);
        c->modrm_mod |= (c->modrm & 0xc0) >> 6;
        c->modrm_reg |= (c->modrm & 0x38) >> 3;
        c->modrm_rm |= (c->modrm & 0x07);
        c->modrm_ea = 0;
        c->use_modrm_ea = 1;

        if (c->modrm_mod == 3) {
                c->modrm_ptr = decode_register(c->modrm_rm,
                                               c->regs, c->d & ByteOp);
                c->modrm_val = *(unsigned long *)c->modrm_ptr;
                return rc;
        }

        if (c->ad_bytes == 2) {
                unsigned bx = c->regs[VCPU_REGS_RBX];
                unsigned bp = c->regs[VCPU_REGS_RBP];
                unsigned si = c->regs[VCPU_REGS_RSI];
                unsigned di = c->regs[VCPU_REGS_RDI];

                /* 16-bit ModR/M decode. */
                switch (c->modrm_mod) {
                case 0:
                        if (c->modrm_rm == 6)
                                c->modrm_ea += insn_fetch(u16, 2, c->eip);
                        break;
                case 1:
                        c->modrm_ea += insn_fetch(s8, 1, c->eip);
                        break;
                case 2:
                        c->modrm_ea += insn_fetch(u16, 2, c->eip);
                        break;
                }
                switch (c->modrm_rm) {
                case 0:
                        c->modrm_ea += bx + si;
                        break;
                case 1:
                        c->modrm_ea += bx + di;
                        break;
                case 2:
                        c->modrm_ea += bp + si;
                        break;
                case 3:
                        c->modrm_ea += bp + di;
                        break;
                case 4:
                        c->modrm_ea += si;
                        break;
                case 5:
                        c->modrm_ea += di;
                        break;
                case 6:
                        if (c->modrm_mod != 0)
                                c->modrm_ea += bp;
                        break;
                case 7:
                        c->modrm_ea += bx;
                        break;
                }
                if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
                    (c->modrm_rm == 6 && c->modrm_mod != 0))
                        if (!c->has_seg_override)
                                set_seg_override(c, VCPU_SREG_SS);
                c->modrm_ea = (u16)c->modrm_ea;
        } else {
                /* 32/64-bit ModR/M decode. */
                if ((c->modrm_rm & 7) == 4) {
                        sib = insn_fetch(u8, 1, c->eip);
                        index_reg |= (sib >> 3) & 7;
                        base_reg |= sib & 7;
                        scale = sib >> 6;

                        if ((base_reg & 7) == 5 && c->modrm_mod == 0)
                                c->modrm_ea += insn_fetch(s32, 4, c->eip);
                        else
                                c->modrm_ea += c->regs[base_reg];
                        if (index_reg != 4)
                                c->modrm_ea += c->regs[index_reg] << scale;
                } else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
                        if (ctxt->mode == X86EMUL_MODE_PROT64)
                                c->rip_relative = 1;
                } else
                        c->modrm_ea += c->regs[c->modrm_rm];
                switch (c->modrm_mod) {
                case 0:
                        if (c->modrm_rm == 5)
                                c->modrm_ea += insn_fetch(s32, 4, c->eip);
                        break;
                case 1:
                        c->modrm_ea += insn_fetch(s8, 1, c->eip);
                        break;
                case 2:
                        c->modrm_ea += insn_fetch(s32, 4, c->eip);
                        break;
                }
        }
done:
        return rc;
}
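
/*
 * 16-bit worked example: modrm 0x46 with disp8 0x10 is mod=1, rm=6, so
 * the effective address is BP + 0x10 -- and because BP is involved the
 * default segment silently becomes SS, as handled above.
 */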
static int decode_abs(struct x86_emulate_ctxt *ctxt,
                      struct x86_emulate_ops *ops)
{
        struct decode_cache *c = &ctxt->decode;
        int rc = 0;

        switch (c->ad_bytes) {
        case 2:
                c->modrm_ea = insn_fetch(u16, 2, c->eip);
                break;
        case 4:
                c->modrm_ea = insn_fetch(u32, 4, c->eip);
                break;
        case 8:
                c->modrm_ea = insn_fetch(u64, 8, c->eip);
                break;
        }
done:
        return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
        struct decode_cache *c = &ctxt->decode;
        int rc = 0;
        int mode = ctxt->mode;
        int def_op_bytes, def_ad_bytes, group;

        /* Shadow copy of register state. Committed on successful emulation. */

        memset(c, 0, sizeof(struct decode_cache));
        c->eip = kvm_rip_read(ctxt->vcpu);
        ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
        memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

        switch (mode) {
        case X86EMUL_MODE_REAL:
        case X86EMUL_MODE_PROT16:
                def_op_bytes = def_ad_bytes = 2;
                break;
        case X86EMUL_MODE_PROT32:
                def_op_bytes = def_ad_bytes = 4;
                break;
#ifdef CONFIG_X86_64
        case X86EMUL_MODE_PROT64:
                def_op_bytes = 4;
                def_ad_bytes = 8;
                break;
#endif
        default:
                return -1;
        }

        c->op_bytes = def_op_bytes;
        c->ad_bytes = def_ad_bytes;

        /* Legacy prefixes. */
        for (;;) {
                switch (c->b = insn_fetch(u8, 1, c->eip)) {
                case 0x66:      /* operand-size override */
                        /* switch between 2/4 bytes */
                        c->op_bytes = def_op_bytes ^ 6;
                        break;
                case 0x67:      /* address-size override */
                        if (mode == X86EMUL_MODE_PROT64)
                                /* switch between 4/8 bytes */
                                c->ad_bytes = def_ad_bytes ^ 12;
                        else
                                /* switch between 2/4 bytes */
                                c->ad_bytes = def_ad_bytes ^ 6;
                        break;
                case 0x26:      /* ES override */
                case 0x2e:      /* CS override */
                case 0x36:      /* SS override */
                case 0x3e:      /* DS override */
                        set_seg_override(c, (c->b >> 3) & 3);
                        break;
                case 0x64:      /* FS override */
                case 0x65:      /* GS override */
                        set_seg_override(c, c->b & 7);
                        break;
                case 0x40 ... 0x4f: /* REX */
                        if (mode != X86EMUL_MODE_PROT64)
                                goto done_prefixes;
                        c->rex_prefix = c->b;
                        continue;
                case 0xf0:      /* LOCK */
                        c->lock_prefix = 1;
                        break;
                case 0xf2:      /* REPNE/REPNZ */
                        c->rep_prefix = REPNE_PREFIX;
                        break;
                case 0xf3:      /* REP/REPE/REPZ */
                        c->rep_prefix = REPE_PREFIX;
                        break;
                default:
                        goto done_prefixes;
                }

                /* Any legacy prefix after a REX prefix nullifies its effect. */

                c->rex_prefix = 0;
        }

done_prefixes:

        /* REX prefix. */
        if (c->rex_prefix)
                if (c->rex_prefix & 8)
                        c->op_bytes = 8;        /* REX.W */

        /* Opcode byte(s). */
        c->d = opcode_table[c->b];
        if (c->d == 0) {
                /* Two-byte opcode? */
                if (c->b == 0x0f) {
                        c->twobyte = 1;
                        c->b = insn_fetch(u8, 1, c->eip);
                        c->d = twobyte_table[c->b];
                }
        }

        if (c->d & Group) {
                group = c->d & GroupMask;
                c->modrm = insn_fetch(u8, 1, c->eip);
                --c->eip;

                group = (group << 3) + ((c->modrm >> 3) & 7);
                if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
                        c->d = group2_table[group];
                else
                        c->d = group_table[group];
        }

        /* Unrecognised? */
        if (c->d == 0) {
                DPRINTF("Cannot emulate %02x\n", c->b);
                return -1;
        }

        if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
                c->op_bytes = 8;

        /* ModRM and SIB bytes. */
        if (c->d & ModRM)
                rc = decode_modrm(ctxt, ops);
        else if (c->d & MemAbs)
                rc = decode_abs(ctxt, ops);
        if (rc)
                goto done;

        if (!c->has_seg_override)
                set_seg_override(c, VCPU_SREG_DS);

        if (!(!c->twobyte && c->b == 0x8d))
                c->modrm_ea += seg_override_base(ctxt, c);

        if (c->ad_bytes != 8)
                c->modrm_ea = (u32)c->modrm_ea;

        /*
         * Decode and fetch the source operand: register, memory
         * or immediate.
         */
        switch (c->d & SrcMask) {
        case SrcNone:
                break;
        case SrcReg:
                decode_register_operand(&c->src, c, 0);
                break;
        case SrcMem16:
                c->src.bytes = 2;
                goto srcmem_common;
        case SrcMem32:
                c->src.bytes = 4;
                goto srcmem_common;
        case SrcMem:
                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                /* Don't fetch the address for invlpg: it could be unmapped. */
                if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
                        break;
        srcmem_common:
                /*
                 * For instructions with a ModR/M byte, switch to register
                 * access if Mod = 3.
                 */
                if ((c->d & ModRM) && c->modrm_mod == 3) {
                        c->src.type = OP_REG;
                        c->src.val = c->modrm_val;
                        c->src.ptr = c->modrm_ptr;
                        break;
                }
                c->src.type = OP_MEM;
                break;
        case SrcImm:
                c->src.type = OP_IMM;
                c->src.ptr = (unsigned long *)c->eip;
                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                if (c->src.bytes == 8)
                        c->src.bytes = 4;
                /* NB. Immediates are sign-extended as necessary. */
                switch (c->src.bytes) {
                case 1:
                        c->src.val = insn_fetch(s8, 1, c->eip);
                        break;
                case 2:
                        c->src.val = insn_fetch(s16, 2, c->eip);
                        break;
                case 4:
                        c->src.val = insn_fetch(s32, 4, c->eip);
                        break;
                }
                break;
        case SrcImmByte:
                c->src.type = OP_IMM;
                c->src.ptr = (unsigned long *)c->eip;
                c->src.bytes = 1;
                c->src.val = insn_fetch(s8, 1, c->eip);
                break;
        }

        /* Decode and fetch the destination operand: register or memory. */
        switch (c->d & DstMask) {
        case ImplicitOps:
                /* Special instructions do their own operand decoding. */
                return 0;
        case DstReg:
                decode_register_operand(&c->dst, c,
                        c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
                break;
        case DstMem:
                if ((c->d & ModRM) && c->modrm_mod == 3) {
                        c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                        c->dst.type = OP_REG;
                        c->dst.val = c->dst.orig_val = c->modrm_val;
                        c->dst.ptr = c->modrm_ptr;
                        break;
                }
                c->dst.type = OP_MEM;
                break;
        case DstAcc:
                c->dst.type = OP_REG;
                c->dst.bytes = c->op_bytes;
                c->dst.ptr = &c->regs[VCPU_REGS_RAX];
                switch (c->op_bytes) {
                case 1:
                        c->dst.val = *(u8 *)c->dst.ptr;
                        break;
                case 2:
                        c->dst.val = *(u16 *)c->dst.ptr;
                        break;
                case 4:
                        c->dst.val = *(u32 *)c->dst.ptr;
                        break;
                }
                c->dst.orig_val = c->dst.val;
                break;
        }

        if (c->rip_relative)
                c->modrm_ea += c->eip;

done:
        return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
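
/*
 * The two halves are meant to run back to back: first x86_decode_insn()
 * fills ctxt->decode, then x86_emulate_insn() below executes and writes
 * back -- in KVM this pairing is typically driven from
 * emulate_instruction() in x86.c.
 */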
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
        struct decode_cache *c = &ctxt->decode;

        c->dst.type  = OP_MEM;
        c->dst.bytes = c->op_bytes;
        c->dst.val = c->src.val;
        register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
        c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
                                               c->regs[VCPU_REGS_RSP]);
}
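
/*
 * Example: a 32-bit "push $imm" (opcode 0x68) arrives here with src.val
 * holding the immediate; RSP drops by 4 first, then dst describes the new
 * top-of-stack slot and the actual store is deferred to writeback().
 */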
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
                                struct x86_emulate_ops *ops)
{
        struct decode_cache *c = &ctxt->decode;
        int rc;

        rc = ops->read_std(register_address(c, ss_base(ctxt),
                                            c->regs[VCPU_REGS_RSP]),
                           &c->dst.val, c->dst.bytes, ctxt->vcpu);
        if (rc != 0)
                return rc;

        register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);
        return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
        struct decode_cache *c = &ctxt->decode;
        switch (c->modrm_reg) {
        case 0: /* rol */
                emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
                break;
        case 1: /* ror */
                emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
                break;
        case 2: /* rcl */
                emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
                break;
        case 3: /* rcr */
                emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
                break;
        case 4: /* sal/shl */
        case 6: /* sal/shl */
                emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
                break;
        case 5: /* shr */
                emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
                break;
        case 7: /* sar */
                emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
                break;
        }
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
                               struct x86_emulate_ops *ops)
{
        struct decode_cache *c = &ctxt->decode;
        int rc = 0;

        switch (c->modrm_reg) {
        case 0 ... 1:   /* test */
                emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
                break;
        case 2: /* not */
                c->dst.val = ~c->dst.val;
                break;
        case 3: /* neg */
                emulate_1op("neg", c->dst, ctxt->eflags);
                break;
        default:
                DPRINTF("Cannot emulate %02x\n", c->b);
                rc = X86EMUL_UNHANDLEABLE;
                break;
        }
        return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
                                struct x86_emulate_ops *ops)
{
        struct decode_cache *c = &ctxt->decode;

        switch (c->modrm_reg) {
        case 0: /* inc */
                emulate_1op("inc", c->dst, ctxt->eflags);
                break;
        case 1: /* dec */
                emulate_1op("dec", c->dst, ctxt->eflags);
                break;
        case 2: /* call near abs */ {
                long int old_eip;
                old_eip = c->eip;
                c->eip = c->src.val;
                c->src.val = old_eip;
                emulate_push(ctxt);
                break;
        }
        case 4: /* jmp abs */
                c->eip = c->src.val;
                break;
        case 6: /* push */
                emulate_push(ctxt);
                break;
        }
        return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
                               struct x86_emulate_ops *ops,
                               unsigned long memop)
{
        struct decode_cache *c = &ctxt->decode;
        u64 old, new;
        int rc;

        rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
        if (rc != 0)
                return rc;

        if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
            ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

                c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
                c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
                ctxt->eflags &= ~EFLG_ZF;

        } else {
                new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
                       (u32) c->regs[VCPU_REGS_RBX];

                rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
                if (rc != 0)
                        return rc;
                ctxt->eflags |= EFLG_ZF;
        }
        return 0;
}
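
/*
 * This is cmpxchg8b: EDX:EAX is compared with the 8-byte memory operand;
 * on a match ECX:EBX is stored and ZF is set, otherwise the old value is
 * loaded back into EDX:EAX and ZF is cleared.
 */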
static inline int writeback(struct x86_emulate_ctxt *ctxt,
                            struct x86_emulate_ops *ops)
{
        int rc;
        struct decode_cache *c = &ctxt->decode;

        switch (c->dst.type) {
        case OP_REG:
                /* The 4-byte case *is* correct:
                 * in 64-bit mode we zero-extend.
                 */
                switch (c->dst.bytes) {
                case 1:
                        *(u8 *)c->dst.ptr = (u8)c->dst.val;
                        break;
                case 2:
                        *(u16 *)c->dst.ptr = (u16)c->dst.val;
                        break;
                case 4:
                        *c->dst.ptr = (u32)c->dst.val;
                        break;  /* 64b: zero-ext */
                case 8:
                        *c->dst.ptr = c->dst.val;
                        break;
                }
                break;
        case OP_MEM:
                if (c->lock_prefix)
                        rc = ops->cmpxchg_emulated(
                                        (unsigned long)c->dst.ptr,
                                        &c->dst.orig_val,
                                        &c->dst.val,
                                        c->dst.bytes,
                                        ctxt->vcpu);
                else
                        rc = ops->write_emulated(
                                        (unsigned long)c->dst.ptr,
                                        &c->dst.val,
                                        c->dst.bytes,
                                        ctxt->vcpu);
                if (rc != 0)
                        return rc;
                break;
        case OP_NONE:
                /* no writeback */
                break;
        default:
                break;
        }
        return 0;
}
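
/*
 * The zero-extension note above mirrors hardware: "mov $1, %eax" in
 * 64-bit mode clears the top 32 bits of RAX, which is why the 4-byte
 * case stores a zero-extended u32 through the full-width pointer.
 */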
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
        unsigned long memop = 0;
        u64 msr_data;
        unsigned long saved_eip = 0;
        struct decode_cache *c = &ctxt->decode;
        unsigned int port;
        int io_dir_in;
        int rc = 0;

        /* Shadow copy of register state. Committed on successful emulation.
         * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
         * modify them.
         */

        memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
        saved_eip = c->eip;

        if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
                memop = c->modrm_ea;

        if (c->rep_prefix && (c->d & String)) {
                /* All REP prefixes have the same first termination condition */
                if (c->regs[VCPU_REGS_RCX] == 0) {
                        kvm_rip_write(ctxt->vcpu, c->eip);
                        goto done;
                }
                /* The second termination condition only applies for REPE
                 * and REPNE. Test if the repeat string operation prefix is
                 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
                 * corresponding termination condition according to:
                 *      - if REPE/REPZ and ZF = 0 then done
                 *      - if REPNE/REPNZ and ZF = 1 then done
                 */
                if ((c->b == 0xa6) || (c->b == 0xa7) ||
                    (c->b == 0xae) || (c->b == 0xaf)) {
                        if ((c->rep_prefix == REPE_PREFIX) &&
                            ((ctxt->eflags & EFLG_ZF) == 0)) {
                                kvm_rip_write(ctxt->vcpu, c->eip);
                                goto done;
                        }
                        if ((c->rep_prefix == REPNE_PREFIX) &&
                            ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
                                kvm_rip_write(ctxt->vcpu, c->eip);
                                goto done;
                        }
                }
                c->regs[VCPU_REGS_RCX]--;
                c->eip = kvm_rip_read(ctxt->vcpu);
        }

        if (c->src.type == OP_MEM) {
                c->src.ptr = (unsigned long *)memop;
                c->src.val = 0;
                rc = ops->read_emulated((unsigned long)c->src.ptr,
                                        &c->src.val,
                                        c->src.bytes,
                                        ctxt->vcpu);
                if (rc != 0)
                        goto done;
                c->src.orig_val = c->src.val;
        }

        if ((c->d & DstMask) == ImplicitOps)
                goto special_insn;

        if (c->dst.type == OP_MEM) {
                c->dst.ptr = (unsigned long *)memop;
                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                c->dst.val = 0;
                if (c->d & BitOp) {
                        unsigned long mask = ~(c->dst.bytes * 8 - 1);

                        c->dst.ptr = (void *)c->dst.ptr +
                                     (c->src.val & mask) / 8;
                }
                if (!(c->d & Mov) &&
                    /* optimisation - avoid slow emulated read */
                    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
                                              &c->dst.val,
                                              c->dst.bytes, ctxt->vcpu)) != 0))
                        goto done;
        }
        c->dst.orig_val = c->dst.val;

special_insn:

        if (c->twobyte)
                goto twobyte_insn;

        switch (c->b) {
        case 0x00 ... 0x05:
              add:              /* add */
                emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
                break;
        case 0x08 ... 0x0d:
              or:               /* or */
                emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
                break;
        case 0x10 ... 0x15:
              adc:              /* adc */
                emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
                break;
        case 0x18 ... 0x1d:
              sbb:              /* sbb */
                emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
                break;
        case 0x20 ... 0x25:
              and:              /* and */
                emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
                break;
        case 0x28 ... 0x2d:
              sub:              /* sub */
                emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
                break;
        case 0x30 ... 0x35:
              xor:              /* xor */
                emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
                break;
        case 0x38 ... 0x3d:
              cmp:              /* cmp */
                emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
                break;
        case 0x40 ... 0x47: /* inc r16/r32 */
                emulate_1op("inc", c->dst, ctxt->eflags);
                break;
        case 0x48 ... 0x4f: /* dec r16/r32 */
                emulate_1op("dec", c->dst, ctxt->eflags);
                break;
        case 0x50 ... 0x57:  /* push reg */
                c->dst.type  = OP_MEM;
                c->dst.bytes = c->op_bytes;
                c->dst.val = c->src.val;
                register_address_increment(c, &c->regs[VCPU_REGS_RSP],
                                           -c->op_bytes);
                c->dst.ptr = (void *) register_address(
                        c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
                break;
        case 0x58 ... 0x5f: /* pop reg */
        pop_instruction:
                if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
                        c->regs[VCPU_REGS_RSP]), c->dst.ptr,
                        c->op_bytes, ctxt->vcpu)) != 0)
                        goto done;

                register_address_increment(c, &c->regs[VCPU_REGS_RSP],
                                           c->op_bytes);
                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        case 0x63:              /* movsxd */
                if (ctxt->mode != X86EMUL_MODE_PROT64)
                        goto cannot_emulate;
                c->dst.val = (s32) c->src.val;
                break;
        case 0x68: /* push imm */
        case 0x6a: /* push imm8 */
                emulate_push(ctxt);
                break;
        case 0x6c:              /* insb */
        case 0x6d:              /* insw/insd */
                if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
                                1,
                                (c->d & ByteOp) ? 1 : c->op_bytes,
                                c->rep_prefix ?
                                address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
                                (ctxt->eflags & EFLG_DF),
                                register_address(c, es_base(ctxt),
                                                 c->regs[VCPU_REGS_RDI]),
                                c->rep_prefix,
                                c->regs[VCPU_REGS_RDX]) == 0) {
                        c->eip = saved_eip;
                        return -1;
                }
                return 0;
        case 0x6e:              /* outsb */
        case 0x6f:              /* outsw/outsd */
                if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
                                0,
                                (c->d & ByteOp) ? 1 : c->op_bytes,
                                c->rep_prefix ?
                                address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
                                (ctxt->eflags & EFLG_DF),
                                register_address(c,
                                                 seg_override_base(ctxt, c),
                                                 c->regs[VCPU_REGS_RSI]),
                                c->rep_prefix,
                                c->regs[VCPU_REGS_RDX]) == 0) {
                        c->eip = saved_eip;
                        return -1;
                }
                return 0;
        case 0x70 ... 0x7f: /* jcc (short) */ {
                int rel = insn_fetch(s8, 1, c->eip);

                if (test_cc(c->b, ctxt->eflags))
                        jmp_rel(c, rel);
                break;
        }
        case 0x80 ... 0x83:     /* Grp1 */
                switch (c->modrm_reg) {
                case 0:
                        goto add;
                case 1:
                        goto or;
                case 2:
                        goto adc;
                case 3:
                        goto sbb;
                case 4:
                        goto and;
                case 5:
                        goto sub;
                case 6:
                        goto xor;
                case 7:
                        goto cmp;
                }
                break;
        case 0x84 ... 0x85:
                emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
                break;
        case 0x86 ... 0x87:     /* xchg */
        xchg:
                /* Write back the register source. */
                switch (c->dst.bytes) {
                case 1:
                        *(u8 *) c->src.ptr = (u8) c->dst.val;
                        break;
                case 2:
                        *(u16 *) c->src.ptr = (u16) c->dst.val;
                        break;
                case 4:
                        *c->src.ptr = (u32) c->dst.val;
                        break;  /* 64b reg: zero-extend */
                case 8:
                        *c->src.ptr = c->dst.val;
                        break;
                }
                /*
                 * Write back the memory destination with implicit LOCK
                 * prefix.
                 */
                c->dst.val = c->src.val;
                c->lock_prefix = 1;
                break;
        case 0x88 ... 0x8b:     /* mov */
                goto mov;
        case 0x8c: { /* mov r/m, sreg */
                struct kvm_segment segreg;

                if (c->modrm_reg <= 5)
                        kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
                else {
                        printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
                               c->modrm);
                        goto cannot_emulate;
                }
                c->dst.val = segreg.selector;
                break;
        }
        case 0x8d: /* lea r16/r32, m */
                c->dst.val = c->modrm_ea;
                break;
        case 0x8e: { /* mov seg, r/m16 */
                uint16_t sel;
                int type_bits;
                int err;

                sel = c->src.val;
                if (c->modrm_reg <= 5) {
                        type_bits = (c->modrm_reg == 1) ? 9 : 1;
                        err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
                                                          type_bits, c->modrm_reg);
                } else {
                        printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
                               c->modrm);
                        goto cannot_emulate;
                }

                if (err < 0)
                        goto cannot_emulate;

                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        }
        case 0x8f:              /* pop (sole member of Grp1a) */
                rc = emulate_grp1a(ctxt, ops);
                if (rc != 0)
                        goto done;
                break;
        case 0x90: /* nop / xchg r8,rax */
                if (!(c->rex_prefix & 1)) { /* nop */
                        c->dst.type = OP_NONE;  /* Disable writeback. */
                        break;
                }
        case 0x91 ... 0x97: /* xchg reg,rax */
                c->src.type = c->dst.type = OP_REG;
                c->src.bytes = c->dst.bytes = c->op_bytes;
                c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
                c->src.val = *(c->src.ptr);
                goto xchg;
        case 0x9c: /* pushf */
                c->src.val = (unsigned long) ctxt->eflags;
                emulate_push(ctxt);
                break;
        case 0x9d: /* popf */
                c->dst.ptr = (unsigned long *) &ctxt->eflags;
                goto pop_instruction;
        case 0xa0 ... 0xa1:     /* mov */
                c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
                c->dst.val = c->src.val;
                break;
        case 0xa2 ... 0xa3:     /* mov */
                c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
                break;
        case 0xa4 ... 0xa5:     /* movs */
                c->dst.type = OP_MEM;
                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                c->dst.ptr = (unsigned long *)register_address(c,
                                                   es_base(ctxt),
                                                   c->regs[VCPU_REGS_RDI]);
                if ((rc = ops->read_emulated(register_address(c,
                                        seg_override_base(ctxt, c),
                                        c->regs[VCPU_REGS_RSI]),
                                        &c->dst.val,
                                        c->dst.bytes, ctxt->vcpu)) != 0)
                        goto done;
                register_address_increment(c, &c->regs[VCPU_REGS_RSI],
                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
                                                                : c->dst.bytes);
                register_address_increment(c, &c->regs[VCPU_REGS_RDI],
                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
                                                                : c->dst.bytes);
                break;
        case 0xa6 ... 0xa7:     /* cmps */
                c->src.type = OP_NONE; /* Disable writeback. */
                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                c->src.ptr = (unsigned long *)register_address(c,
                                       seg_override_base(ctxt, c),
                                                   c->regs[VCPU_REGS_RSI]);
                if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
                                                &c->src.val,
                                                c->src.bytes,
                                                ctxt->vcpu)) != 0)
                        goto done;

                c->dst.type = OP_NONE; /* Disable writeback. */
                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                c->dst.ptr = (unsigned long *)register_address(c,
                                                   es_base(ctxt),
                                                   c->regs[VCPU_REGS_RDI]);
                if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
                                                &c->dst.val,
                                                c->dst.bytes,
                                                ctxt->vcpu)) != 0)
                        goto done;

                DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

                emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

                register_address_increment(c, &c->regs[VCPU_REGS_RSI],
                                       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
                                                                : c->src.bytes);
                register_address_increment(c, &c->regs[VCPU_REGS_RDI],
                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
                                                                : c->dst.bytes);

                break;
        case 0xaa ... 0xab:     /* stos */
                c->dst.type = OP_MEM;
                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                c->dst.ptr = (unsigned long *)register_address(c,
                                                   es_base(ctxt),
                                                   c->regs[VCPU_REGS_RDI]);
                c->dst.val = c->regs[VCPU_REGS_RAX];
                register_address_increment(c, &c->regs[VCPU_REGS_RDI],
                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
                                                                : c->dst.bytes);
                break;
        case 0xac ... 0xad:     /* lods */
                c->dst.type = OP_REG;
                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
                c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
                if ((rc = ops->read_emulated(register_address(c,
                                                seg_override_base(ctxt, c),
                                                c->regs[VCPU_REGS_RSI]),
                                                &c->dst.val,
                                                c->dst.bytes,
                                                ctxt->vcpu)) != 0)
                        goto done;
                register_address_increment(c, &c->regs[VCPU_REGS_RSI],
                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
                                                                : c->dst.bytes);
                break;
        case 0xae ... 0xaf:     /* scas */
                DPRINTF("Urk! I don't handle SCAS.\n");
                goto cannot_emulate;
        case 0xb0 ... 0xbf: /* mov r, imm */
                goto mov;
        case 0xc0 ... 0xc1:
                emulate_grp2(ctxt);
                break;
        case 0xc3: /* ret */
                c->dst.ptr = &c->eip;
                goto pop_instruction;
        case 0xc6 ... 0xc7:     /* mov (sole member of Grp11) */
        mov:
                c->dst.val = c->src.val;
                break;
        case 0xd0 ... 0xd1:     /* Grp2 */
                c->src.val = 1;
                emulate_grp2(ctxt);
                break;
        case 0xd2 ... 0xd3:     /* Grp2 */
                c->src.val = c->regs[VCPU_REGS_RCX];
                emulate_grp2(ctxt);
                break;
        case 0xe4:      /* inb */
        case 0xe5:      /* in */
                port = insn_fetch(u8, 1, c->eip);
                io_dir_in = 1;
                goto do_io;
        case 0xe6: /* outb */
        case 0xe7: /* out */
                port = insn_fetch(u8, 1, c->eip);
                io_dir_in = 0;
                goto do_io;
        case 0xe8: /* call (near) */ {
                long int rel;
                switch (c->op_bytes) {
                case 2:
                        rel = insn_fetch(s16, 2, c->eip);
                        break;
                case 4:
                        rel = insn_fetch(s32, 4, c->eip);
                        break;
                default:
                        DPRINTF("Call: Invalid op_bytes\n");
                        goto cannot_emulate;
                }
                c->src.val = (unsigned long) c->eip;
                jmp_rel(c, rel);
                c->op_bytes = c->ad_bytes;
                emulate_push(ctxt);
                break;
        }
        case 0xe9: /* jmp rel */
                goto jmp;
        case 0xea: /* jmp far */ {
                uint32_t eip;
                uint16_t sel;

                switch (c->op_bytes) {
                case 2:
                        eip = insn_fetch(u16, 2, c->eip);
                        break;
                case 4:
                        eip = insn_fetch(u32, 4, c->eip);
                        break;
                default:
                        DPRINTF("jmp far: Invalid op_bytes\n");
                        goto cannot_emulate;
                }
                sel = insn_fetch(u16, 2, c->eip);
                if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
                        DPRINTF("jmp far: Failed to load CS descriptor\n");
                        goto cannot_emulate;
                }

                c->eip = eip;
                break;
        }
        case 0xeb:
              jmp:              /* jmp rel short */
                jmp_rel(c, c->src.val);
                c->dst.type = OP_NONE; /* Disable writeback. */
                break;
        case 0xec: /* in al,dx */
        case 0xed: /* in (e/r)ax,dx */
                port = c->regs[VCPU_REGS_RDX];
                io_dir_in = 1;
                goto do_io;
        case 0xee: /* out al,dx */
        case 0xef: /* out (e/r)ax,dx */
                port = c->regs[VCPU_REGS_RDX];
                io_dir_in = 0;
        do_io:  if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
                                   (c->d & ByteOp) ? 1 : c->op_bytes,
                                   port) != 0) {
                        c->eip = saved_eip;
                        goto cannot_emulate;
                }
                break;
        case 0xf4:              /* hlt */
                ctxt->vcpu->arch.halt_request = 1;
                break;
        case 0xf5:      /* cmc */
                /* complement carry flag from eflags reg */
                ctxt->eflags ^= EFLG_CF;
                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        case 0xf6 ... 0xf7:     /* Grp3 */
                rc = emulate_grp3(ctxt, ops);
                if (rc != 0)
                        goto done;
                break;
        case 0xf8: /* clc */
                ctxt->eflags &= ~EFLG_CF;
                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        case 0xfa: /* cli */
                ctxt->eflags &= ~X86_EFLAGS_IF;
                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        case 0xfb: /* sti */
                ctxt->eflags |= X86_EFLAGS_IF;
                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        case 0xfc: /* cld */
                ctxt->eflags &= ~EFLG_DF;
                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        case 0xfd: /* std */
                ctxt->eflags |= EFLG_DF;
                c->dst.type = OP_NONE;  /* Disable writeback. */
                break;
        case 0xfe ... 0xff:     /* Grp4/Grp5 */
                rc = emulate_grp45(ctxt, ops);
                if (rc != 0)
                        goto done;
                break;
        }

writeback:
        rc = writeback(ctxt, ops);
        if (rc != 0)
                goto done;

        /* Commit shadow register state. */
        memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
        kvm_rip_write(ctxt->vcpu, c->eip);

done:
        if (rc == X86EMUL_UNHANDLEABLE) {
                c->eip = saved_eip;
                return -1;
        }
        return 0;

twobyte_insn:
        switch (c->b) {
        case 0x01: /* lgdt, lidt, lmsw */
                switch (c->modrm_reg) {
                        u16 size;
                        unsigned long address;

                case 0: /* vmcall */
                        if (c->modrm_mod != 3 || c->modrm_rm != 1)
                                goto cannot_emulate;

                        rc = kvm_fix_hypercall(ctxt->vcpu);
                        if (rc)
                                goto done;

                        /* Let the processor re-execute the fixed hypercall */
                        c->eip = kvm_rip_read(ctxt->vcpu);
                        /* Disable writeback. */
                        c->dst.type = OP_NONE;
                        break;
                case 2: /* lgdt */
                        rc = read_descriptor(ctxt, ops, c->src.ptr,
                                             &size, &address, c->op_bytes);
                        if (rc)
                                goto done;
                        realmode_lgdt(ctxt->vcpu, size, address);
                        /* Disable writeback. */
                        c->dst.type = OP_NONE;
                        break;
                case 3: /* lidt/vmmcall */
                        if (c->modrm_mod == 3 && c->modrm_rm == 1) {
                                rc = kvm_fix_hypercall(ctxt->vcpu);
                                if (rc)
                                        goto done;
                                kvm_emulate_hypercall(ctxt->vcpu);
                        } else {
                                rc = read_descriptor(ctxt, ops, c->src.ptr,
                                                     &size, &address,
                                                     c->op_bytes);
                                if (rc)
                                        goto done;
                                realmode_lidt(ctxt->vcpu, size, address);
                        }
                        /* Disable writeback. */
                        c->dst.type = OP_NONE;
                        break;
                case 4: /* smsw */
                        c->dst.bytes = 2;
                        c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
                        break;
                case 6: /* lmsw */
                        realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
                                      &ctxt->eflags);
                        c->dst.type = OP_NONE;
                        break;
                case 7: /* invlpg*/
                        emulate_invlpg(ctxt->vcpu, memop);
                        /* Disable writeback. */
                        c->dst.type = OP_NONE;
                        break;
                default:
                        goto cannot_emulate;
                }
                break;
        case 0x06:
                emulate_clts(ctxt->vcpu);
                c->dst.type = OP_NONE;
                break;
        case 0x08:              /* invd */
        case 0x09:              /* wbinvd */
        case 0x0d:              /* GrpP (prefetch) */
        case 0x18:              /* Grp16 (prefetch/nop) */
                c->dst.type = OP_NONE;
                break;
        case 0x20: /* mov cr, reg */
                if (c->modrm_mod != 3)
                        goto cannot_emulate;
                c->regs[c->modrm_rm] =
                                realmode_get_cr(ctxt->vcpu, c->modrm_reg);
                c->dst.type = OP_NONE;  /* no writeback */
                break;
        case 0x21: /* mov from dr to reg */
                if (c->modrm_mod != 3)
                        goto cannot_emulate;
                rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
                if (rc)
                        goto cannot_emulate;
                c->dst.type = OP_NONE;  /* no writeback */
                break;
        case 0x22: /* mov reg, cr */
                if (c->modrm_mod != 3)
                        goto cannot_emulate;
                realmode_set_cr(ctxt->vcpu,
                                c->modrm_reg, c->modrm_val, &ctxt->eflags);
                c->dst.type = OP_NONE;
                break;
        case 0x23: /* mov from reg to dr */
                if (c->modrm_mod != 3)
                        goto cannot_emulate;
                rc = emulator_set_dr(ctxt, c->modrm_reg,
                                     c->regs[c->modrm_rm]);
                if (rc)
                        goto cannot_emulate;
                c->dst.type = OP_NONE;  /* no writeback */
                break;
        case 0x30:
                /* wrmsr */
                msr_data = (u32)c->regs[VCPU_REGS_RAX]
                        | ((u64)c->regs[VCPU_REGS_RDX] << 32);
                rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
                if (rc) {
                        kvm_inject_gp(ctxt->vcpu, 0);
                        c->eip = kvm_rip_read(ctxt->vcpu);
                }
                rc = X86EMUL_CONTINUE;
                c->dst.type = OP_NONE;
                break;
        case 0x32:
                /* rdmsr */
                rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
                if (rc) {
                        kvm_inject_gp(ctxt->vcpu, 0);
                        c->eip = kvm_rip_read(ctxt->vcpu);
                } else {
                        c->regs[VCPU_REGS_RAX] = (u32)msr_data;
                        c->regs[VCPU_REGS_RDX] = msr_data >> 32;
                }
                rc = X86EMUL_CONTINUE;
                c->dst.type = OP_NONE;
                break;
        case 0x40 ... 0x4f:     /* cmov */
                c->dst.val = c->dst.orig_val = c->src.val;
                if (!test_cc(c->b, ctxt->eflags))
                        c->dst.type = OP_NONE; /* no writeback */
                break;
        case 0x80 ... 0x8f: /* jnz rel, etc*/ {
                long int rel;

                switch (c->op_bytes) {
                case 2:
                        rel = insn_fetch(s16, 2, c->eip);
                        break;
                case 4:
                        rel = insn_fetch(s32, 4, c->eip);
                        break;
                case 8:
                        rel = insn_fetch(s64, 8, c->eip);
                        break;
                default:
                        DPRINTF("jnz: Invalid op_bytes\n");
                        goto cannot_emulate;
                }
                if (test_cc(c->b, ctxt->eflags))
                        jmp_rel(c, rel);
                c->dst.type = OP_NONE;
                break;
        }
        case 0xa3:
              bt:               /* bt */
                c->dst.type = OP_NONE;
                /* only subword offset */
                c->src.val &= (c->dst.bytes << 3) - 1;
                emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
                break;
        case 0xab:
              bts:              /* bts */
                /* only subword offset */
                c->src.val &= (c->dst.bytes << 3) - 1;
                emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
                break;
        case 0xae:              /* clflush */
                break;
        case 0xb0 ... 0xb1:     /* cmpxchg */
                /*
                 * Save real source value, then compare EAX against
                 * destination.
                 */
                c->src.orig_val = c->src.val;
                c->src.val = c->regs[VCPU_REGS_RAX];
                emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
                if (ctxt->eflags & EFLG_ZF) {
                        /* Success: write back to memory. */
                        c->dst.val = c->src.orig_val;
                } else {
                        /* Failure: write the value we saw to EAX. */
                        c->dst.type = OP_REG;
                        c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
                }
                break;
        case 0xb3:
              btr:              /* btr */
                /* only subword offset */
                c->src.val &= (c->dst.bytes << 3) - 1;
                emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
                break;
        case 0xb6 ... 0xb7:     /* movzx */
                c->dst.bytes = c->op_bytes;
                c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
                                             : (u16) c->src.val;
                break;
        case 0xba:              /* Grp8 */
                switch (c->modrm_reg & 3) {
                case 0:
                        goto bt;
                case 1:
                        goto bts;
                case 2:
                        goto btr;
                case 3:
                        goto btc;
                }
                break;
        case 0xbb:
              btc:              /* btc */
                /* only subword offset */
                c->src.val &= (c->dst.bytes << 3) - 1;
                emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
                break;
        case 0xbe ... 0xbf:     /* movsx */
                c->dst.bytes = c->op_bytes;
                c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
                                                        (s16) c->src.val;
                break;
        case 0xc3:              /* movnti */
                c->dst.bytes = c->op_bytes;
                c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
                                                        (u64) c->src.val;
                break;
        case 0xc7:              /* Grp9 (cmpxchg8b) */
                rc = emulate_grp9(ctxt, ops, memop);
                if (rc != 0)
                        goto done;
                c->dst.type = OP_NONE;
                break;
        }
        goto writeback;

cannot_emulate:
        DPRINTF("Cannot emulate %02x\n", c->b);