/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<7)
/* Destination is only written; never read. */
#define Mov         (1<<8)
#define BitOp       (1<<9)
#define MemAbs      (1<<10)	/* Memory operand is absolute displacement */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
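/*
 * Illustrative sketch (not part of the original file): each opcode_table
 * entry below packs the operand classes above into a single u16, so the
 * decoder can recover them by masking.  The helper names are hypothetical,
 * added here only to show how the flag fields are meant to be read.
 */
static inline unsigned int example_dst_class(u16 d)
{
	return d & DstMask;	/* e.g. DstReg, DstMem, DstAcc */
}

static inline unsigned int example_src_class(u16 d)
{
	return d & SrcMask;	/* e.g. SrcReg, SrcMem, SrcImm */
}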
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u16 opcode_table[256] = {
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u16 twobyte_table[256] = {
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u16 group_table[] = {
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u16 group2_table[] = {
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */	\
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"orl  %"_LO32 _tmp",("_STK"); "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)					\
	/* _sav |= EFLAGS & _msk; */					\
	"andl %"_msk",%"_LO32 _tmp"; "					\
	"orl  %"_LO32 _tmp",%"_sav"; "

#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	__asm__ __volatile__ (						\
		_PRE_EFLAGS("0", "4", "2")				\
		_op _suffix " %"_x"3,%1; "				\
		_POST_EFLAGS("0", "4", "2")				\
		: "=m" (_eflags), "=m" ((_dst).val),			\
		: _y ((_src).val), "i" (EFLAGS_MASK));
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	unsigned long _tmp;						\
	switch ((_dst).bytes) {						\
		____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w");	\
		____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l");	\
		ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q"));

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	unsigned long _tmp;						\
	switch ((_dst).bytes) {						\
		____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b");	\
		__emulate_2op_nobyte(_op, _src, _dst, _eflags,		\
				     _wx, _wy, _lx, _ly, _qx, _qy);
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")

#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	unsigned long _tmp;						\
	__asm__ __volatile__ (						\
		_PRE_EFLAGS("0", "3", "2")				\
		_op _suffix " %1; "					\
		_POST_EFLAGS("0", "3", "2")				\
		: "=m" (_eflags), "+m" ((_dst).val),			\
		: "i" (EFLAGS_MASK));

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	switch ((_dst).bytes) {						\
	case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;		\
	case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;		\
	case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;		\
	case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break;
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
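/*
 * Usage sketch (added comment, not in the original): the decoder pulls
 * successive instruction bytes with, for example,
 *
 *	c->b = insn_fetch(u8, 1, c->eip);
 *
 * which reads one byte at the current c->eip through the fetch cache and
 * advances c->eip past it; a failed fetch aborts decoding.
 */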
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}
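/*
 * Worked example (added comment, not in the original): with 16-bit
 * addressing c->ad_bytes is 2, ad_mask(c) is 0xffff, and only the low
 * 16 bits of the register take part in the effective address; with
 * c->ad_bytes == sizeof(unsigned long) the register is used unmasked.
 */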
static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}

static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}
static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	eip += ctxt->cs_base;
	rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
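/*
 * Example (added comment, not in the original): with highbyte_regs set,
 * modrm_reg value 4 does not select RSP; it selects AH, i.e. byte 1 of
 * the RAX slot in the register block (regs[4 & 3] + 1).
 */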
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
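/*
 * Worked example (added comment, not in the original): for jne/jnz the
 * condition identifier is 0x5, so (condition & 15) >> 1 selects the ZF
 * test above and the odd low bit inverts it; test_cc(0x5, flags) is
 * therefore true exactly when ZF is clear.
 */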
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;
	int index_reg = 0, base_reg = 0, scale;

		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REG.B */

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
		switch (c->modrm_rm) {
			c->modrm_ea += bx + si;
			c->modrm_ea += bx + di;
			c->modrm_ea += bp + si;
			c->modrm_ea += bp + di;
			if (c->modrm_mod != 0)
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
				c->modrm_ea += c->regs[base_reg];
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			c->modrm_ea += insn_fetch(s32, 4, c->eip);

static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;

	switch (c->ad_bytes) {
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */
	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
	case X86EMUL_MODE_PROT64:

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	switch (c->b = insn_fetch(u8, 1, c->eip)) {
	case 0x66:	/* operand-size override */
		/* switch between 2/4 bytes */
		c->op_bytes = def_op_bytes ^ 6;
	case 0x67:	/* address-size override */
		if (mode == X86EMUL_MODE_PROT64)
			/* switch between 4/8 bytes */
			c->ad_bytes = def_ad_bytes ^ 12;
			/* switch between 2/4 bytes */
			c->ad_bytes = def_ad_bytes ^ 6;
	case 0x26:	/* ES override */
	case 0x2e:	/* CS override */
	case 0x36:	/* SS override */
	case 0x3e:	/* DS override */
		set_seg_override(c, (c->b >> 3) & 3);
	case 0x64:	/* FS override */
	case 0x65:	/* GS override */
		set_seg_override(c, c->b & 7);
	case 0x40 ... 0x4f: /* REX */
		if (mode != X86EMUL_MODE_PROT64)
		c->rex_prefix = c->b;
	case 0xf0:	/* LOCK */
	case 0xf2:	/* REPNE/REPNZ */
		c->rep_prefix = REPNE_PREFIX;
	case 0xf3:	/* REP/REPE/REPZ */
		c->rep_prefix = REPE_PREFIX;

	/* Any legacy prefix after a REX prefix nullifies its effect. */

	if (c->rex_prefix & 8)
		c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
		/* Two-byte opcode? */
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];

		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
			c->d = group_table[group];

		DPRINTF("Cannot emulate %02x\n", c->b);

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))

	/* ModRM and SIB bytes. */
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	 * Decode and fetch the source operand: register, memory
	switch (c->d & SrcMask) {
		decode_register_operand(&c->src, c, 0);
		c->src.bytes = (c->d & ByteOp) ? 1 :
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
		 * For instructions with a ModR/M byte, switch to register
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
		c->src.type = OP_MEM;
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
			c->src.val = insn_fetch(s8, 1, c->eip);
			c->src.val = insn_fetch(s16, 2, c->eip);
			c->src.val = insn_fetch(s32, 4, c->eip);
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.val = insn_fetch(s8, 1, c->eip);

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
		/* Special instructions do their own operand decoding. */
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
		c->dst.type = OP_MEM;
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
			c->dst.val = *(u8 *)c->dst.ptr;
			c->dst.val = *(u16 *)c->dst.ptr;
			c->dst.val = *(u32 *)c->dst.ptr;
		c->dst.orig_val = c->dst.val;

	if (c->rip_relative)
		c->modrm_ea += c->eip;

	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);

static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				&c->src.val, c->src.bytes, ctxt->vcpu);
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->src.bytes);

static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;

	c->src.bytes = c->dst.bytes;
	rc = emulate_pop(ctxt, ops);
	c->dst.val = c->src.val;

static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);

static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		c->dst.val = ~c->dst.val;
		emulate_1op("neg", c->dst, ctxt->eflags);
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;

static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
		emulate_1op("inc", c->dst, ctxt->eflags);
		emulate_1op("dec", c->dst, ctxt->eflags);
	case 2: /* call near abs */ {
		c->eip = c->src.val;
		c->src.val = old_eip;
	case 4: /* jmp abs */
		c->eip = c->src.val;

static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
	struct decode_cache *c = &ctxt->decode;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		      (u32) c->regs[VCPU_REGS_RBX];
		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		ctxt->eflags |= EFLG_ZF;

static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend. */
		switch (c->dst.bytes) {
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
			*c->dst.ptr = c->dst.val;
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
	unsigned long memop = 0;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
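		/*
		 * Worked example (added comment, not in the original):
		 * "repe cmpsb" therefore stops either when RCX counts down
		 * to zero or as soon as a byte comparison clears ZF; the
		 * check below applies this only to cmps (0xa6/0xa7) and
		 * scas (0xae/0xaf).
		 */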
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);

	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
		c->src.orig_val = c->src.val;

	if ((c->d & DstMask) == ImplicitOps)

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						(c->src.val & mask) / 8;
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      c->dst.bytes, ctxt->vcpu)) != 0))
	c->dst.orig_val = c->dst.val;
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
	case 0x50 ... 0x57:  /* push reg */
	case 0x58 ... 0x5f: /* pop reg */
		c->src.bytes = c->op_bytes;
		rc = emulate_pop(ctxt, ops);
		c->dst.val = c->src.val;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				   address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->regs[VCPU_REGS_RDX]) == 0) {
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				   address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
					   seg_override_base(ctxt, c),
					   c->regs[VCPU_REGS_RSI]),
				c->regs[VCPU_REGS_RDX]) == 0) {
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (c->dst.bytes) {
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
			*c->src.ptr = c->dst.val;
		 * Write back the memory destination with implicit LOCK
		c->dst.val = c->src.val;
	case 0x88 ... 0x8b:	/* mov */
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			goto cannot_emulate;
		c->dst.val = segreg.selector;
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
	case 0x8e: { /* mov seg, r/m16 */
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			goto cannot_emulate;
			goto cannot_emulate;
		c->dst.type = OP_NONE;  /* Disable writeback. */
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) {	/* nop */
			c->dst.type = OP_NONE;
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
	case 0x9c: /* pushf */
		c->src.val =  (unsigned long) ctxt->eflags;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					   seg_override_base(ctxt, c),
					   c->regs[VCPU_REGS_RSI]),
					   c->dst.bytes, ctxt->vcpu)) != 0)
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
				       seg_override_base(ctxt, c),
				       c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
				       c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
	case 0xc3: /* ret */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
		c->dst.val = c->src.val;
	case 0xd0 ... 0xd1:	/* Grp2 */
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
	case 0xe4: 	/* inb */
		port = insn_fetch(u8, 1, c->eip);
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = insn_fetch(u8, 1, c->eip);
	case 0xe8: /* call (near) */ {
		switch (c->op_bytes) {
			rel = insn_fetch(s16, 2, c->eip);
			rel = insn_fetch(s32, 4, c->eip);
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		c->src.val = (unsigned long) c->eip;
		c->op_bytes = c->ad_bytes;
	case 0xe9: /* jmp rel */
	case 0xea: /* jmp far */ {
		switch (c->op_bytes) {
			eip = insn_fetch(u16, 2, c->eip);
			eip = insn_fetch(u32, 4, c->eip);
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
	jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				   (c->d & ByteOp) ? 1 : c->op_bytes,
			goto cannot_emulate;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);

	rc = writeback(ctxt, ops);

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

	if (rc == X86EMUL_UNHANDLEABLE) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;
			rc = kvm_fix_hypercall(ctxt->vcpu);
			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				kvm_emulate_hypercall(ctxt->vcpu);
				rc = read_descriptor(ctxt, ops, c->src.ptr,
				realmode_lidt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
			c->dst.type = OP_NONE;
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			goto cannot_emulate;
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		switch (c->op_bytes) {
			rel = insn_fetch(s16, 2, c->eip);
			rel = insn_fetch(s32, 4, c->eip);
			rel = insn_fetch(s64, 8, c->eip);
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		if (test_cc(c->b, ctxt->eflags))
		c->dst.type = OP_NONE;
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
	case 0xae:              /* clflush */
	case 0xb0 ... 0xb1:	/* cmpxchg */
		 * Save real source value, then compare EAX against
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		c->dst.type = OP_NONE;
	DPRINTF("Cannot emulate %02x\n", c->b);