KVM: x86 emulator: fix popf emulation
[linux-2.6/verdex.git] / arch/x86/kvm/x86_emulate.c
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<7)
/* Destination is only written; never read. */
#define Mov         (1<<8)
#define BitOp       (1<<9)
#define MemAbs      (1<<10)	/* Memory operand is absolute displacement */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
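
/*
 * Worked example (illustrative only): opcode 0x00 is 'add r/m8, r8', so
 * its entry below is ByteOp | DstMem | SrcReg | ModRM -- an 8-bit
 * operation whose destination is the ModRM-decoded r/m operand and whose
 * source is the ModRM 'reg' field.
 */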

enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};

static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};

static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};

static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};

/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */	\
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)					\
	/* _sav |= EFLAGS & _msk; */					\
	"pushf; "							\
	"pop %"_tmp"; "							\
	"andl %"_msk",%"_LO32 _tmp"; "					\
	"orl %"_LO32 _tmp",%"_sav"; "

#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif

#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: _y ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w"); \
			break;						\
		case 4:							\
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l"); \
			break;						\
		case 8:							\
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break;						\
		}							\
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
		switch ((_dst).bytes) {					\
		case 1:							\
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b"); \
			break;						\
		default:						\
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")

#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "+m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		switch ((_dst).bytes) {					\
		case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;	\
		case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;	\
		case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;	\
		case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		}							\
	} while (0)

/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})

static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
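
/*
 * Example (16-bit address size): with ad_bytes == 2 and SI == 0xffff,
 * register_address_increment(c, &si, 1) leaves the register's upper bits
 * untouched and wraps the low 16 bits to 0x0000, matching the real CPU's
 * modulo-64K address arithmetic.
 */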

static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}

static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}

/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
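
/*
 * Example: with highbyte_regs set, modrm_reg == 5 names CH, which lives in
 * the second byte of RCX -- hence the (modrm_reg & 3) + 1-byte offset above.
 */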

static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}

static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
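
/*
 * Example: condition 0x4 (jz/je) selects case 2 above and tests ZF;
 * condition 0x5 (jnz/jne) hits the same case but, having an odd
 * identifier, is inverted by the final xor.
 */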

static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}

static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
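
/*
 * Example (16-bit decode above): modrm byte 0x47 has mod == 1, rm == 7,
 * so the effective address is BX plus a sign-extended 8-bit displacement.
 */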

static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
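
/*
 * decode_abs handles the moffs forms (opcodes 0xa0-0xa3, tagged MemAbs in
 * the opcode table above), whose operand is an absolute displacement as
 * wide as the address size rather than a ModRM-encoded address.
 */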

int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	case DstAcc:
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
		case 1:
			c->dst.val = *(u8 *)c->dst.ptr;
			break;
		case 2:
			c->dst.val = *(u16 *)c->dst.ptr;
			break;
		case 4:
			c->dst.val = *(u32 *)c->dst.ptr;
			break;
		}
		c->dst.orig_val = c->dst.val;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
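
/*
 * Decode example: opcode 0xf7 carries Group | Group3, so the group lookup
 * above replaces c->d with group_table[Group3*8 + modrm_reg]; for /0
 * (test r/m, imm) that yields DstMem | SrcImm | ModRM.
 */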

static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}
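
/*
 * Note: emulate_push() only stages the operand -- it sets up an OP_MEM
 * destination at the new stack top; the actual guest-memory write happens
 * later, in writeback().
 */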

static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				&c->src.val, c->src.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->src.bytes);
	return rc;
}

static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	c->src.bytes = c->dst.bytes;
	rc = emulate_pop(ctxt, ops);
	if (rc != 0)
		return rc;
	c->dst.val = c->src.val;
	return 0;
}

static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}

static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}

static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: /* call near abs */ {
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}

static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}

static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
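
/*
 * Example: xchg (opcodes 0x86/0x87 below) sets lock_prefix, so its memory
 * write goes through the cmpxchg_emulated path above, giving it the
 * implicitly locked semantics the real instruction has.
 */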

int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}

	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x25:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		emulate_push(ctxt);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		c->src.bytes = c->op_bytes;
		rc = emulate_pop(ctxt, ops);
		if (rc != 0)
			goto done;
		c->dst.val = c->src.val;
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) {	/* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val = (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					   seg_override_base(ctxt, c),
					   c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
				       seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
						 &c->dst.val,
						 c->dst.bytes,
						 ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf:  /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				   (c->d & ByteOp) ? 1 : c->op_bytes,
				   port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:		/* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}