fpu init fix (Jocelyn Mayer)
[qemu/qemu_0_9_1_stable.git] / target-i386 / translate.c
blobad5acc9c48743479258e08775485a97cc6aa4cc8
1 /*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25 #include <signal.h>
26 #include <assert.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
32 /* XXX: move that elsewhere */
/* Translation-time state and instruction-prefix / REX decode macros.
   NOTE(review): this dump embeds the original line numbers in each line and
   appears to be missing some original lines (blank lines dropped by
   extraction); code below is left byte-identical. */
/* output buffers for the generated micro-op stream */
33 static uint16_t *gen_opc_ptr;
34 static uint32_t *gen_opparam_ptr;
/* instruction prefix bits accumulated during decode */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
/* x86-64 build: REX prefix fields come from the DisasContext */
42 #ifdef TARGET_X86_64
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49 #if 1
50 #define BUGGY_64(x) NULL
51 #endif
52 #else
/* 32-bit-only build: all 64-bit hooks compile out to NULL/0 */
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
55 #define CODE64(s) 0
56 #define REX_X(s) 0
57 #define REX_B(s) 0
58 #endif
/* non-zero when a REX prefix selects the "high" byte-register encoding */
60 #ifdef TARGET_X86_64
61 static int x86_64_hregs;
62 #endif
64 #ifdef USE_DIRECT_JUMP
65 #define TBPARAM(x)
66 #else
67 #define TBPARAM(x) (long)(x)
68 #endif
/* Per-translation-block decoder state, threaded through every gen_* helper. */
70 typedef struct DisasContext {
71 /* current insn context */
72 int override; /* -1 if no override */
73 int prefix;
74 int aflag, dflag;
75 target_ulong pc; /* pc = eip + cs_base */
76 int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base; /* base of CS segment */
80 int pe; /* protected mode */
81 int code32; /* 32 bit code segment */
82 #ifdef TARGET_X86_64
83 int lma; /* long mode active */
84 int code64; /* 64 bit code segment */
85 int rex_x, rex_b;
86 #endif
87 int ss32; /* 32 bit stack segment */
88 int cc_op; /* current CC operation */
89 int addseg; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st; /* currently unused */
91 int vm86; /* vm86 mode */
92 int cpl;
93 int iopl;
94 int tf; /* TF cpu flag */
95 int singlestep_enabled; /* "hardware" single step enabled */
96 int jmp_opt; /* use direct block chaining for direct jumps */
97 int mem_index; /* select memory access functions */
98 int flags; /* all execution flags */
99 struct TranslationBlock *tb;
100 int popl_esp_hack; /* for correct popl with esp base handling */
101 int rip_offset; /* only used in x86_64, but left for simplicity */
102 int cpuid_features;
103 } DisasContext;
/* Forward declarations for control-flow helpers defined later in the file. */
105 static void gen_eob(DisasContext *s);
106 static void gen_jmp(DisasContext *s, target_ulong eip);
107 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109 /* i386 arith/logic operations */
/* NOTE(review): the closing '};' of the enums below appears to have been
   dropped by extraction; ordering matches the /r field of the opcode. */
110 enum {
111 OP_ADDL,
112 OP_ORL,
113 OP_ADCL,
114 OP_SBBL,
115 OP_ANDL,
116 OP_SUBL,
117 OP_XORL,
118 OP_CMPL,
121 /* i386 shift ops */
122 enum {
123 OP_ROL,
124 OP_ROR,
125 OP_RCL,
126 OP_RCR,
127 OP_SHL,
128 OP_SHR,
129 OP_SHL1, /* undocumented */
130 OP_SAR = 7,
/* micro-op opcode indices, generated from opc.h */
133 enum {
134 #define DEF(s, n, copy_size) INDEX_op_ ## s,
135 #include "opc.h"
136 #undef DEF
137 NB_OPS,
140 #include "gen-op.h"
142 /* operand size */
143 enum {
144 OT_BYTE = 0,
145 OT_WORD,
146 OT_LONG,
147 OT_QUAD,
/* operand register numbering used by gen_op()/gen_shift(); values >= OR_TMP0
   denote temporaries rather than architectural registers */
150 enum {
151 /* I386 int registers */
152 OR_EAX, /* MUST be even numbered */
153 OR_ECX,
154 OR_EDX,
155 OR_EBX,
156 OR_ESP,
157 OR_EBP,
158 OR_ESI,
159 OR_EDI,
161 OR_TMP0 = 16, /* temporary operand register */
162 OR_TMP1,
163 OR_A0, /* temporary register used when doing address evaluation */
/* DEF_REGS expands to one table entry per architectural register;
   the x86-64 variant covers R8..R15 as well */
166 #ifdef TARGET_X86_64
168 #define NB_OP_SIZES 4
170 #define DEF_REGS(prefix, suffix) \
171 prefix ## EAX ## suffix,\
172 prefix ## ECX ## suffix,\
173 prefix ## EDX ## suffix,\
174 prefix ## EBX ## suffix,\
175 prefix ## ESP ## suffix,\
176 prefix ## EBP ## suffix,\
177 prefix ## ESI ## suffix,\
178 prefix ## EDI ## suffix,\
179 prefix ## R8 ## suffix,\
180 prefix ## R9 ## suffix,\
181 prefix ## R10 ## suffix,\
182 prefix ## R11 ## suffix,\
183 prefix ## R12 ## suffix,\
184 prefix ## R13 ## suffix,\
185 prefix ## R14 ## suffix,\
186 prefix ## R15 ## suffix,
/* DEF_BREGS: byte-register wrappers that pick SPL..DIL vs AH..BH at runtime
   depending on whether a REX prefix was seen (x86_64_hregs) */
188 #define DEF_BREGS(prefixb, prefixh, suffix) \
190 static void prefixb ## ESP ## suffix ## _wrapper(void) \
192 if (x86_64_hregs) \
193 prefixb ## ESP ## suffix (); \
194 else \
195 prefixh ## EAX ## suffix (); \
198 static void prefixb ## EBP ## suffix ## _wrapper(void) \
200 if (x86_64_hregs) \
201 prefixb ## EBP ## suffix (); \
202 else \
203 prefixh ## ECX ## suffix (); \
206 static void prefixb ## ESI ## suffix ## _wrapper(void) \
208 if (x86_64_hregs) \
209 prefixb ## ESI ## suffix (); \
210 else \
211 prefixh ## EDX ## suffix (); \
214 static void prefixb ## EDI ## suffix ## _wrapper(void) \
216 if (x86_64_hregs) \
217 prefixb ## EDI ## suffix (); \
218 else \
219 prefixh ## EBX ## suffix (); \
222 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
223 DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
224 DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
225 DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
227 #else /* !TARGET_X86_64 */
229 #define NB_OP_SIZES 3
231 #define DEF_REGS(prefix, suffix) \
232 prefix ## EAX ## suffix,\
233 prefix ## ECX ## suffix,\
234 prefix ## EDX ## suffix,\
235 prefix ## EBX ## suffix,\
236 prefix ## ESP ## suffix,\
237 prefix ## EBP ## suffix,\
238 prefix ## ESI ## suffix,\
239 prefix ## EDI ## suffix,
241 #endif /* !TARGET_X86_64 */
/* Dispatch tables mapping [operand size][register] to the generated micro-op
   that moves between the T0/T1/A0 temporaries and CPU registers.
   NOTE(review): inner '},'/closing '};' lines of these initializers appear to
   have been dropped by extraction; entries left byte-identical. */
243 static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
244 [OT_BYTE] = {
245 gen_op_movb_EAX_T0,
246 gen_op_movb_ECX_T0,
247 gen_op_movb_EDX_T0,
248 gen_op_movb_EBX_T0,
249 #ifdef TARGET_X86_64
/* ESP..EDI byte slots need the REX-aware wrappers (SPL..DIL vs AH..BH) */
250 gen_op_movb_ESP_T0_wrapper,
251 gen_op_movb_EBP_T0_wrapper,
252 gen_op_movb_ESI_T0_wrapper,
253 gen_op_movb_EDI_T0_wrapper,
254 gen_op_movb_R8_T0,
255 gen_op_movb_R9_T0,
256 gen_op_movb_R10_T0,
257 gen_op_movb_R11_T0,
258 gen_op_movb_R12_T0,
259 gen_op_movb_R13_T0,
260 gen_op_movb_R14_T0,
261 gen_op_movb_R15_T0,
262 #else
263 gen_op_movh_EAX_T0,
264 gen_op_movh_ECX_T0,
265 gen_op_movh_EDX_T0,
266 gen_op_movh_EBX_T0,
267 #endif
269 [OT_WORD] = {
270 DEF_REGS(gen_op_movw_, _T0)
272 [OT_LONG] = {
273 DEF_REGS(gen_op_movl_, _T0)
275 #ifdef TARGET_X86_64
276 [OT_QUAD] = {
277 DEF_REGS(gen_op_movq_, _T0)
279 #endif
/* same layout as gen_op_mov_reg_T0, but sourcing the T1 temporary */
282 static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
283 [OT_BYTE] = {
284 gen_op_movb_EAX_T1,
285 gen_op_movb_ECX_T1,
286 gen_op_movb_EDX_T1,
287 gen_op_movb_EBX_T1,
288 #ifdef TARGET_X86_64
289 gen_op_movb_ESP_T1_wrapper,
290 gen_op_movb_EBP_T1_wrapper,
291 gen_op_movb_ESI_T1_wrapper,
292 gen_op_movb_EDI_T1_wrapper,
293 gen_op_movb_R8_T1,
294 gen_op_movb_R9_T1,
295 gen_op_movb_R10_T1,
296 gen_op_movb_R11_T1,
297 gen_op_movb_R12_T1,
298 gen_op_movb_R13_T1,
299 gen_op_movb_R14_T1,
300 gen_op_movb_R15_T1,
301 #else
302 gen_op_movh_EAX_T1,
303 gen_op_movh_ECX_T1,
304 gen_op_movh_EDX_T1,
305 gen_op_movh_EBX_T1,
306 #endif
308 [OT_WORD] = {
309 DEF_REGS(gen_op_movw_, _T1)
311 [OT_LONG] = {
312 DEF_REGS(gen_op_movl_, _T1)
314 #ifdef TARGET_X86_64
315 [OT_QUAD] = {
316 DEF_REGS(gen_op_movq_, _T1)
318 #endif
/* store A0 (address temporary) into a register; no byte-size row */
321 static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
322 [0] = {
323 DEF_REGS(gen_op_movw_, _A0)
325 [1] = {
326 DEF_REGS(gen_op_movl_, _A0)
328 #ifdef TARGET_X86_64
329 [2] = {
330 DEF_REGS(gen_op_movq_, _A0)
332 #endif
/* load register -> T0/T1 temporary, indexed [size][tn][reg] */
335 static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
337 [OT_BYTE] = {
339 gen_op_movl_T0_EAX,
340 gen_op_movl_T0_ECX,
341 gen_op_movl_T0_EDX,
342 gen_op_movl_T0_EBX,
343 #ifdef TARGET_X86_64
344 gen_op_movl_T0_ESP_wrapper,
345 gen_op_movl_T0_EBP_wrapper,
346 gen_op_movl_T0_ESI_wrapper,
347 gen_op_movl_T0_EDI_wrapper,
348 gen_op_movl_T0_R8,
349 gen_op_movl_T0_R9,
350 gen_op_movl_T0_R10,
351 gen_op_movl_T0_R11,
352 gen_op_movl_T0_R12,
353 gen_op_movl_T0_R13,
354 gen_op_movl_T0_R14,
355 gen_op_movl_T0_R15,
356 #else
357 gen_op_movh_T0_EAX,
358 gen_op_movh_T0_ECX,
359 gen_op_movh_T0_EDX,
360 gen_op_movh_T0_EBX,
361 #endif
364 gen_op_movl_T1_EAX,
365 gen_op_movl_T1_ECX,
366 gen_op_movl_T1_EDX,
367 gen_op_movl_T1_EBX,
368 #ifdef TARGET_X86_64
369 gen_op_movl_T1_ESP_wrapper,
370 gen_op_movl_T1_EBP_wrapper,
371 gen_op_movl_T1_ESI_wrapper,
372 gen_op_movl_T1_EDI_wrapper,
373 gen_op_movl_T1_R8,
374 gen_op_movl_T1_R9,
375 gen_op_movl_T1_R10,
376 gen_op_movl_T1_R11,
377 gen_op_movl_T1_R12,
378 gen_op_movl_T1_R13,
379 gen_op_movl_T1_R14,
380 gen_op_movl_T1_R15,
381 #else
382 gen_op_movh_T1_EAX,
383 gen_op_movh_T1_ECX,
384 gen_op_movh_T1_EDX,
385 gen_op_movh_T1_EBX,
386 #endif
/* word/long/quad loads all use the 32-bit mov ops (upper bits handled
   elsewhere -- presumably by the ops themselves; TODO confirm) */
389 [OT_WORD] = {
391 DEF_REGS(gen_op_movl_T0_, )
394 DEF_REGS(gen_op_movl_T1_, )
397 [OT_LONG] = {
399 DEF_REGS(gen_op_movl_T0_, )
402 DEF_REGS(gen_op_movl_T1_, )
405 #ifdef TARGET_X86_64
406 [OT_QUAD] = {
408 DEF_REGS(gen_op_movl_T0_, )
411 DEF_REGS(gen_op_movl_T1_, )
414 #endif
/* load a register into the A0 address temporary (32-bit form) */
417 static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
418 DEF_REGS(gen_op_movl_A0_, )
/* A0 += reg << scale, indexed [scale][reg] (SIB addressing) */
421 static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
422 [0] = {
423 DEF_REGS(gen_op_addl_A0_, )
425 [1] = {
426 DEF_REGS(gen_op_addl_A0_, _s1)
428 [2] = {
429 DEF_REGS(gen_op_addl_A0_, _s2)
431 [3] = {
432 DEF_REGS(gen_op_addl_A0_, _s3)
436 #ifdef TARGET_X86_64
/* 64-bit variants of the A0 address helpers */
437 static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
438 DEF_REGS(gen_op_movq_A0_, )
441 static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
442 [0] = {
443 DEF_REGS(gen_op_addq_A0_, )
445 [1] = {
446 DEF_REGS(gen_op_addq_A0_, _s1)
448 [2] = {
449 DEF_REGS(gen_op_addq_A0_, _s2)
451 [3] = {
452 DEF_REGS(gen_op_addq_A0_, _s3)
455 #endif
/* conditional move T1 -> reg (CMOVcc); no byte-size row */
457 static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
458 [0] = {
459 DEF_REGS(gen_op_cmovw_, _T1_T0)
461 [1] = {
462 DEF_REGS(gen_op_cmovl_, _T1_T0)
464 #ifdef TARGET_X86_64
465 [2] = {
466 DEF_REGS(gen_op_cmovq_, _T1_T0)
468 #endif
/* Arithmetic/shift/bit-test micro-op dispatch tables.
   The "mem" variants are indexed by (ot + mem_index) and exist in
   _raw/_kernel/_user flavors (hence the 3 * 4 sizing).
   NOTE(review): closing '};' lines appear dropped by extraction. */
/* simple logic ops indexed by OP_*; NULL slots are handled specially
   (add/sub/adc/sbb/cmp take other paths in gen_op()) */
471 static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
472 NULL,
473 gen_op_orl_T0_T1,
474 NULL,
475 NULL,
476 gen_op_andl_T0_T1,
477 NULL,
478 gen_op_xorl_T0_T1,
479 NULL,
/* carry-using ops: [size][op - OP_ADCL] */
482 #define DEF_ARITHC(SUFFIX)\
484 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
488 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
492 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
496 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
500 static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
501 DEF_ARITHC( )
504 static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
505 DEF_ARITHC(_raw)
506 #ifndef CONFIG_USER_ONLY
507 DEF_ARITHC(_kernel)
508 DEF_ARITHC(_user)
509 #endif
/* cc_op value left behind by each OP_* (adc/sbb behave as add/sub for cc) */
512 static const int cc_op_arithb[8] = {
513 CC_OP_ADDB,
514 CC_OP_LOGICB,
515 CC_OP_ADDB,
516 CC_OP_SUBB,
517 CC_OP_LOGICB,
518 CC_OP_SUBB,
519 CC_OP_LOGICB,
520 CC_OP_SUBB,
523 #define DEF_CMPXCHG(SUFFIX)\
524 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529 static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
530 DEF_CMPXCHG( )
533 static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
534 DEF_CMPXCHG(_raw)
535 #ifndef CONFIG_USER_ONLY
536 DEF_CMPXCHG(_kernel)
537 DEF_CMPXCHG(_user)
538 #endif
/* shifts indexed [size][OP_ROL..OP_SAR]; the shl entry is repeated at
   index 6 on purpose -- OP_SHL1 is the undocumented alias of SHL */
541 #define DEF_SHIFT(SUFFIX)\
543 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
553 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
563 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
573 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
583 static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
584 DEF_SHIFT( )
587 static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
588 DEF_SHIFT(_raw)
589 #ifndef CONFIG_USER_ONLY
590 DEF_SHIFT(_kernel)
591 DEF_SHIFT(_user)
592 #endif
/* double-precision shifts (SHLD/SHRD); byte row is NULL (no byte form) */
595 #define DEF_SHIFTD(SUFFIX, op)\
597 NULL,\
598 NULL,\
601 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
605 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
609 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
610 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
613 static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
614 DEF_SHIFTD(, im)
617 static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
618 DEF_SHIFTD(, ECX)
621 static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
622 DEF_SHIFTD(_raw, im)
623 #ifndef CONFIG_USER_ONLY
624 DEF_SHIFTD(_kernel, im)
625 DEF_SHIFTD(_user, im)
626 #endif
629 static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
630 DEF_SHIFTD(_raw, ECX)
631 #ifndef CONFIG_USER_ONLY
632 DEF_SHIFTD(_kernel, ECX)
633 DEF_SHIFTD(_user, ECX)
634 #endif
/* bit-test ops (BT/BTS/BTR/BTC) indexed [size-1][variant] */
637 static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
638 [0] = {
639 gen_op_btw_T0_T1_cc,
640 gen_op_btsw_T0_T1_cc,
641 gen_op_btrw_T0_T1_cc,
642 gen_op_btcw_T0_T1_cc,
644 [1] = {
645 gen_op_btl_T0_T1_cc,
646 gen_op_btsl_T0_T1_cc,
647 gen_op_btrl_T0_T1_cc,
648 gen_op_btcl_T0_T1_cc,
650 #ifdef TARGET_X86_64
651 [2] = {
652 gen_op_btq_T0_T1_cc,
653 gen_op_btsq_T0_T1_cc,
654 gen_op_btrq_T0_T1_cc,
655 gen_op_btcq_T0_T1_cc,
657 #endif
660 static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
661 gen_op_add_bitw_A0_T1,
662 gen_op_add_bitl_A0_T1,
663 X86_64_ONLY(gen_op_add_bitq_A0_T1),
/* bit-scan (BSF/BSR) indexed [size-1][0=bsf,1=bsr] */
666 static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
667 [0] = {
668 gen_op_bsfw_T0_cc,
669 gen_op_bsrw_T0_cc,
671 [1] = {
672 gen_op_bsfl_T0_cc,
673 gen_op_bsrl_T0_cc,
675 #ifdef TARGET_X86_64
676 [2] = {
677 gen_op_bsfq_T0_cc,
678 gen_op_bsrq_T0_cc,
680 #endif
/* Memory load/store dispatch tables, indexed by (ot + mem_index);
   each has 4 size slots per _raw/_kernel/_user access flavor. */
/* sign-extending loads */
683 static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
684 gen_op_ldsb_raw_T0_A0,
685 gen_op_ldsw_raw_T0_A0,
686 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
687 NULL,
688 #ifndef CONFIG_USER_ONLY
689 gen_op_ldsb_kernel_T0_A0,
690 gen_op_ldsw_kernel_T0_A0,
691 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
692 NULL,
694 gen_op_ldsb_user_T0_A0,
695 gen_op_ldsw_user_T0_A0,
696 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
697 NULL,
698 #endif
/* zero-extending loads (byte/word only) */
701 static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
702 gen_op_ldub_raw_T0_A0,
703 gen_op_lduw_raw_T0_A0,
704 NULL,
705 NULL,
707 #ifndef CONFIG_USER_ONLY
708 gen_op_ldub_kernel_T0_A0,
709 gen_op_lduw_kernel_T0_A0,
710 NULL,
711 NULL,
713 gen_op_ldub_user_T0_A0,
714 gen_op_lduw_user_T0_A0,
715 NULL,
716 NULL,
717 #endif
720 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
721 static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
722 gen_op_ldub_raw_T0_A0,
723 gen_op_lduw_raw_T0_A0,
724 gen_op_ldl_raw_T0_A0,
725 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
727 #ifndef CONFIG_USER_ONLY
728 gen_op_ldub_kernel_T0_A0,
729 gen_op_lduw_kernel_T0_A0,
730 gen_op_ldl_kernel_T0_A0,
731 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
733 gen_op_ldub_user_T0_A0,
734 gen_op_lduw_user_T0_A0,
735 gen_op_ldl_user_T0_A0,
736 X86_64_ONLY(gen_op_ldq_user_T0_A0),
737 #endif
/* same as above, loading into T1 */
740 static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
741 gen_op_ldub_raw_T1_A0,
742 gen_op_lduw_raw_T1_A0,
743 gen_op_ldl_raw_T1_A0,
744 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
746 #ifndef CONFIG_USER_ONLY
747 gen_op_ldub_kernel_T1_A0,
748 gen_op_lduw_kernel_T1_A0,
749 gen_op_ldl_kernel_T1_A0,
750 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
752 gen_op_ldub_user_T1_A0,
753 gen_op_lduw_user_T1_A0,
754 gen_op_ldl_user_T1_A0,
755 X86_64_ONLY(gen_op_ldq_user_T1_A0),
756 #endif
/* stores from T0 */
759 static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
760 gen_op_stb_raw_T0_A0,
761 gen_op_stw_raw_T0_A0,
762 gen_op_stl_raw_T0_A0,
763 X86_64_ONLY(gen_op_stq_raw_T0_A0),
765 #ifndef CONFIG_USER_ONLY
766 gen_op_stb_kernel_T0_A0,
767 gen_op_stw_kernel_T0_A0,
768 gen_op_stl_kernel_T0_A0,
769 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
771 gen_op_stb_user_T0_A0,
772 gen_op_stw_user_T0_A0,
773 gen_op_stl_user_T0_A0,
774 X86_64_ONLY(gen_op_stq_user_T0_A0),
775 #endif
/* stores from T1 (no byte form -- NULL slots) */
778 static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
779 NULL,
780 gen_op_stw_raw_T1_A0,
781 gen_op_stl_raw_T1_A0,
782 X86_64_ONLY(gen_op_stq_raw_T1_A0),
784 #ifndef CONFIG_USER_ONLY
785 NULL,
786 gen_op_stw_kernel_T1_A0,
787 gen_op_stl_kernel_T1_A0,
788 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
790 NULL,
791 gen_op_stw_user_T1_A0,
792 gen_op_stl_user_T1_A0,
793 X86_64_ONLY(gen_op_stq_user_T1_A0),
794 #endif
/* Emit the micro-op that sets EIP/RIP to an immediate, choosing the
   narrowest encoding (32-bit zero-extend, 32-bit sign-extend, full 64-bit). */
797 static inline void gen_jmp_im(target_ulong pc)
799 #ifdef TARGET_X86_64
800 if (pc == (uint32_t)pc) {
801 gen_op_movl_eip_im(pc);
802 } else if (pc == (int32_t)pc) {
803 gen_op_movq_eip_im(pc);
804 } else {
805 gen_op_movq_eip_im64(pc >> 32, pc);
807 #else
808 gen_op_movl_eip_im(pc);
809 #endif
/* Compute the source address of a string instruction into A0 from ESI,
   honoring address size (16/32/64) and any segment override prefix. */
812 static inline void gen_string_movl_A0_ESI(DisasContext *s)
814 int override;
816 override = s->override;
817 #ifdef TARGET_X86_64
818 if (s->aflag == 2) {
/* 64-bit address: segment base added only on explicit override */
819 if (override >= 0) {
820 gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
821 gen_op_addq_A0_reg_sN[0][R_ESI]();
822 } else {
823 gen_op_movq_A0_reg[R_ESI]();
825 } else
826 #endif
827 if (s->aflag) {
828 /* 32 bit address */
829 if (s->addseg && override < 0)
830 override = R_DS;
831 if (override >= 0) {
832 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
833 gen_op_addl_A0_reg_sN[0][R_ESI]();
834 } else {
835 gen_op_movl_A0_reg[R_ESI]();
837 } else {
838 /* 16 address, always override */
839 if (override < 0)
840 override = R_DS;
841 gen_op_movl_A0_reg[R_ESI]();
842 gen_op_andl_A0_ffff();
843 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* Compute the destination address of a string instruction into A0 from EDI.
   Destination always uses ES -- segment overrides do not apply here. */
847 static inline void gen_string_movl_A0_EDI(DisasContext *s)
849 #ifdef TARGET_X86_64
850 if (s->aflag == 2) {
851 gen_op_movq_A0_reg[R_EDI]();
852 } else
853 #endif
854 if (s->aflag) {
855 if (s->addseg) {
856 gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
857 gen_op_addl_A0_reg_sN[0][R_EDI]();
858 } else {
859 gen_op_movl_A0_reg[R_EDI]();
861 } else {
/* 16-bit addressing: mask to 16 bits then add the ES base */
862 gen_op_movl_A0_reg[R_EDI]();
863 gen_op_andl_A0_ffff();
864 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
/* Helper dispatch tables for string and I/O instructions.
   NOTE(review): closing '};' lines appear dropped by extraction. */
/* load +/-element-size (per DF flag) into T0, per operand size */
868 static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
869 gen_op_movl_T0_Dshiftb,
870 gen_op_movl_T0_Dshiftw,
871 gen_op_movl_T0_Dshiftl,
872 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
/* conditional jumps on (E/R)CX, indexed by address size */
875 static GenOpFunc1 *gen_op_jnz_ecx[3] = {
876 gen_op_jnz_ecxw,
877 gen_op_jnz_ecxl,
878 X86_64_ONLY(gen_op_jnz_ecxq),
881 static GenOpFunc1 *gen_op_jz_ecx[3] = {
882 gen_op_jz_ecxw,
883 gen_op_jz_ecxl,
884 X86_64_ONLY(gen_op_jz_ecxq),
887 static GenOpFunc *gen_op_dec_ECX[3] = {
888 gen_op_decw_ECX,
889 gen_op_decl_ECX,
890 X86_64_ONLY(gen_op_decq_ECX),
/* repz/repnz loop-exit tests, indexed [nz][operand size] */
893 static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
895 gen_op_jnz_subb,
896 gen_op_jnz_subw,
897 gen_op_jnz_subl,
898 X86_64_ONLY(gen_op_jnz_subq),
901 gen_op_jz_subb,
902 gen_op_jz_subw,
903 gen_op_jz_subl,
904 X86_64_ONLY(gen_op_jz_subq),
/* port I/O micro-ops, per operand size */
908 static GenOpFunc *gen_op_in_DX_T0[3] = {
909 gen_op_inb_DX_T0,
910 gen_op_inw_DX_T0,
911 gen_op_inl_DX_T0,
914 static GenOpFunc *gen_op_out_DX_T0[3] = {
915 gen_op_outb_DX_T0,
916 gen_op_outw_DX_T0,
917 gen_op_outl_DX_T0,
920 static GenOpFunc *gen_op_in[3] = {
921 gen_op_inb_T0_T1,
922 gen_op_inw_T0_T1,
923 gen_op_inl_T0_T1,
926 static GenOpFunc *gen_op_out[3] = {
927 gen_op_outb_T0_T1,
928 gen_op_outw_T0_T1,
929 gen_op_outl_T0_T1,
/* TSS I/O-permission-bitmap checks */
932 static GenOpFunc *gen_check_io_T0[3] = {
933 gen_op_check_iob_T0,
934 gen_op_check_iow_T0,
935 gen_op_check_iol_T0,
938 static GenOpFunc *gen_check_io_DX[3] = {
939 gen_op_check_iob_DX,
940 gen_op_check_iow_DX,
941 gen_op_check_iol_DX,
/* Emit an I/O permission check (protected mode with CPL > IOPL, or vm86):
   flags and EIP are synced first so the check helper can raise #GP. */
944 static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
946 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
947 if (s->cc_op != CC_OP_DYNAMIC)
948 gen_op_set_cc_op(s->cc_op);
949 gen_jmp_im(cur_eip);
950 if (use_dx)
951 gen_check_io_DX[ot]();
952 else
953 gen_check_io_T0[ot]();
/* Emit one MOVS iteration: load from [ESI], store to ES:[EDI], then
   advance both index registers by the DF-directed element size. */
957 static inline void gen_movs(DisasContext *s, int ot)
959 gen_string_movl_A0_ESI(s);
960 gen_op_ld_T0_A0[ot + s->mem_index]();
961 gen_string_movl_A0_EDI(s);
962 gen_op_st_T0_A0[ot + s->mem_index]();
963 gen_op_movl_T0_Dshift[ot]();
964 #ifdef TARGET_X86_64
965 if (s->aflag == 2) {
966 gen_op_addq_ESI_T0();
967 gen_op_addq_EDI_T0();
968 } else
969 #endif
970 if (s->aflag) {
971 gen_op_addl_ESI_T0();
972 gen_op_addl_EDI_T0();
973 } else {
974 gen_op_addw_ESI_T0();
975 gen_op_addw_EDI_T0();
/* Flush a lazily-tracked cc_op into the CPU state and mark it dynamic,
   so following code cannot assume a statically-known flags operation. */
979 static inline void gen_update_cc_op(DisasContext *s)
981 if (s->cc_op != CC_OP_DYNAMIC) {
982 gen_op_set_cc_op(s->cc_op);
983 s->cc_op = CC_OP_DYNAMIC;
987 /* XXX: does not work with gdbstub "ice" single step - not a
988 serious problem */
/* Emit the rep-prefix entry test: if (E)CX is zero jump straight to
   next_eip; returns the label (l2) a rep body can loop back to. */
989 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
991 int l1, l2;
993 l1 = gen_new_label();
994 l2 = gen_new_label();
995 gen_op_jnz_ecx[s->aflag](l1);
996 gen_set_label(l2);
997 gen_jmp_tb(s, next_eip, 1);
998 gen_set_label(l1);
999 return l2;
/* Emit one STOS iteration: store AL/AX/EAX/RAX to ES:[EDI], advance EDI. */
1002 static inline void gen_stos(DisasContext *s, int ot)
1004 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1005 gen_string_movl_A0_EDI(s);
1006 gen_op_st_T0_A0[ot + s->mem_index]();
1007 gen_op_movl_T0_Dshift[ot]();
1008 #ifdef TARGET_X86_64
1009 if (s->aflag == 2) {
1010 gen_op_addq_EDI_T0();
1011 } else
1012 #endif
1013 if (s->aflag) {
1014 gen_op_addl_EDI_T0();
1015 } else {
1016 gen_op_addw_EDI_T0();
/* Emit one LODS iteration: load from [ESI] into the accumulator, advance ESI. */
1020 static inline void gen_lods(DisasContext *s, int ot)
1022 gen_string_movl_A0_ESI(s);
1023 gen_op_ld_T0_A0[ot + s->mem_index]();
1024 gen_op_mov_reg_T0[ot][R_EAX]();
1025 gen_op_movl_T0_Dshift[ot]();
1026 #ifdef TARGET_X86_64
1027 if (s->aflag == 2) {
1028 gen_op_addq_ESI_T0();
1029 } else
1030 #endif
1031 if (s->aflag) {
1032 gen_op_addl_ESI_T0();
1033 } else {
1034 gen_op_addw_ESI_T0();
/* Emit one SCAS iteration: compare the accumulator with ES:[EDI]
   (sets flags via cmpl), then advance EDI. */
1038 static inline void gen_scas(DisasContext *s, int ot)
1040 gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1041 gen_string_movl_A0_EDI(s);
1042 gen_op_ld_T1_A0[ot + s->mem_index]();
1043 gen_op_cmpl_T0_T1_cc();
1044 gen_op_movl_T0_Dshift[ot]();
1045 #ifdef TARGET_X86_64
1046 if (s->aflag == 2) {
1047 gen_op_addq_EDI_T0();
1048 } else
1049 #endif
1050 if (s->aflag) {
1051 gen_op_addl_EDI_T0();
1052 } else {
1053 gen_op_addw_EDI_T0();
/* Emit one CMPS iteration: compare [ESI] with ES:[EDI], advance both. */
1057 static inline void gen_cmps(DisasContext *s, int ot)
1059 gen_string_movl_A0_ESI(s);
1060 gen_op_ld_T0_A0[ot + s->mem_index]();
1061 gen_string_movl_A0_EDI(s);
1062 gen_op_ld_T1_A0[ot + s->mem_index]();
1063 gen_op_cmpl_T0_T1_cc();
1064 gen_op_movl_T0_Dshift[ot]();
1065 #ifdef TARGET_X86_64
1066 if (s->aflag == 2) {
1067 gen_op_addq_ESI_T0();
1068 gen_op_addq_EDI_T0();
1069 } else
1070 #endif
1071 if (s->aflag) {
1072 gen_op_addl_ESI_T0();
1073 gen_op_addl_EDI_T0();
1074 } else {
1075 gen_op_addw_ESI_T0();
1076 gen_op_addw_EDI_T0();
/* Emit one INS iteration: read from port DX into ES:[EDI], advance EDI.
   The memory slot is first zero-stored, presumably to fault on a bad
   address before the port read -- TODO confirm intent. */
1080 static inline void gen_ins(DisasContext *s, int ot)
1082 gen_string_movl_A0_EDI(s);
1083 gen_op_movl_T0_0();
1084 gen_op_st_T0_A0[ot + s->mem_index]();
1085 gen_op_in_DX_T0[ot]();
1086 gen_op_st_T0_A0[ot + s->mem_index]();
1087 gen_op_movl_T0_Dshift[ot]();
1088 #ifdef TARGET_X86_64
1089 if (s->aflag == 2) {
1090 gen_op_addq_EDI_T0();
1091 } else
1092 #endif
1093 if (s->aflag) {
1094 gen_op_addl_EDI_T0();
1095 } else {
1096 gen_op_addw_EDI_T0();
/* Emit one OUTS iteration: load from [ESI], write to port DX, advance ESI. */
1100 static inline void gen_outs(DisasContext *s, int ot)
1102 gen_string_movl_A0_ESI(s);
1103 gen_op_ld_T0_A0[ot + s->mem_index]();
1104 gen_op_out_DX_T0[ot]();
1105 gen_op_movl_T0_Dshift[ot]();
1106 #ifdef TARGET_X86_64
1107 if (s->aflag == 2) {
1108 gen_op_addq_ESI_T0();
1109 } else
1110 #endif
1111 if (s->aflag) {
1112 gen_op_addl_ESI_T0();
1113 } else {
1114 gen_op_addw_ESI_T0();
1118 /* same method as Valgrind : we generate jumps to current or next
1119 instruction */
/* GEN_REPZ: wrap a string op in a REP loop (body, dec ECX, jump back).
   GEN_REPZ2 additionally tests ZF for REPZ/REPNZ SCAS/CMPS via the
   nz-indexed jnz_sub table. */
1120 #define GEN_REPZ(op) \
1121 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1122 target_ulong cur_eip, target_ulong next_eip) \
1124 int l2;\
1125 gen_update_cc_op(s); \
1126 l2 = gen_jz_ecx_string(s, next_eip); \
1127 gen_ ## op(s, ot); \
1128 gen_op_dec_ECX[s->aflag](); \
1129 /* a loop would cause two single step exceptions if ECX = 1 \
1130 before rep string_insn */ \
1131 if (!s->jmp_opt) \
1132 gen_op_jz_ecx[s->aflag](l2); \
1133 gen_jmp(s, cur_eip); \
1136 #define GEN_REPZ2(op) \
1137 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1138 target_ulong cur_eip, \
1139 target_ulong next_eip, \
1140 int nz) \
1142 int l2;\
1143 gen_update_cc_op(s); \
1144 l2 = gen_jz_ecx_string(s, next_eip); \
1145 gen_ ## op(s, ot); \
1146 gen_op_dec_ECX[s->aflag](); \
1147 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1148 gen_op_string_jnz_sub[nz][ot](l2);\
1149 if (!s->jmp_opt) \
1150 gen_op_jz_ecx[s->aflag](l2); \
1151 gen_jmp(s, cur_eip); \
/* instantiate gen_repz_movs/stos/lods/ins/outs/scas/cmps */
1154 GEN_REPZ(movs)
1155 GEN_REPZ(stos)
1156 GEN_REPZ(lods)
1157 GEN_REPZ(ins)
1158 GEN_REPZ(outs)
1159 GEN_REPZ2(scas)
1160 GEN_REPZ2(cmps)
/* Condition-code indices for Jcc/SETcc (matching the x86 cc encoding /2;
   the odd/negated forms are derived elsewhere).
   NOTE(review): closing '};' lines appear dropped by extraction. */
1162 enum {
1163 JCC_O,
1164 JCC_B,
1165 JCC_Z,
1166 JCC_BE,
1167 JCC_S,
1168 JCC_P,
1169 JCC_L,
1170 JCC_LE,
/* fast Jcc paths usable when cc_op is a known SUB; NULL slots (O, P)
   fall back to the slow flag computation */
1173 static GenOpFunc1 *gen_jcc_sub[4][8] = {
1174 [OT_BYTE] = {
1175 NULL,
1176 gen_op_jb_subb,
1177 gen_op_jz_subb,
1178 gen_op_jbe_subb,
1179 gen_op_js_subb,
1180 NULL,
1181 gen_op_jl_subb,
1182 gen_op_jle_subb,
1184 [OT_WORD] = {
1185 NULL,
1186 gen_op_jb_subw,
1187 gen_op_jz_subw,
1188 gen_op_jbe_subw,
1189 gen_op_js_subw,
1190 NULL,
1191 gen_op_jl_subw,
1192 gen_op_jle_subw,
1194 [OT_LONG] = {
1195 NULL,
1196 gen_op_jb_subl,
1197 gen_op_jz_subl,
1198 gen_op_jbe_subl,
1199 gen_op_js_subl,
1200 NULL,
1201 gen_op_jl_subl,
1202 gen_op_jle_subl,
1204 #ifdef TARGET_X86_64
1205 [OT_QUAD] = {
1206 NULL,
/* BUGGY_64: see the push/pop note near the macro definition */
1207 BUGGY_64(gen_op_jb_subq),
1208 gen_op_jz_subq,
1209 BUGGY_64(gen_op_jbe_subq),
1210 gen_op_js_subq,
1211 NULL,
1212 BUGGY_64(gen_op_jl_subq),
1213 BUGGY_64(gen_op_jle_subq),
1215 #endif
/* LOOPNZ/LOOPZ/LOOP/JCXZ helpers, indexed [address size][variant] */
1217 static GenOpFunc1 *gen_op_loop[3][4] = {
1218 [0] = {
1219 gen_op_loopnzw,
1220 gen_op_loopzw,
1221 gen_op_jnz_ecxw,
1223 [1] = {
1224 gen_op_loopnzl,
1225 gen_op_loopzl,
1226 gen_op_jnz_ecxl,
1228 #ifdef TARGET_X86_64
1229 [2] = {
1230 gen_op_loopnzq,
1231 gen_op_loopzq,
1232 gen_op_jnz_ecxq,
1234 #endif
/* SETcc via full flag computation (slow path) */
1237 static GenOpFunc *gen_setcc_slow[8] = {
1238 gen_op_seto_T0_cc,
1239 gen_op_setb_T0_cc,
1240 gen_op_setz_T0_cc,
1241 gen_op_setbe_T0_cc,
1242 gen_op_sets_T0_cc,
1243 gen_op_setp_T0_cc,
1244 gen_op_setl_T0_cc,
1245 gen_op_setle_T0_cc,
/* SETcc fast paths for known-SUB cc_op, mirroring gen_jcc_sub */
1248 static GenOpFunc *gen_setcc_sub[4][8] = {
1249 [OT_BYTE] = {
1250 NULL,
1251 gen_op_setb_T0_subb,
1252 gen_op_setz_T0_subb,
1253 gen_op_setbe_T0_subb,
1254 gen_op_sets_T0_subb,
1255 NULL,
1256 gen_op_setl_T0_subb,
1257 gen_op_setle_T0_subb,
1259 [OT_WORD] = {
1260 NULL,
1261 gen_op_setb_T0_subw,
1262 gen_op_setz_T0_subw,
1263 gen_op_setbe_T0_subw,
1264 gen_op_sets_T0_subw,
1265 NULL,
1266 gen_op_setl_T0_subw,
1267 gen_op_setle_T0_subw,
1269 [OT_LONG] = {
1270 NULL,
1271 gen_op_setb_T0_subl,
1272 gen_op_setz_T0_subl,
1273 gen_op_setbe_T0_subl,
1274 gen_op_sets_T0_subl,
1275 NULL,
1276 gen_op_setl_T0_subl,
1277 gen_op_setle_T0_subl,
1279 #ifdef TARGET_X86_64
1280 [OT_QUAD] = {
1281 NULL,
1282 gen_op_setb_T0_subq,
1283 gen_op_setz_T0_subq,
1284 gen_op_setbe_T0_subq,
1285 gen_op_sets_T0_subq,
1286 NULL,
1287 gen_op_setl_T0_subq,
1288 gen_op_setle_T0_subq,
1290 #endif
/* x87 arithmetic dispatch, indexed by the /r field of the FPU opcode. */
1293 static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1294 gen_op_fadd_ST0_FT0,
1295 gen_op_fmul_ST0_FT0,
/* slots 2 and 3 are fcom/fcomp -- the pop is emitted by the caller */
1296 gen_op_fcom_ST0_FT0,
1297 gen_op_fcom_ST0_FT0,
1298 gen_op_fsub_ST0_FT0,
1299 gen_op_fsubr_ST0_FT0,
1300 gen_op_fdiv_ST0_FT0,
1301 gen_op_fdivr_ST0_FT0,
1304 /* NOTE the exception in "r" op ordering */
1305 static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1306 gen_op_fadd_STN_ST0,
1307 gen_op_fmul_STN_ST0,
1308 NULL,
1309 NULL,
1310 gen_op_fsubr_STN_ST0,
1311 gen_op_fsub_STN_ST0,
1312 gen_op_fdivr_STN_ST0,
1313 gen_op_fdiv_STN_ST0,
1316 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit the generic two-operand ALU op (value in T1, dest reg d or memory):
   loads the destination, dispatches on OP_*, writes back, and updates the
   lazy cc_op bookkeeping. */
1317 static void gen_op(DisasContext *s1, int op, int ot, int d)
1319 GenOpFunc *gen_update_cc;
1321 if (d != OR_TMP0) {
1322 gen_op_mov_TN_reg[ot][0][d]();
1323 } else {
1324 gen_op_ld_T0_A0[ot + s1->mem_index]();
1326 switch(op) {
1327 case OP_ADCL:
1328 case OP_SBBL:
/* adc/sbb consume the current carry, so flags must be synced first
   and cc_op becomes dynamic afterwards */
1329 if (s1->cc_op != CC_OP_DYNAMIC)
1330 gen_op_set_cc_op(s1->cc_op);
1331 if (d != OR_TMP0) {
1332 gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1333 gen_op_mov_reg_T0[ot][d]();
1334 } else {
1335 gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1337 s1->cc_op = CC_OP_DYNAMIC;
1338 goto the_end;
1339 case OP_ADDL:
1340 gen_op_addl_T0_T1();
1341 s1->cc_op = CC_OP_ADDB + ot;
1342 gen_update_cc = gen_op_update2_cc;
1343 break;
1344 case OP_SUBL:
1345 gen_op_subl_T0_T1();
1346 s1->cc_op = CC_OP_SUBB + ot;
1347 gen_update_cc = gen_op_update2_cc;
1348 break;
1349 default:
1350 case OP_ANDL:
1351 case OP_ORL:
1352 case OP_XORL:
1353 gen_op_arith_T0_T1_cc[op]();
1354 s1->cc_op = CC_OP_LOGICB + ot;
1355 gen_update_cc = gen_op_update1_cc;
1356 break;
1357 case OP_CMPL:
/* cmp sets flags only -- no writeback, no extra cc update needed */
1358 gen_op_cmpl_T0_T1_cc();
1359 s1->cc_op = CC_OP_SUBB + ot;
1360 gen_update_cc = NULL;
1361 break;
1363 if (op != OP_CMPL) {
1364 if (d != OR_TMP0)
1365 gen_op_mov_reg_T0[ot][d]();
1366 else
1367 gen_op_st_T0_A0[ot + s1->mem_index]();
1369 /* the flags update must happen after the memory write (precise
1370 exception support) */
1371 if (gen_update_cc)
1372 gen_update_cc();
1373 the_end: ;
1376 /* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC (c <= 0) of register d or memory; flags are
   synced first because INC/DEC preserve CF from the previous op. */
1377 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1379 if (d != OR_TMP0)
1380 gen_op_mov_TN_reg[ot][0][d]();
1381 else
1382 gen_op_ld_T0_A0[ot + s1->mem_index]();
1383 if (s1->cc_op != CC_OP_DYNAMIC)
1384 gen_op_set_cc_op(s1->cc_op);
1385 if (c > 0) {
1386 gen_op_incl_T0();
1387 s1->cc_op = CC_OP_INCB + ot;
1388 } else {
1389 gen_op_decl_T0();
1390 s1->cc_op = CC_OP_DECB + ot;
1392 if (d != OR_TMP0)
1393 gen_op_mov_reg_T0[ot][d]();
1394 else
1395 gen_op_st_T0_A0[ot + s1->mem_index]();
1396 gen_op_update_inc_cc();
/* Emit a shift/rotate of dest d (reg or memory) by count in register s
   (or the T1 temporary when s == OR_TMP1). */
1399 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1401 if (d != OR_TMP0)
1402 gen_op_mov_TN_reg[ot][0][d]();
1403 else
1404 gen_op_ld_T0_A0[ot + s1->mem_index]();
1405 if (s != OR_TMP1)
1406 gen_op_mov_TN_reg[ot][1][s]();
1407 /* for zero counts, flags are not updated, so must do it dynamically */
1408 if (s1->cc_op != CC_OP_DYNAMIC)
1409 gen_op_set_cc_op(s1->cc_op);
1411 if (d != OR_TMP0)
1412 gen_op_shift_T0_T1_cc[ot][op]();
1413 else
1414 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1415 if (d != OR_TMP0)
1416 gen_op_mov_reg_T0[ot][d]();
1417 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
/* Shift/rotate by an immediate count: materialize c into T1 and reuse
   the register-count path. */
1420 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1422 /* currently not optimized */
1423 gen_op_movl_T1_im(c);
1424 gen_shift(s1, op, ot, d, OR_TMP1);
/* Decode the ModRM byte 'modrm' (plus any SIB byte and displacement
   read from the instruction stream at s->pc) and generate code leaving
   the effective address in A0, including the segment base when needed.
   The displacement is folded into A0, so *offset_ptr is always 0 and
   *reg_ptr is always OR_A0.  Advances s->pc past the consumed bytes. */
1427 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1429 target_long disp;
1430 int havesib;
1431 int base;
1432 int index;
1433 int scale;
1434 int opreg;
1435 int mod, rm, code, override, must_add_seg;
1437 override = s->override;
1438 must_add_seg = s->addseg;
1439 if (override >= 0)
1440 must_add_seg = 1;
1441 mod = (modrm >> 6) & 3;
1442 rm = modrm & 7;
/* 32/64-bit addressing (aflag != 0); the 16-bit forms are below */
1444 if (s->aflag) {
1446 havesib = 0;
1447 base = rm;
1448 index = 0;
1449 scale = 0;
/* base == 4 signals a SIB byte */
1451 if (base == 4) {
1452 havesib = 1;
1453 code = ldub_code(s->pc++);
1454 scale = (code >> 6) & 3;
1455 index = ((code >> 3) & 7) | REX_X(s);
1456 base = (code & 7);
1458 base |= REX_B(s);
1460 switch (mod) {
1461 case 0:
/* mod == 0, base 5: no base register, disp32 only; in 64-bit mode
   without SIB this is RIP-relative addressing */
1462 if ((base & 7) == 5) {
1463 base = -1;
1464 disp = (int32_t)ldl_code(s->pc);
1465 s->pc += 4;
1466 if (CODE64(s) && !havesib) {
1467 disp += s->pc + s->rip_offset;
1469 } else {
1470 disp = 0;
1472 break;
1473 case 1:
1474 disp = (int8_t)ldub_code(s->pc++);
1475 break;
1476 default:
1477 case 2:
1478 disp = ldl_code(s->pc);
1479 s->pc += 4;
1480 break;
1483 if (base >= 0) {
1484 /* for correct popl handling with esp */
1485 if (base == 4 && s->popl_esp_hack)
1486 disp += s->popl_esp_hack;
1487 #ifdef TARGET_X86_64
1488 if (s->aflag == 2) {
1489 gen_op_movq_A0_reg[base]();
1490 if (disp != 0) {
1491 if ((int32_t)disp == disp)
1492 gen_op_addq_A0_im(disp);
1493 else
1494 gen_op_addq_A0_im64(disp >> 32, disp);
1496 } else
1497 #endif
1499 gen_op_movl_A0_reg[base]();
1500 if (disp != 0)
1501 gen_op_addl_A0_im(disp);
1503 } else {
1504 #ifdef TARGET_X86_64
1505 if (s->aflag == 2) {
1506 if ((int32_t)disp == disp)
1507 gen_op_movq_A0_im(disp);
1508 else
1509 gen_op_movq_A0_im64(disp >> 32, disp);
1510 } else
1511 #endif
1513 gen_op_movl_A0_im(disp);
1516 /* XXX: index == 4 is always invalid */
1517 if (havesib && (index != 4 || scale != 0)) {
1518 #ifdef TARGET_X86_64
1519 if (s->aflag == 2) {
1520 gen_op_addq_A0_reg_sN[scale][index]();
1521 } else
1522 #endif
1524 gen_op_addl_A0_reg_sN[scale][index]();
/* default segment: SS when the base is (E)BP or (E)SP, DS otherwise */
1527 if (must_add_seg) {
1528 if (override < 0) {
1529 if (base == R_EBP || base == R_ESP)
1530 override = R_SS;
1531 else
1532 override = R_DS;
1534 #ifdef TARGET_X86_64
1535 if (s->aflag == 2) {
1536 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1537 } else
1538 #endif
1540 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
/* 16-bit addressing modes: fixed base/index register combinations */
1543 } else {
1544 switch (mod) {
1545 case 0:
1546 if (rm == 6) {
1547 disp = lduw_code(s->pc);
1548 s->pc += 2;
1549 gen_op_movl_A0_im(disp);
1550 rm = 0; /* avoid SS override */
1551 goto no_rm;
1552 } else {
1553 disp = 0;
1555 break;
1556 case 1:
1557 disp = (int8_t)ldub_code(s->pc++);
1558 break;
1559 default:
1560 case 2:
1561 disp = lduw_code(s->pc);
1562 s->pc += 2;
1563 break;
1565 switch(rm) {
1566 case 0:
1567 gen_op_movl_A0_reg[R_EBX]();
1568 gen_op_addl_A0_reg_sN[0][R_ESI]();
1569 break;
1570 case 1:
1571 gen_op_movl_A0_reg[R_EBX]();
1572 gen_op_addl_A0_reg_sN[0][R_EDI]();
1573 break;
1574 case 2:
1575 gen_op_movl_A0_reg[R_EBP]();
1576 gen_op_addl_A0_reg_sN[0][R_ESI]();
1577 break;
1578 case 3:
1579 gen_op_movl_A0_reg[R_EBP]();
1580 gen_op_addl_A0_reg_sN[0][R_EDI]();
1581 break;
1582 case 4:
1583 gen_op_movl_A0_reg[R_ESI]();
1584 break;
1585 case 5:
1586 gen_op_movl_A0_reg[R_EDI]();
1587 break;
1588 case 6:
1589 gen_op_movl_A0_reg[R_EBP]();
1590 break;
1591 default:
1592 case 7:
1593 gen_op_movl_A0_reg[R_EBX]();
1594 break;
1596 if (disp != 0)
1597 gen_op_addl_A0_im(disp);
/* 16-bit addresses wrap at 64K */
1598 gen_op_andl_A0_ffff();
1599 no_rm:
/* rm 2, 3 and 6 use BP and thus default to the SS segment */
1600 if (must_add_seg) {
1601 if (override < 0) {
1602 if (rm == 2 || rm == 3 || rm == 6)
1603 override = R_SS;
1604 else
1605 override = R_DS;
1607 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1611 opreg = OR_A0;
1612 disp = 0;
1613 *reg_ptr = opreg;
1614 *offset_ptr = disp;
1617 /* used for LEA and MOV AX, mem */
1618 static void gen_add_A0_ds_seg(DisasContext *s)
1620 int override, must_add_seg;
1621 must_add_seg = s->addseg;
1622 override = R_DS;
1623 if (s->override >= 0) {
1624 override = s->override;
1625 must_add_seg = 1;
1626 } else {
1627 override = R_DS;
1629 if (must_add_seg) {
1630 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1634 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1635 OR_TMP0 */
/* Generate a ModRM-directed load (is_store == 0) or store of 'reg'
   with operand size 'ot'.  mod == 3 selects a register operand,
   otherwise the memory address is computed via gen_lea_modrm.  T0 is
   used as the transfer register when reg != OR_TMP0. */
1636 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1638 int mod, rm, opreg, disp;
1640 mod = (modrm >> 6) & 3;
1641 rm = (modrm & 7) | REX_B(s);
1642 if (mod == 3) {
1643 if (is_store) {
1644 if (reg != OR_TMP0)
1645 gen_op_mov_TN_reg[ot][0][reg]();
1646 gen_op_mov_reg_T0[ot][rm]();
1647 } else {
1648 gen_op_mov_TN_reg[ot][0][rm]();
1649 if (reg != OR_TMP0)
1650 gen_op_mov_reg_T0[ot][reg]();
1652 } else {
1653 gen_lea_modrm(s, modrm, &opreg, &disp);
1654 if (is_store) {
1655 if (reg != OR_TMP0)
1656 gen_op_mov_TN_reg[ot][0][reg]();
1657 gen_op_st_T0_A0[ot + s->mem_index]();
1658 } else {
1659 gen_op_ld_T0_A0[ot + s->mem_index]();
1660 if (reg != OR_TMP0)
1661 gen_op_mov_reg_T0[ot][reg]();
/* Fetch an immediate of size 'ot' (1, 2 or 4 bytes; OT_QUAD falls into
   the OT_LONG default) from the instruction stream and advance s->pc. */
1666 static inline uint32_t insn_get(DisasContext *s, int ot)
1668 uint32_t ret;
1670 switch(ot) {
1671 case OT_BYTE:
1672 ret = ldub_code(s->pc);
1673 s->pc++;
1674 break;
1675 case OT_WORD:
1676 ret = lduw_code(s->pc);
1677 s->pc += 2;
1678 break;
1679 default:
1680 case OT_LONG:
1681 ret = ldl_code(s->pc);
1682 s->pc += 4;
1683 break;
1685 return ret;
1688 static inline int insn_const_size(unsigned int ot)
1690 if (ot <= OT_LONG)
1691 return 1 << ot;
1692 else
1693 return 4;
/* Generate a conditional jump: taken target is 'val', fall-through is
   'next_eip'.  Condition is encoded in 'b' (bit 0 inverts, bits 1..3
   select jcc_op).  With s->jmp_opt the two outcomes are emitted as
   chained TB exits (goto_tb0/goto_tb1); otherwise a slow label-based
   sequence ending in gen_eob is used. */
1696 static inline void gen_jcc(DisasContext *s, int b,
1697 target_ulong val, target_ulong next_eip)
1699 TranslationBlock *tb;
1700 int inv, jcc_op;
1701 GenOpFunc1 *func;
1702 target_ulong tmp;
1703 int l1, l2;
1705 inv = b & 1;
1706 jcc_op = (b >> 1) & 7;
1708 if (s->jmp_opt) {
1709 switch(s->cc_op) {
1710 /* we optimize the cmp/jcc case */
1711 case CC_OP_SUBB:
1712 case CC_OP_SUBW:
1713 case CC_OP_SUBL:
1714 case CC_OP_SUBQ:
1715 func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1716 break;
1718 /* some jumps are easy to compute */
1719 case CC_OP_ADDB:
1720 case CC_OP_ADDW:
1721 case CC_OP_ADDL:
1722 case CC_OP_ADDQ:
1724 case CC_OP_ADCB:
1725 case CC_OP_ADCW:
1726 case CC_OP_ADCL:
1727 case CC_OP_ADCQ:
1729 case CC_OP_SBBB:
1730 case CC_OP_SBBW:
1731 case CC_OP_SBBL:
1732 case CC_OP_SBBQ:
1734 case CC_OP_LOGICB:
1735 case CC_OP_LOGICW:
1736 case CC_OP_LOGICL:
1737 case CC_OP_LOGICQ:
1739 case CC_OP_INCB:
1740 case CC_OP_INCW:
1741 case CC_OP_INCL:
1742 case CC_OP_INCQ:
1744 case CC_OP_DECB:
1745 case CC_OP_DECW:
1746 case CC_OP_DECL:
1747 case CC_OP_DECQ:
1749 case CC_OP_SHLB:
1750 case CC_OP_SHLW:
1751 case CC_OP_SHLL:
1752 case CC_OP_SHLQ:
1754 case CC_OP_SARB:
1755 case CC_OP_SARW:
1756 case CC_OP_SARL:
1757 case CC_OP_SARQ:
/* for these ops only the Z and S conditions can be tested directly on
   the stored result (same table row as the SUB case of equal width) */
1758 switch(jcc_op) {
1759 case JCC_Z:
1760 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1761 break;
1762 case JCC_S:
1763 func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1764 break;
1765 default:
1766 func = NULL;
1767 break;
1769 break;
1770 default:
1771 func = NULL;
1772 break;
1775 if (s->cc_op != CC_OP_DYNAMIC)
1776 gen_op_set_cc_op(s->cc_op);
/* no direct test possible: compute the condition into T0 and branch
   on it */
1778 if (!func) {
1779 gen_setcc_slow[jcc_op]();
1780 func = gen_op_jnz_T0_label;
/* inverted condition: swap taken and fall-through targets */
1783 if (inv) {
1784 tmp = val;
1785 val = next_eip;
1786 next_eip = tmp;
1788 tb = s->tb;
1790 l1 = gen_new_label();
1791 func(l1);
1793 gen_op_goto_tb0(TBPARAM(tb));
1794 gen_jmp_im(next_eip);
1795 gen_op_movl_T0_im((long)tb + 0);
1796 gen_op_exit_tb();
1798 gen_set_label(l1);
1799 gen_op_goto_tb1(TBPARAM(tb));
1800 gen_jmp_im(val);
1801 gen_op_movl_T0_im((long)tb + 1);
1802 gen_op_exit_tb();
1804 s->is_jmp = 3;
1805 } else {
/* slow path: always go through gen_setcc_slow and explicit labels */
1807 if (s->cc_op != CC_OP_DYNAMIC) {
1808 gen_op_set_cc_op(s->cc_op);
1809 s->cc_op = CC_OP_DYNAMIC;
1811 gen_setcc_slow[jcc_op]();
1812 if (inv) {
1813 tmp = val;
1814 val = next_eip;
1815 next_eip = tmp;
1817 l1 = gen_new_label();
1818 l2 = gen_new_label();
1819 gen_op_jnz_T0_label(l1);
1820 gen_jmp_im(next_eip);
1821 gen_op_jmp_label(l2);
1822 gen_set_label(l1);
1823 gen_jmp_im(val);
1824 gen_set_label(l2);
1825 gen_eob(s);
/* Set T0 to 0/1 according to condition 'b' (same encoding as gen_jcc:
   bit 0 inverts, bits 1..3 select jcc_op), using a direct setcc op when
   the current cc_op permits it, the slow generic path otherwise. */
1831 static void gen_setcc(DisasContext *s, int b)
1832 int inv, jcc_op;
1833 GenOpFunc *func;
1834 inv = b & 1;
1835 jcc_op = (b >> 1) & 7;
1836 switch(s->cc_op) {
1837 /* we optimize the cmp/jcc case */
1838 case CC_OP_SUBB:
1839 case CC_OP_SUBW:
1840 case CC_OP_SUBL:
1841 case CC_OP_SUBQ:
1842 func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1843 if (!func)
1844 goto slow_jcc;
1845 break;
1847 /* some jumps are easy to compute */
1848 case CC_OP_ADDB:
1849 case CC_OP_ADDW:
1850 case CC_OP_ADDL:
1851 case CC_OP_ADDQ:
1853 case CC_OP_LOGICB:
1854 case CC_OP_LOGICW:
1855 case CC_OP_LOGICL:
1856 case CC_OP_LOGICQ:
1858 case CC_OP_INCB:
1859 case CC_OP_INCW:
1860 case CC_OP_INCL:
1861 case CC_OP_INCQ:
1863 case CC_OP_DECB:
1864 case CC_OP_DECW:
1865 case CC_OP_DECL:
1866 case CC_OP_DECQ:
1868 case CC_OP_SHLB:
1869 case CC_OP_SHLW:
1870 case CC_OP_SHLL:
1871 case CC_OP_SHLQ:
/* only Z and S can be derived directly from the stored result */
1872 switch(jcc_op) {
1873 case JCC_Z:
1874 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1875 break;
1876 case JCC_S:
1877 func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1878 break;
1879 default:
1880 goto slow_jcc;
1882 break;
1883 default:
1884 slow_jcc:
/* flush flag state and use the generic condition evaluator */
1885 if (s->cc_op != CC_OP_DYNAMIC)
1886 gen_op_set_cc_op(s->cc_op);
1887 func = gen_setcc_slow[jcc_op];
1888 break;
1890 func();
1891 if (inv) {
1892 gen_op_xor_T0_1();
1896 /* move T0 to seg_reg and compute if the CPU state may change. Never
1897 call this function with seg_reg == R_CS */
/* Load the selector in T0 into segment register 'seg_reg'.  In
   protected mode (non-vm86) the full checked load is used and the
   translation may be aborted; in real/vm86 mode the descriptor cache
   is set directly. */
1898 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1900 if (s->pe && !s->vm86) {
1901 /* XXX: optimize by finding processor state dynamically */
1902 if (s->cc_op != CC_OP_DYNAMIC)
1903 gen_op_set_cc_op(s->cc_op);
/* EIP must be up to date: the checked load can fault */
1904 gen_jmp_im(cur_eip);
1905 gen_op_movl_seg_T0(seg_reg);
1906 /* abort translation because the addseg value may change or
1907 because ss32 may change. For R_SS, translation must always
1908 stop as a special handling must be done to disable hardware
1909 interrupts for the next instruction */
1910 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1911 s->is_jmp = 3;
1912 } else {
1913 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1914 if (seg_reg == R_SS)
1915 s->is_jmp = 3;
/* Add 'addend' to the stack pointer using the correct width: 64-bit in
   long mode, 32-bit when ss32 is set, 16-bit otherwise.  Small common
   addends (2, 4, 8) use dedicated ops. */
1921 static inline void gen_stack_update(DisasContext *s, int addend)
1922 #ifdef TARGET_X86_64
1923 if (CODE64(s)) {
1924 if (addend == 8)
1925 gen_op_addq_ESP_8();
1926 else
1927 gen_op_addq_ESP_im(addend);
1928 } else
1929 #endif
1930 if (s->ss32) {
1931 if (addend == 2)
1932 gen_op_addl_ESP_2();
1933 else if (addend == 4)
1934 gen_op_addl_ESP_4();
1935 else
1936 gen_op_addl_ESP_im(addend);
1937 } else {
1938 if (addend == 2)
1939 gen_op_addw_ESP_2();
1940 else if (addend == 4)
1941 gen_op_addw_ESP_4();
1942 else
1943 gen_op_addw_ESP_im(addend);
1946 /* generate a push. It depends on ss32, addseg and dflag */
/* Push T0 on the stack.  The address computation depends on code64,
   ss32, addseg and the operand size (dflag).  T1 carries the new
   (un-segmented) stack pointer for the write-back when addseg or
   16-bit SS forces A0 to hold the segmented address. */
1949 static void gen_push_T0(DisasContext *s)
1950 #ifdef TARGET_X86_64
1951 if (CODE64(s)) {
1952 /* XXX: check 16 bit behaviour */
1953 gen_op_movq_A0_reg[R_ESP]();
1954 gen_op_subq_A0_8();
1955 gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
1956 gen_op_movq_ESP_A0();
1957 } else
1958 #endif
1959 gen_op_movl_A0_reg[R_ESP]();
1960 if (!s->dflag)
1961 gen_op_subl_A0_2();
1962 else
1963 gen_op_subl_A0_4();
1964 if (s->ss32) {
1965 if (s->addseg) {
1966 gen_op_movl_T1_A0();
1967 gen_op_addl_A0_SS();
1969 } else {
/* 16-bit stack segment: offsets wrap at 64K */
1970 gen_op_andl_A0_ffff();
1971 gen_op_movl_T1_A0();
1972 gen_op_addl_A0_SS();
1974 gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
1975 if (s->ss32 && !s->addseg)
1976 gen_op_movl_ESP_A0();
1977 else
1978 gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
1982 /* generate a push. It depends on ss32, addseg and dflag */
1983 /* slower version for T1, only used for call Ev */
/* Push T1 on the stack.  Like gen_push_T0 but preserves T1 (used for
   call Ev, where T1 holds the target), so the stack-pointer update is
   done via gen_stack_update instead of through T1. */
1984 static void gen_push_T1(DisasContext *s)
1986 #ifdef TARGET_X86_64
1987 if (CODE64(s)) {
1988 /* XXX: check 16 bit behaviour */
1989 gen_op_movq_A0_reg[R_ESP]();
1990 gen_op_subq_A0_8();
1991 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
1992 gen_op_movq_ESP_A0();
1993 } else
1994 #endif
1996 gen_op_movl_A0_reg[R_ESP]();
1997 if (!s->dflag)
1998 gen_op_subl_A0_2();
1999 else
2000 gen_op_subl_A0_4();
2001 if (s->ss32) {
2002 if (s->addseg) {
2003 gen_op_addl_A0_SS();
2005 } else {
/* 16-bit stack segment: offsets wrap at 64K */
2006 gen_op_andl_A0_ffff();
2007 gen_op_addl_A0_SS();
2009 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2011 if (s->ss32 && !s->addseg)
2012 gen_op_movl_ESP_A0();
2013 else
2014 gen_stack_update(s, (-2) << s->dflag);
2018 /* two step pop is necessary for precise exceptions */
/* First half of a pop: load the top of stack into T0 WITHOUT updating
   the stack pointer.  ESP is adjusted afterwards by gen_pop_update so
   that a faulting load leaves the stack pointer unmodified (precise
   exceptions). */
2019 static void gen_pop_T0(DisasContext *s)
2021 #ifdef TARGET_X86_64
2022 if (CODE64(s)) {
2023 /* XXX: check 16 bit behaviour */
2024 gen_op_movq_A0_reg[R_ESP]();
2025 gen_op_ld_T0_A0[OT_QUAD + s->mem_index]();
2026 } else
2027 #endif
2029 gen_op_movl_A0_reg[R_ESP]();
2030 if (s->ss32) {
2031 if (s->addseg)
2032 gen_op_addl_A0_SS();
2033 } else {
2034 gen_op_andl_A0_ffff();
2035 gen_op_addl_A0_SS();
2037 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
/* Second half of a pop: advance the stack pointer by the operand size
   (8 bytes in long mode, otherwise 2 << dflag). */
2041 static void gen_pop_update(DisasContext *s)
2043 #ifdef TARGET_X86_64
2044 if (CODE64(s)) {
2045 gen_stack_update(s, 8);
2046 } else
2047 #endif
2049 gen_stack_update(s, 2 << s->dflag);
/* Compute the current stack address: A0 = ESP (masked to 16 bits when
   !ss32, plus SS base when addseg); T1 keeps the unsegmented offset. */
2055 static void gen_stack_A0(DisasContext *s)
2056 gen_op_movl_A0_ESP();
2057 if (!s->ss32)
2058 gen_op_andl_A0_ffff();
2059 gen_op_movl_T1_A0();
2060 if (s->addseg)
2061 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2063 /* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: push the 8 general registers (EAX..EDI order, i.e.
   index 7-i counts down) and update ESP with the pre-decremented value
   saved in T1. */
2064 static void gen_pusha(DisasContext *s)
2066 int i;
2067 gen_op_movl_A0_ESP();
2068 gen_op_addl_A0_im(-16 << s->dflag);
2069 if (!s->ss32)
2070 gen_op_andl_A0_ffff();
2071 gen_op_movl_T1_A0();
2072 if (s->addseg)
2073 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2074 for(i = 0;i < 8; i++) {
2075 gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
2076 gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2077 gen_op_addl_A0_im(2 << s->dflag);
2079 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2082 /* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: pop the 8 general registers in reverse push order; the
   stored ESP value (i == 3) is skipped, ESP is instead set from T1
   (old ESP plus the popped size). */
2083 static void gen_popa(DisasContext *s)
2085 int i;
2086 gen_op_movl_A0_ESP();
2087 if (!s->ss32)
2088 gen_op_andl_A0_ffff();
2089 gen_op_movl_T1_A0();
2090 gen_op_addl_T1_im(16 << s->dflag);
2091 if (s->addseg)
2092 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2093 for(i = 0;i < 8; i++) {
2094 /* ESP is not reloaded */
2095 if (i != 3) {
2096 gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2097 gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
2099 gen_op_addl_A0_im(2 << s->dflag);
2101 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
/* ENTER instruction: push EBP, optionally copy 'level' frame pointers
   (done by the gen_op_enter_level helper), set EBP to the new frame
   base and reserve 'esp_addend' bytes of locals. */
2106 static void gen_enter(DisasContext *s, int esp_addend, int level)
2106 int ot, opsize;
2108 ot = s->dflag + OT_WORD;
2109 level &= 0x1f;
2110 opsize = 2 << s->dflag;
2112 gen_op_movl_A0_ESP();
2113 gen_op_addl_A0_im(-opsize);
2114 if (!s->ss32)
2115 gen_op_andl_A0_ffff();
2116 gen_op_movl_T1_A0();
2117 if (s->addseg)
2118 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2119 /* push bp */
2120 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2121 gen_op_st_T0_A0[ot + s->mem_index]();
2122 if (level) {
2123 gen_op_enter_level(level, s->dflag);
2125 gen_op_mov_reg_T1[ot][R_EBP]();
2126 gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2127 gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
/* Raise exception 'trapno' at guest address 'cur_eip': flush the flag
   state and EIP first so the exception sees a consistent CPU state,
   then end the translation block. */
2130 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2132 if (s->cc_op != CC_OP_DYNAMIC)
2133 gen_op_set_cc_op(s->cc_op);
2134 gen_jmp_im(cur_eip);
2135 gen_op_raise_exception(trapno);
2136 s->is_jmp = 3;
2139 /* an interrupt is different from an exception because of the
2140 privilege checks */
/* Raise software interrupt 'intno'.  The instruction length
   (next_eip - cur_eip) is passed so the handler can compute the
   return address. */
2141 static void gen_interrupt(DisasContext *s, int intno,
2142 target_ulong cur_eip, target_ulong next_eip)
2144 if (s->cc_op != CC_OP_DYNAMIC)
2145 gen_op_set_cc_op(s->cc_op);
2146 gen_jmp_im(cur_eip);
2147 gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
2148 s->is_jmp = 3;
/* Emit a debug trap at 'cur_eip' (flags and EIP synced first) and end
   the translation block. */
2153 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2153 if (s->cc_op != CC_OP_DYNAMIC)
2154 gen_op_set_cc_op(s->cc_op)
2155 gen_jmp_im(cur_eip);
2156 gen_op_debug();
2157 s->is_jmp = 3;
2160 /* generate a generic end of block. Trace exception is also generated
2161 if needed */
/* Generate a generic end of block: flush flags, clear any pending
   interrupt-inhibit state, and exit via debug trap, single-step trap
   or a plain TB exit depending on the current mode. */
2162 static void gen_eob(DisasContext *s)
2164 if (s->cc_op != CC_OP_DYNAMIC)
2165 gen_op_set_cc_op(s->cc_op);
2166 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2167 gen_op_reset_inhibit_irq();
2169 if (s->singlestep_enabled) {
2170 gen_op_debug();
2171 } else if (s->tf) {
2172 gen_op_raise_exception(EXCP01_SSTP);
2173 } else {
2174 gen_op_movl_T0_0();
2175 gen_op_exit_tb();
2177 s->is_jmp = 3;
2180 /* generate a jump to eip. No segment change must happen before as a
2181 direct call to the next block may occur */
/* Unconditional jump to 'eip'.  When TB chaining is allowed (jmp_opt),
   emit a goto_tb exit through slot 'tb_num' (0 or 1); otherwise set
   EIP and end the block normally. */
2182 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2184 TranslationBlock *tb = s->tb;
2186 if (s->jmp_opt) {
2187 if (s->cc_op != CC_OP_DYNAMIC)
2188 gen_op_set_cc_op(s->cc_op);
2189 if (tb_num)
2190 gen_op_goto_tb1(TBPARAM(tb));
2191 else
2192 gen_op_goto_tb0(TBPARAM(tb));
2193 gen_jmp_im(eip);
/* returned value encodes the TB pointer plus the slot index */
2194 gen_op_movl_T0_im((long)tb + tb_num);
2195 gen_op_exit_tb();
2196 s->is_jmp = 3;
2197 } else {
2198 gen_jmp_im(eip);
2199 gen_eob(s);
/* Unconditional jump to 'eip' using TB chaining slot 0. */
2203 static void gen_jmp(DisasContext *s, target_ulong eip)
2205 gen_jmp_tb(s, eip, 0);
/* Load the target_ulong immediate 'val' into T0; on x86_64 use the
   shorter 32-bit form when the value fits in a sign-extended int32. */
2208 static void gen_movtl_T0_im(target_ulong val)
2210 #ifdef TARGET_X86_64
2211 if ((int32_t)val == val) {
2212 gen_op_movl_T0_im(val);
2213 } else {
2214 gen_op_movq_T0_im64(val >> 32, val);
2216 #else
2217 gen_op_movl_T0_im(val);
2218 #endif
/* Same as gen_movtl_T0_im but targets T1. */
2221 static void gen_movtl_T1_im(target_ulong val)
2223 #ifdef TARGET_X86_64
2224 if ((int32_t)val == val) {
2225 gen_op_movl_T1_im(val);
2226 } else {
2227 gen_op_movq_T1_im64(val >> 32, val);
2229 #else
2230 gen_op_movl_T1_im(val);
2231 #endif
/* 64-bit (q) and 128-bit (o) env load/store op tables, indexed by
   s->mem_index >> 2 (raw, then kernel/user in softmmu builds). */
2234 static GenOpFunc1 *gen_ldq_env_A0[3] = {
2235 gen_op_ldq_raw_env_A0,
2236 #ifndef CONFIG_USER_ONLY
2237 gen_op_ldq_kernel_env_A0,
2238 gen_op_ldq_user_env_A0,
2239 #endif
2242 static GenOpFunc1 *gen_stq_env_A0[3] = {
2243 gen_op_stq_raw_env_A0,
2244 #ifndef CONFIG_USER_ONLY
2245 gen_op_stq_kernel_env_A0,
2246 gen_op_stq_user_env_A0,
2247 #endif
2250 static GenOpFunc1 *gen_ldo_env_A0[3] = {
2251 gen_op_ldo_raw_env_A0,
2252 #ifndef CONFIG_USER_ONLY
2253 gen_op_ldo_kernel_env_A0,
2254 gen_op_ldo_user_env_A0,
2255 #endif
2258 static GenOpFunc1 *gen_sto_env_A0[3] = {
2259 gen_op_sto_raw_env_A0,
2260 #ifndef CONFIG_USER_ONLY
2261 gen_op_sto_kernel_env_A0,
2262 gen_op_sto_user_env_A0,
2263 #endif
/* Main SSE/MMX dispatch table, indexed by opcode byte and by prefix
   column b1 (0 = none/MMX, 1 = 0x66, 2 = 0xF3, 3 = 0xF2).  SSE_SPECIAL
   entries are handled inline in gen_sse. */
2266 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2268 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2269 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2270 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2272 static GenOpFunc2 *sse_op_table1[256][4] = {
2273 /* pure SSE operations */
2274 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2275 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2276 [0x12] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2277 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2278 [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
2279 [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
2280 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2281 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2283 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2284 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2285 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2286 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2287 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2288 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2289 [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
2290 [0x2f] = { gen_op_comiss, gen_op_comisd },
2291 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2292 [0x51] = SSE_FOP(sqrt),
2293 [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
2294 [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
2295 [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
2296 [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
2297 [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
2298 [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
2299 [0x58] = SSE_FOP(add),
2300 [0x59] = SSE_FOP(mul),
2301 [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
2302 gen_op_cvtss2sd, gen_op_cvtsd2ss },
2303 [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
2304 [0x5c] = SSE_FOP(sub),
2305 [0x5d] = SSE_FOP(min),
2306 [0x5e] = SSE_FOP(div),
2307 [0x5f] = SSE_FOP(max),
2309 [0xc2] = SSE_FOP(cmpeq),
2310 [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },
2312 /* MMX ops and their SSE extensions */
2313 [0x60] = MMX_OP2(punpcklbw),
2314 [0x61] = MMX_OP2(punpcklwd),
2315 [0x62] = MMX_OP2(punpckldq),
2316 [0x63] = MMX_OP2(packsswb),
2317 [0x64] = MMX_OP2(pcmpgtb),
2318 [0x65] = MMX_OP2(pcmpgtw),
2319 [0x66] = MMX_OP2(pcmpgtl),
2320 [0x67] = MMX_OP2(packuswb),
2321 [0x68] = MMX_OP2(punpckhbw),
2322 [0x69] = MMX_OP2(punpckhwd),
2323 [0x6a] = MMX_OP2(punpckhdq),
2324 [0x6b] = MMX_OP2(packssdw),
2325 [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
2326 [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
2327 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2328 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2329 [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
2330 (GenOpFunc2 *)gen_op_pshufd_xmm,
2331 (GenOpFunc2 *)gen_op_pshufhw_xmm,
2332 (GenOpFunc2 *)gen_op_pshuflw_xmm },
2333 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2334 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2335 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2336 [0x74] = MMX_OP2(pcmpeqb),
2337 [0x75] = MMX_OP2(pcmpeqw),
2338 [0x76] = MMX_OP2(pcmpeql),
2339 [0x77] = { SSE_SPECIAL }, /* emms */
2340 [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
2341 [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
2342 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2343 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2344 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2345 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2346 [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
2347 [0xd1] = MMX_OP2(psrlw),
2348 [0xd2] = MMX_OP2(psrld),
2349 [0xd3] = MMX_OP2(psrlq),
2350 [0xd4] = MMX_OP2(paddq),
2351 [0xd5] = MMX_OP2(pmullw),
2352 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2353 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2354 [0xd8] = MMX_OP2(psubusb),
2355 [0xd9] = MMX_OP2(psubusw),
2356 [0xda] = MMX_OP2(pminub),
2357 [0xdb] = MMX_OP2(pand),
2358 [0xdc] = MMX_OP2(paddusb),
2359 [0xdd] = MMX_OP2(paddusw),
2360 [0xde] = MMX_OP2(pmaxub),
2361 [0xdf] = MMX_OP2(pandn),
2362 [0xe0] = MMX_OP2(pavgb),
2363 [0xe1] = MMX_OP2(psraw),
2364 [0xe2] = MMX_OP2(psrad),
2365 [0xe3] = MMX_OP2(pavgw),
2366 [0xe4] = MMX_OP2(pmulhuw),
2367 [0xe5] = MMX_OP2(pmulhw),
2368 [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
2369 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
2370 [0xe8] = MMX_OP2(psubsb),
2371 [0xe9] = MMX_OP2(psubsw),
2372 [0xea] = MMX_OP2(pminsw),
2373 [0xeb] = MMX_OP2(por),
2374 [0xec] = MMX_OP2(paddsb),
2375 [0xed] = MMX_OP2(paddsw),
2376 [0xee] = MMX_OP2(pmaxsw),
2377 [0xef] = MMX_OP2(pxor),
2378 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu (PNI) */
2379 [0xf1] = MMX_OP2(psllw),
2380 [0xf2] = MMX_OP2(pslld),
2381 [0xf3] = MMX_OP2(psllq),
2382 [0xf4] = MMX_OP2(pmuludq),
2383 [0xf5] = MMX_OP2(pmaddwd),
2384 [0xf6] = MMX_OP2(psadbw),
2385 [0xf7] = MMX_OP2(maskmov),
2386 [0xf8] = MMX_OP2(psubb),
2387 [0xf9] = MMX_OP2(psubw),
2388 [0xfa] = MMX_OP2(psubl),
2389 [0xfb] = MMX_OP2(psubq),
2390 [0xfc] = MMX_OP2(paddb),
2391 [0xfd] = MMX_OP2(paddw),
2392 [0xfe] = MMX_OP2(paddl),
/* Shift-by-immediate group table (opcodes 0x71/0x72/0x73): rows of 8
   per element width (word, dword, qword), indexed by the /reg field;
   columns are { mmx, xmm }. */
2395 static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
2396 [0 + 2] = MMX_OP2(psrlw),
2397 [0 + 4] = MMX_OP2(psraw),
2398 [0 + 6] = MMX_OP2(psllw),
2399 [8 + 2] = MMX_OP2(psrld),
2400 [8 + 4] = MMX_OP2(psrad),
2401 [8 + 6] = MMX_OP2(pslld),
2402 [16 + 2] = MMX_OP2(psrlq),
2403 [16 + 3] = { NULL, gen_op_psrldq_xmm },
2404 [16 + 6] = MMX_OP2(psllq),
2405 [16 + 7] = { NULL, gen_op_pslldq_xmm },
/* Scalar int<->float conversions, in groups of 4 (ss/sd x 32/64-bit
   integer): cvtsi2*, cvtt*2si (truncating), cvt*2si.  The 64-bit
   variants only exist on x86_64 (NULL otherwise via X86_64_ONLY). */
2408 static GenOpFunc1 *sse_op_table3[4 * 3] = {
2409 gen_op_cvtsi2ss,
2410 gen_op_cvtsi2sd,
2411 X86_64_ONLY(gen_op_cvtsq2ss),
2412 X86_64_ONLY(gen_op_cvtsq2sd),
2414 gen_op_cvttss2si,
2415 gen_op_cvttsd2si,
2416 X86_64_ONLY(gen_op_cvttss2sq),
2417 X86_64_ONLY(gen_op_cvttsd2sq),
2419 gen_op_cvtss2si,
2420 gen_op_cvtsd2si,
2421 X86_64_ONLY(gen_op_cvtss2sq),
2422 X86_64_ONLY(gen_op_cvtsd2sq),
/* cmp*ps/pd/ss/sd predicates for opcode 0xc2, indexed by the 3-bit
   immediate predicate, columns { ps, pd, ss, sd } via SSE_FOP. */
2425 static GenOpFunc2 *sse_op_table4[8][4] = {
2426 SSE_FOP(cmpeq),
2427 SSE_FOP(cmplt),
2428 SSE_FOP(cmple),
2429 SSE_FOP(cmpunord),
2430 SSE_FOP(cmpneq),
2431 SSE_FOP(cmpnlt),
2432 SSE_FOP(cmpnle),
2433 SSE_FOP(cmpord),
2436 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2438 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2439 int modrm, mod, rm, reg, reg_addr, offset_addr;
2440 GenOpFunc2 *sse_op2;
2441 GenOpFunc3 *sse_op3;
2443 b &= 0xff;
2444 if (s->prefix & PREFIX_DATA)
2445 b1 = 1;
2446 else if (s->prefix & PREFIX_REPZ)
2447 b1 = 2;
2448 else if (s->prefix & PREFIX_REPNZ)
2449 b1 = 3;
2450 else
2451 b1 = 0;
2452 sse_op2 = sse_op_table1[b][b1];
2453 if (!sse_op2)
2454 goto illegal_op;
2455 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2456 is_xmm = 1;
2457 } else {
2458 if (b1 == 0) {
2459 /* MMX case */
2460 is_xmm = 0;
2461 } else {
2462 is_xmm = 1;
2465 /* simple MMX/SSE operation */
2466 if (s->flags & HF_TS_MASK) {
2467 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2468 return;
2470 if (s->flags & HF_EM_MASK) {
2471 illegal_op:
2472 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2473 return;
2475 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2476 goto illegal_op;
2477 if (b == 0x77) {
2478 /* emms */
2479 gen_op_emms();
2480 return;
2482 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2483 the static cpu state) */
2484 if (!is_xmm) {
2485 gen_op_enter_mmx();
2488 modrm = ldub_code(s->pc++);
2489 reg = ((modrm >> 3) & 7);
2490 if (is_xmm)
2491 reg |= rex_r;
2492 mod = (modrm >> 6) & 3;
2493 if (sse_op2 == SSE_SPECIAL) {
2494 b |= (b1 << 8);
2495 switch(b) {
2496 case 0x0e7: /* movntq */
2497 if (mod == 3)
2498 goto illegal_op;
2499 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2500 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2501 break;
2502 case 0x1e7: /* movntdq */
2503 case 0x02b: /* movntps */
2504 case 0x12b: /* movntps */
2505 case 0x2f0: /* lddqu */
2506 if (mod == 3)
2507 goto illegal_op;
2508 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2509 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2510 break;
2511 case 0x6e: /* movd mm, ea */
2512 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2513 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2514 break;
2515 case 0x16e: /* movd xmm, ea */
2516 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2517 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2518 break;
2519 case 0x6f: /* movq mm, ea */
2520 if (mod != 3) {
2521 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2522 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2523 } else {
2524 rm = (modrm & 7);
2525 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2526 offsetof(CPUX86State,fpregs[rm].mmx));
2528 break;
2529 case 0x010: /* movups */
2530 case 0x110: /* movupd */
2531 case 0x028: /* movaps */
2532 case 0x128: /* movapd */
2533 case 0x16f: /* movdqa xmm, ea */
2534 case 0x26f: /* movdqu xmm, ea */
2535 if (mod != 3) {
2536 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2537 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2538 } else {
2539 rm = (modrm & 7) | REX_B(s);
2540 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2541 offsetof(CPUX86State,xmm_regs[rm]));
2543 break;
2544 case 0x210: /* movss xmm, ea */
2545 if (mod != 3) {
2546 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2547 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2548 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2549 gen_op_movl_T0_0();
2550 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2551 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2552 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2553 } else {
2554 rm = (modrm & 7) | REX_B(s);
2555 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2556 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2558 break;
2559 case 0x310: /* movsd xmm, ea */
2560 if (mod != 3) {
2561 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2562 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2563 gen_op_movl_T0_0();
2564 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2565 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2566 } else {
2567 rm = (modrm & 7) | REX_B(s);
2568 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2569 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2571 break;
2572 case 0x012: /* movlps */
2573 case 0x112: /* movlpd */
2574 if (mod != 3) {
2575 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2576 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2577 } else {
2578 /* movhlps */
2579 rm = (modrm & 7) | REX_B(s);
2580 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2581 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2583 break;
2584 case 0x016: /* movhps */
2585 case 0x116: /* movhpd */
2586 if (mod != 3) {
2587 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2588 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2589 } else {
2590 /* movlhps */
2591 rm = (modrm & 7) | REX_B(s);
2592 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2593 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2595 break;
2596 case 0x216: /* movshdup */
2597 if (mod != 3) {
2598 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2599 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2600 } else {
2601 rm = (modrm & 7) | REX_B(s);
2602 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2603 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2604 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2605 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2607 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2608 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2609 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2610 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2611 break;
2612 case 0x7e: /* movd ea, mm */
2613 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2614 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2615 break;
2616 case 0x17e: /* movd ea, xmm */
2617 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2618 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2619 break;
2620 case 0x27e: /* movq xmm, ea */
2621 if (mod != 3) {
2622 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2623 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2624 } else {
2625 rm = (modrm & 7) | REX_B(s);
2626 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2627 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2629 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2630 break;
2631 case 0x7f: /* movq ea, mm */
2632 if (mod != 3) {
2633 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2634 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2635 } else {
2636 rm = (modrm & 7);
2637 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2638 offsetof(CPUX86State,fpregs[reg].mmx));
2640 break;
2641 case 0x011: /* movups */
2642 case 0x111: /* movupd */
2643 case 0x029: /* movaps */
2644 case 0x129: /* movapd */
2645 case 0x17f: /* movdqa ea, xmm */
2646 case 0x27f: /* movdqu ea, xmm */
2647 if (mod != 3) {
2648 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2649 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2650 } else {
2651 rm = (modrm & 7) | REX_B(s);
2652 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2653 offsetof(CPUX86State,xmm_regs[reg]));
2655 break;
2656 case 0x211: /* movss ea, xmm */
2657 if (mod != 3) {
2658 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2659 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2660 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2661 } else {
2662 rm = (modrm & 7) | REX_B(s);
2663 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2664 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2666 break;
2667 case 0x311: /* movsd ea, xmm */
2668 if (mod != 3) {
2669 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2670 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2671 } else {
2672 rm = (modrm & 7) | REX_B(s);
2673 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2674 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2676 break;
2677 case 0x013: /* movlps */
2678 case 0x113: /* movlpd */
2679 if (mod != 3) {
2680 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2681 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2682 } else {
2683 goto illegal_op;
2685 break;
2686 case 0x017: /* movhps */
2687 case 0x117: /* movhpd */
2688 if (mod != 3) {
2689 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2690 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2691 } else {
2692 goto illegal_op;
2694 break;
2695 case 0x71: /* shift mm, im */
2696 case 0x72:
2697 case 0x73:
2698 case 0x171: /* shift xmm, im */
2699 case 0x172:
2700 case 0x173:
2701 val = ldub_code(s->pc++);
2702 if (is_xmm) {
2703 gen_op_movl_T0_im(val);
2704 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2705 gen_op_movl_T0_0();
2706 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2707 op1_offset = offsetof(CPUX86State,xmm_t0);
2708 } else {
2709 gen_op_movl_T0_im(val);
2710 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2711 gen_op_movl_T0_0();
2712 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2713 op1_offset = offsetof(CPUX86State,mmx_t0);
2715 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2716 if (!sse_op2)
2717 goto illegal_op;
2718 if (is_xmm) {
2719 rm = (modrm & 7) | REX_B(s);
2720 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2721 } else {
2722 rm = (modrm & 7);
2723 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2725 sse_op2(op2_offset, op1_offset);
2726 break;
2727 case 0x050: /* movmskps */
2728 rm = (modrm & 7) | REX_B(s);
2729 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2730 gen_op_mov_reg_T0[OT_LONG][reg]();
2731 break;
2732 case 0x150: /* movmskpd */
2733 rm = (modrm & 7) | REX_B(s);
2734 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2735 gen_op_mov_reg_T0[OT_LONG][reg]();
2736 break;
2737 case 0x02a: /* cvtpi2ps */
2738 case 0x12a: /* cvtpi2pd */
2739 gen_op_enter_mmx();
2740 if (mod != 3) {
2741 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2742 op2_offset = offsetof(CPUX86State,mmx_t0);
2743 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2744 } else {
2745 rm = (modrm & 7);
2746 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2748 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2749 switch(b >> 8) {
2750 case 0x0:
2751 gen_op_cvtpi2ps(op1_offset, op2_offset);
2752 break;
2753 default:
2754 case 0x1:
2755 gen_op_cvtpi2pd(op1_offset, op2_offset);
2756 break;
2758 break;
2759 case 0x22a: /* cvtsi2ss */
2760 case 0x32a: /* cvtsi2sd */
2761 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2762 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2763 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2764 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2765 break;
2766 case 0x02c: /* cvttps2pi */
2767 case 0x12c: /* cvttpd2pi */
2768 case 0x02d: /* cvtps2pi */
2769 case 0x12d: /* cvtpd2pi */
2770 gen_op_enter_mmx();
2771 if (mod != 3) {
2772 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2773 op2_offset = offsetof(CPUX86State,xmm_t0);
2774 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2775 } else {
2776 rm = (modrm & 7) | REX_B(s);
2777 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2779 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2780 switch(b) {
2781 case 0x02c:
2782 gen_op_cvttps2pi(op1_offset, op2_offset);
2783 break;
2784 case 0x12c:
2785 gen_op_cvttpd2pi(op1_offset, op2_offset);
2786 break;
2787 case 0x02d:
2788 gen_op_cvtps2pi(op1_offset, op2_offset);
2789 break;
2790 case 0x12d:
2791 gen_op_cvtpd2pi(op1_offset, op2_offset);
2792 break;
2794 break;
2795 case 0x22c: /* cvttss2si */
2796 case 0x32c: /* cvttsd2si */
2797 case 0x22d: /* cvtss2si */
2798 case 0x32d: /* cvtsd2si */
2799 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2800 if (mod != 3) {
2801 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2802 if ((b >> 8) & 1) {
2803 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
2804 } else {
2805 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2806 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2808 op2_offset = offsetof(CPUX86State,xmm_t0);
2809 } else {
2810 rm = (modrm & 7) | REX_B(s);
2811 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2813 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
2814 (b & 1) * 4](op2_offset);
2815 gen_op_mov_reg_T0[ot][reg]();
2816 break;
2817 case 0xc4: /* pinsrw */
2818 case 0x1c4:
2819 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2820 val = ldub_code(s->pc++);
2821 if (b1) {
2822 val &= 7;
2823 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2824 } else {
2825 val &= 3;
2826 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2828 break;
2829 case 0xc5: /* pextrw */
2830 case 0x1c5:
2831 if (mod != 3)
2832 goto illegal_op;
2833 val = ldub_code(s->pc++);
2834 if (b1) {
2835 val &= 7;
2836 rm = (modrm & 7) | REX_B(s);
2837 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2838 } else {
2839 val &= 3;
2840 rm = (modrm & 7);
2841 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2843 reg = ((modrm >> 3) & 7) | rex_r;
2844 gen_op_mov_reg_T0[OT_LONG][reg]();
2845 break;
2846 case 0x1d6: /* movq ea, xmm */
2847 if (mod != 3) {
2848 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2849 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2850 } else {
2851 rm = (modrm & 7) | REX_B(s);
2852 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2853 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2854 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2856 break;
2857 case 0x2d6: /* movq2dq */
2858 gen_op_enter_mmx();
2859 rm = (modrm & 7) | REX_B(s);
2860 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2861 offsetof(CPUX86State,fpregs[reg & 7].mmx));
2862 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2863 break;
2864 case 0x3d6: /* movdq2q */
2865 gen_op_enter_mmx();
2866 rm = (modrm & 7);
2867 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2868 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2869 break;
2870 case 0xd7: /* pmovmskb */
2871 case 0x1d7:
2872 if (mod != 3)
2873 goto illegal_op;
2874 if (b1) {
2875 rm = (modrm & 7) | REX_B(s);
2876 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
2877 } else {
2878 rm = (modrm & 7);
2879 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
2881 reg = ((modrm >> 3) & 7) | rex_r;
2882 gen_op_mov_reg_T0[OT_LONG][reg]();
2883 break;
2884 default:
2885 goto illegal_op;
2887 } else {
2888 /* generic MMX or SSE operation */
2889 if (b == 0xf7) {
2890 /* maskmov : we must prepare A0 */
2891 if (mod != 3)
2892 goto illegal_op;
2893 #ifdef TARGET_X86_64
2894 if (CODE64(s)) {
2895 gen_op_movq_A0_reg[R_EDI]();
2896 } else
2897 #endif
2899 gen_op_movl_A0_reg[R_EDI]();
2900 if (s->aflag == 0)
2901 gen_op_andl_A0_ffff();
2903 gen_add_A0_ds_seg(s);
2905 if (is_xmm) {
2906 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2907 if (mod != 3) {
2908 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2909 op2_offset = offsetof(CPUX86State,xmm_t0);
2910 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
2911 b == 0xc2)) {
2912 /* specific case for SSE single instructions */
2913 if (b1 == 2) {
2914 /* 32 bit access */
2915 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2916 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2917 } else {
2918 /* 64 bit access */
2919 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
2921 } else {
2922 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2924 } else {
2925 rm = (modrm & 7) | REX_B(s);
2926 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2928 } else {
2929 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
2930 if (mod != 3) {
2931 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2932 op2_offset = offsetof(CPUX86State,mmx_t0);
2933 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2934 } else {
2935 rm = (modrm & 7);
2936 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2939 switch(b) {
2940 case 0x70: /* pshufx insn */
2941 case 0xc6: /* pshufx insn */
2942 val = ldub_code(s->pc++);
2943 sse_op3 = (GenOpFunc3 *)sse_op2;
2944 sse_op3(op1_offset, op2_offset, val);
2945 break;
2946 case 0xc2:
2947 /* compare insns */
2948 val = ldub_code(s->pc++);
2949 if (val >= 8)
2950 goto illegal_op;
2951 sse_op2 = sse_op_table4[val][b1];
2952 sse_op2(op1_offset, op2_offset);
2953 break;
2954 default:
2955 sse_op2(op1_offset, op2_offset);
2956 break;
2958 if (b == 0x2e || b == 0x2f) {
2959 s->cc_op = CC_OP_EFLAGS;
2965 /* convert one instruction. s->is_jmp is set if the translation must
2966 be stopped. Return the next pc value */
2967 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
2969 int b, prefixes, aflag, dflag;
2970 int shift, ot;
2971 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
2972 target_ulong next_eip, tval;
2973 int rex_w, rex_r;
2975 s->pc = pc_start;
2976 prefixes = 0;
2977 aflag = s->code32;
2978 dflag = s->code32;
2979 s->override = -1;
2980 rex_w = -1;
2981 rex_r = 0;
2982 #ifdef TARGET_X86_64
2983 s->rex_x = 0;
2984 s->rex_b = 0;
2985 x86_64_hregs = 0;
2986 #endif
2987 s->rip_offset = 0; /* for relative ip address */
2988 next_byte:
2989 b = ldub_code(s->pc);
2990 s->pc++;
2991 /* check prefixes */
2992 #ifdef TARGET_X86_64
2993 if (CODE64(s)) {
2994 switch (b) {
2995 case 0xf3:
2996 prefixes |= PREFIX_REPZ;
2997 goto next_byte;
2998 case 0xf2:
2999 prefixes |= PREFIX_REPNZ;
3000 goto next_byte;
3001 case 0xf0:
3002 prefixes |= PREFIX_LOCK;
3003 goto next_byte;
3004 case 0x2e:
3005 s->override = R_CS;
3006 goto next_byte;
3007 case 0x36:
3008 s->override = R_SS;
3009 goto next_byte;
3010 case 0x3e:
3011 s->override = R_DS;
3012 goto next_byte;
3013 case 0x26:
3014 s->override = R_ES;
3015 goto next_byte;
3016 case 0x64:
3017 s->override = R_FS;
3018 goto next_byte;
3019 case 0x65:
3020 s->override = R_GS;
3021 goto next_byte;
3022 case 0x66:
3023 prefixes |= PREFIX_DATA;
3024 goto next_byte;
3025 case 0x67:
3026 prefixes |= PREFIX_ADR;
3027 goto next_byte;
3028 case 0x40 ... 0x4f:
3029 /* REX prefix */
3030 rex_w = (b >> 3) & 1;
3031 rex_r = (b & 0x4) << 1;
3032 s->rex_x = (b & 0x2) << 2;
3033 REX_B(s) = (b & 0x1) << 3;
3034 x86_64_hregs = 1; /* select uniform byte register addressing */
3035 goto next_byte;
3037 if (rex_w == 1) {
3038 /* 0x66 is ignored if rex.w is set */
3039 dflag = 2;
3040 } else {
3041 if (prefixes & PREFIX_DATA)
3042 dflag ^= 1;
3044 if (!(prefixes & PREFIX_ADR))
3045 aflag = 2;
3046 } else
3047 #endif
3049 switch (b) {
3050 case 0xf3:
3051 prefixes |= PREFIX_REPZ;
3052 goto next_byte;
3053 case 0xf2:
3054 prefixes |= PREFIX_REPNZ;
3055 goto next_byte;
3056 case 0xf0:
3057 prefixes |= PREFIX_LOCK;
3058 goto next_byte;
3059 case 0x2e:
3060 s->override = R_CS;
3061 goto next_byte;
3062 case 0x36:
3063 s->override = R_SS;
3064 goto next_byte;
3065 case 0x3e:
3066 s->override = R_DS;
3067 goto next_byte;
3068 case 0x26:
3069 s->override = R_ES;
3070 goto next_byte;
3071 case 0x64:
3072 s->override = R_FS;
3073 goto next_byte;
3074 case 0x65:
3075 s->override = R_GS;
3076 goto next_byte;
3077 case 0x66:
3078 prefixes |= PREFIX_DATA;
3079 goto next_byte;
3080 case 0x67:
3081 prefixes |= PREFIX_ADR;
3082 goto next_byte;
3084 if (prefixes & PREFIX_DATA)
3085 dflag ^= 1;
3086 if (prefixes & PREFIX_ADR)
3087 aflag ^= 1;
3090 s->prefix = prefixes;
3091 s->aflag = aflag;
3092 s->dflag = dflag;
3094 /* lock generation */
3095 if (prefixes & PREFIX_LOCK)
3096 gen_op_lock();
3098 /* now check op code */
3099 reswitch:
3100 switch(b) {
3101 case 0x0f:
3102 /**************************/
3103 /* extended op code */
3104 b = ldub_code(s->pc++) | 0x100;
3105 goto reswitch;
3107 /**************************/
3108 /* arith & logic */
3109 case 0x00 ... 0x05:
3110 case 0x08 ... 0x0d:
3111 case 0x10 ... 0x15:
3112 case 0x18 ... 0x1d:
3113 case 0x20 ... 0x25:
3114 case 0x28 ... 0x2d:
3115 case 0x30 ... 0x35:
3116 case 0x38 ... 0x3d:
3118 int op, f, val;
3119 op = (b >> 3) & 7;
3120 f = (b >> 1) & 3;
3122 if ((b & 1) == 0)
3123 ot = OT_BYTE;
3124 else
3125 ot = dflag + OT_WORD;
3127 switch(f) {
3128 case 0: /* OP Ev, Gv */
3129 modrm = ldub_code(s->pc++);
3130 reg = ((modrm >> 3) & 7) | rex_r;
3131 mod = (modrm >> 6) & 3;
3132 rm = (modrm & 7) | REX_B(s);
3133 if (mod != 3) {
3134 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3135 opreg = OR_TMP0;
3136 } else if (op == OP_XORL && rm == reg) {
3137 xor_zero:
3138 /* xor reg, reg optimisation */
3139 gen_op_movl_T0_0();
3140 s->cc_op = CC_OP_LOGICB + ot;
3141 gen_op_mov_reg_T0[ot][reg]();
3142 gen_op_update1_cc();
3143 break;
3144 } else {
3145 opreg = rm;
3147 gen_op_mov_TN_reg[ot][1][reg]();
3148 gen_op(s, op, ot, opreg);
3149 break;
3150 case 1: /* OP Gv, Ev */
3151 modrm = ldub_code(s->pc++);
3152 mod = (modrm >> 6) & 3;
3153 reg = ((modrm >> 3) & 7) | rex_r;
3154 rm = (modrm & 7) | REX_B(s);
3155 if (mod != 3) {
3156 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3157 gen_op_ld_T1_A0[ot + s->mem_index]();
3158 } else if (op == OP_XORL && rm == reg) {
3159 goto xor_zero;
3160 } else {
3161 gen_op_mov_TN_reg[ot][1][rm]();
3163 gen_op(s, op, ot, reg);
3164 break;
3165 case 2: /* OP A, Iv */
3166 val = insn_get(s, ot);
3167 gen_op_movl_T1_im(val);
3168 gen_op(s, op, ot, OR_EAX);
3169 break;
3172 break;
3174 case 0x80: /* GRP1 */
3175 case 0x81:
3176 case 0x82:
3177 case 0x83:
3179 int val;
3181 if ((b & 1) == 0)
3182 ot = OT_BYTE;
3183 else
3184 ot = dflag + OT_WORD;
3186 modrm = ldub_code(s->pc++);
3187 mod = (modrm >> 6) & 3;
3188 rm = (modrm & 7) | REX_B(s);
3189 op = (modrm >> 3) & 7;
3191 if (mod != 3) {
3192 if (b == 0x83)
3193 s->rip_offset = 1;
3194 else
3195 s->rip_offset = insn_const_size(ot);
3196 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3197 opreg = OR_TMP0;
3198 } else {
3199 opreg = rm;
3202 switch(b) {
3203 default:
3204 case 0x80:
3205 case 0x81:
3206 case 0x82:
3207 val = insn_get(s, ot);
3208 break;
3209 case 0x83:
3210 val = (int8_t)insn_get(s, OT_BYTE);
3211 break;
3213 gen_op_movl_T1_im(val);
3214 gen_op(s, op, ot, opreg);
3216 break;
3218 /**************************/
3219 /* inc, dec, and other misc arith */
3220 case 0x40 ... 0x47: /* inc Gv */
3221 ot = dflag ? OT_LONG : OT_WORD;
3222 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3223 break;
3224 case 0x48 ... 0x4f: /* dec Gv */
3225 ot = dflag ? OT_LONG : OT_WORD;
3226 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3227 break;
3228 case 0xf6: /* GRP3 */
3229 case 0xf7:
3230 if ((b & 1) == 0)
3231 ot = OT_BYTE;
3232 else
3233 ot = dflag + OT_WORD;
3235 modrm = ldub_code(s->pc++);
3236 mod = (modrm >> 6) & 3;
3237 rm = (modrm & 7) | REX_B(s);
3238 op = (modrm >> 3) & 7;
3239 if (mod != 3) {
3240 if (op == 0)
3241 s->rip_offset = insn_const_size(ot);
3242 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3243 gen_op_ld_T0_A0[ot + s->mem_index]();
3244 } else {
3245 gen_op_mov_TN_reg[ot][0][rm]();
3248 switch(op) {
3249 case 0: /* test */
3250 val = insn_get(s, ot);
3251 gen_op_movl_T1_im(val);
3252 gen_op_testl_T0_T1_cc();
3253 s->cc_op = CC_OP_LOGICB + ot;
3254 break;
3255 case 2: /* not */
3256 gen_op_notl_T0();
3257 if (mod != 3) {
3258 gen_op_st_T0_A0[ot + s->mem_index]();
3259 } else {
3260 gen_op_mov_reg_T0[ot][rm]();
3262 break;
3263 case 3: /* neg */
3264 gen_op_negl_T0();
3265 if (mod != 3) {
3266 gen_op_st_T0_A0[ot + s->mem_index]();
3267 } else {
3268 gen_op_mov_reg_T0[ot][rm]();
3270 gen_op_update_neg_cc();
3271 s->cc_op = CC_OP_SUBB + ot;
3272 break;
3273 case 4: /* mul */
3274 switch(ot) {
3275 case OT_BYTE:
3276 gen_op_mulb_AL_T0();
3277 s->cc_op = CC_OP_MULB;
3278 break;
3279 case OT_WORD:
3280 gen_op_mulw_AX_T0();
3281 s->cc_op = CC_OP_MULW;
3282 break;
3283 default:
3284 case OT_LONG:
3285 gen_op_mull_EAX_T0();
3286 s->cc_op = CC_OP_MULL;
3287 break;
3288 #ifdef TARGET_X86_64
3289 case OT_QUAD:
3290 gen_op_mulq_EAX_T0();
3291 s->cc_op = CC_OP_MULQ;
3292 break;
3293 #endif
3295 break;
3296 case 5: /* imul */
3297 switch(ot) {
3298 case OT_BYTE:
3299 gen_op_imulb_AL_T0();
3300 s->cc_op = CC_OP_MULB;
3301 break;
3302 case OT_WORD:
3303 gen_op_imulw_AX_T0();
3304 s->cc_op = CC_OP_MULW;
3305 break;
3306 default:
3307 case OT_LONG:
3308 gen_op_imull_EAX_T0();
3309 s->cc_op = CC_OP_MULL;
3310 break;
3311 #ifdef TARGET_X86_64
3312 case OT_QUAD:
3313 gen_op_imulq_EAX_T0();
3314 s->cc_op = CC_OP_MULQ;
3315 break;
3316 #endif
3318 break;
3319 case 6: /* div */
3320 switch(ot) {
3321 case OT_BYTE:
3322 gen_jmp_im(pc_start - s->cs_base);
3323 gen_op_divb_AL_T0();
3324 break;
3325 case OT_WORD:
3326 gen_jmp_im(pc_start - s->cs_base);
3327 gen_op_divw_AX_T0();
3328 break;
3329 default:
3330 case OT_LONG:
3331 gen_jmp_im(pc_start - s->cs_base);
3332 gen_op_divl_EAX_T0();
3333 break;
3334 #ifdef TARGET_X86_64
3335 case OT_QUAD:
3336 gen_jmp_im(pc_start - s->cs_base);
3337 gen_op_divq_EAX_T0();
3338 break;
3339 #endif
3341 break;
3342 case 7: /* idiv */
3343 switch(ot) {
3344 case OT_BYTE:
3345 gen_jmp_im(pc_start - s->cs_base);
3346 gen_op_idivb_AL_T0();
3347 break;
3348 case OT_WORD:
3349 gen_jmp_im(pc_start - s->cs_base);
3350 gen_op_idivw_AX_T0();
3351 break;
3352 default:
3353 case OT_LONG:
3354 gen_jmp_im(pc_start - s->cs_base);
3355 gen_op_idivl_EAX_T0();
3356 break;
3357 #ifdef TARGET_X86_64
3358 case OT_QUAD:
3359 gen_jmp_im(pc_start - s->cs_base);
3360 gen_op_idivq_EAX_T0();
3361 break;
3362 #endif
3364 break;
3365 default:
3366 goto illegal_op;
3368 break;
3370 case 0xfe: /* GRP4 */
3371 case 0xff: /* GRP5 */
3372 if ((b & 1) == 0)
3373 ot = OT_BYTE;
3374 else
3375 ot = dflag + OT_WORD;
3377 modrm = ldub_code(s->pc++);
3378 mod = (modrm >> 6) & 3;
3379 rm = (modrm & 7) | REX_B(s);
3380 op = (modrm >> 3) & 7;
3381 if (op >= 2 && b == 0xfe) {
3382 goto illegal_op;
3384 if (CODE64(s)) {
3385 if (op >= 2 && op <= 5) {
3386 /* operand size for jumps is 64 bit */
3387 ot = OT_QUAD;
3388 } else if (op == 6) {
3389 /* default push size is 64 bit */
3390 ot = dflag ? OT_QUAD : OT_WORD;
3393 if (mod != 3) {
3394 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3395 if (op >= 2 && op != 3 && op != 5)
3396 gen_op_ld_T0_A0[ot + s->mem_index]();
3397 } else {
3398 gen_op_mov_TN_reg[ot][0][rm]();
3401 switch(op) {
3402 case 0: /* inc Ev */
3403 if (mod != 3)
3404 opreg = OR_TMP0;
3405 else
3406 opreg = rm;
3407 gen_inc(s, ot, opreg, 1);
3408 break;
3409 case 1: /* dec Ev */
3410 if (mod != 3)
3411 opreg = OR_TMP0;
3412 else
3413 opreg = rm;
3414 gen_inc(s, ot, opreg, -1);
3415 break;
3416 case 2: /* call Ev */
3417 /* XXX: optimize if memory (no 'and' is necessary) */
3418 if (s->dflag == 0)
3419 gen_op_andl_T0_ffff();
3420 next_eip = s->pc - s->cs_base;
3421 gen_movtl_T1_im(next_eip);
3422 gen_push_T1(s);
3423 gen_op_jmp_T0();
3424 gen_eob(s);
3425 break;
3426 case 3: /* lcall Ev */
3427 gen_op_ld_T1_A0[ot + s->mem_index]();
3428 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3429 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3430 do_lcall:
3431 if (s->pe && !s->vm86) {
3432 if (s->cc_op != CC_OP_DYNAMIC)
3433 gen_op_set_cc_op(s->cc_op);
3434 gen_jmp_im(pc_start - s->cs_base);
3435 gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
3436 } else {
3437 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3439 gen_eob(s);
3440 break;
3441 case 4: /* jmp Ev */
3442 if (s->dflag == 0)
3443 gen_op_andl_T0_ffff();
3444 gen_op_jmp_T0();
3445 gen_eob(s);
3446 break;
3447 case 5: /* ljmp Ev */
3448 gen_op_ld_T1_A0[ot + s->mem_index]();
3449 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3450 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3451 do_ljmp:
3452 if (s->pe && !s->vm86) {
3453 if (s->cc_op != CC_OP_DYNAMIC)
3454 gen_op_set_cc_op(s->cc_op);
3455 gen_jmp_im(pc_start - s->cs_base);
3456 gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
3457 } else {
3458 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3459 gen_op_movl_T0_T1();
3460 gen_op_jmp_T0();
3462 gen_eob(s);
3463 break;
3464 case 6: /* push Ev */
3465 gen_push_T0(s);
3466 break;
3467 default:
3468 goto illegal_op;
3470 break;
3472 case 0x84: /* test Ev, Gv */
3473 case 0x85:
3474 if ((b & 1) == 0)
3475 ot = OT_BYTE;
3476 else
3477 ot = dflag + OT_WORD;
3479 modrm = ldub_code(s->pc++);
3480 mod = (modrm >> 6) & 3;
3481 rm = (modrm & 7) | REX_B(s);
3482 reg = ((modrm >> 3) & 7) | rex_r;
3484 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3485 gen_op_mov_TN_reg[ot][1][reg]();
3486 gen_op_testl_T0_T1_cc();
3487 s->cc_op = CC_OP_LOGICB + ot;
3488 break;
3490 case 0xa8: /* test eAX, Iv */
3491 case 0xa9:
3492 if ((b & 1) == 0)
3493 ot = OT_BYTE;
3494 else
3495 ot = dflag + OT_WORD;
3496 val = insn_get(s, ot);
3498 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3499 gen_op_movl_T1_im(val);
3500 gen_op_testl_T0_T1_cc();
3501 s->cc_op = CC_OP_LOGICB + ot;
3502 break;
3504 case 0x98: /* CWDE/CBW */
3505 #ifdef TARGET_X86_64
3506 if (dflag == 2) {
3507 gen_op_movslq_RAX_EAX();
3508 } else
3509 #endif
3510 if (dflag == 1)
3511 gen_op_movswl_EAX_AX();
3512 else
3513 gen_op_movsbw_AX_AL();
3514 break;
3515 case 0x99: /* CDQ/CWD */
3516 #ifdef TARGET_X86_64
3517 if (dflag == 2) {
3518 gen_op_movsqo_RDX_RAX();
3519 } else
3520 #endif
3521 if (dflag == 1)
3522 gen_op_movslq_EDX_EAX();
3523 else
3524 gen_op_movswl_DX_AX();
3525 break;
3526 case 0x1af: /* imul Gv, Ev */
3527 case 0x69: /* imul Gv, Ev, I */
3528 case 0x6b:
3529 ot = dflag + OT_WORD;
3530 modrm = ldub_code(s->pc++);
3531 reg = ((modrm >> 3) & 7) | rex_r;
3532 if (b == 0x69)
3533 s->rip_offset = insn_const_size(ot);
3534 else if (b == 0x6b)
3535 s->rip_offset = 1;
3536 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3537 if (b == 0x69) {
3538 val = insn_get(s, ot);
3539 gen_op_movl_T1_im(val);
3540 } else if (b == 0x6b) {
3541 val = (int8_t)insn_get(s, OT_BYTE);
3542 gen_op_movl_T1_im(val);
3543 } else {
3544 gen_op_mov_TN_reg[ot][1][reg]();
3547 #ifdef TARGET_X86_64
3548 if (ot == OT_QUAD) {
3549 gen_op_imulq_T0_T1();
3550 } else
3551 #endif
3552 if (ot == OT_LONG) {
3553 gen_op_imull_T0_T1();
3554 } else {
3555 gen_op_imulw_T0_T1();
3557 gen_op_mov_reg_T0[ot][reg]();
3558 s->cc_op = CC_OP_MULB + ot;
3559 break;
3560 case 0x1c0:
3561 case 0x1c1: /* xadd Ev, Gv */
3562 if ((b & 1) == 0)
3563 ot = OT_BYTE;
3564 else
3565 ot = dflag + OT_WORD;
3566 modrm = ldub_code(s->pc++);
3567 reg = ((modrm >> 3) & 7) | rex_r;
3568 mod = (modrm >> 6) & 3;
3569 if (mod == 3) {
3570 rm = (modrm & 7) | REX_B(s);
3571 gen_op_mov_TN_reg[ot][0][reg]();
3572 gen_op_mov_TN_reg[ot][1][rm]();
3573 gen_op_addl_T0_T1();
3574 gen_op_mov_reg_T1[ot][reg]();
3575 gen_op_mov_reg_T0[ot][rm]();
3576 } else {
3577 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3578 gen_op_mov_TN_reg[ot][0][reg]();
3579 gen_op_ld_T1_A0[ot + s->mem_index]();
3580 gen_op_addl_T0_T1();
3581 gen_op_st_T0_A0[ot + s->mem_index]();
3582 gen_op_mov_reg_T1[ot][reg]();
3584 gen_op_update2_cc();
3585 s->cc_op = CC_OP_ADDB + ot;
3586 break;
3587 case 0x1b0:
3588 case 0x1b1: /* cmpxchg Ev, Gv */
3589 if ((b & 1) == 0)
3590 ot = OT_BYTE;
3591 else
3592 ot = dflag + OT_WORD;
3593 modrm = ldub_code(s->pc++);
3594 reg = ((modrm >> 3) & 7) | rex_r;
3595 mod = (modrm >> 6) & 3;
3596 gen_op_mov_TN_reg[ot][1][reg]();
3597 if (mod == 3) {
3598 rm = (modrm & 7) | REX_B(s);
3599 gen_op_mov_TN_reg[ot][0][rm]();
3600 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3601 gen_op_mov_reg_T0[ot][rm]();
3602 } else {
3603 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3604 gen_op_ld_T0_A0[ot + s->mem_index]();
3605 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3607 s->cc_op = CC_OP_SUBB + ot;
3608 break;
3609 case 0x1c7: /* cmpxchg8b */
3610 modrm = ldub_code(s->pc++);
3611 mod = (modrm >> 6) & 3;
3612 if (mod == 3)
3613 goto illegal_op;
3614 if (s->cc_op != CC_OP_DYNAMIC)
3615 gen_op_set_cc_op(s->cc_op);
3616 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3617 gen_op_cmpxchg8b();
3618 s->cc_op = CC_OP_EFLAGS;
3619 break;
3621 /**************************/
3622 /* push/pop */
3623 case 0x50 ... 0x57: /* push */
3624 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3625 gen_push_T0(s);
3626 break;
3627 case 0x58 ... 0x5f: /* pop */
3628 if (CODE64(s)) {
3629 ot = dflag ? OT_QUAD : OT_WORD;
3630 } else {
3631 ot = dflag + OT_WORD;
3633 gen_pop_T0(s);
3634 /* NOTE: order is important for pop %sp */
3635 gen_pop_update(s);
3636 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3637 break;
3638 case 0x60: /* pusha */
3639 if (CODE64(s))
3640 goto illegal_op;
3641 gen_pusha(s);
3642 break;
3643 case 0x61: /* popa */
3644 if (CODE64(s))
3645 goto illegal_op;
3646 gen_popa(s);
3647 break;
3648 case 0x68: /* push Iv */
3649 case 0x6a:
3650 if (CODE64(s)) {
3651 ot = dflag ? OT_QUAD : OT_WORD;
3652 } else {
3653 ot = dflag + OT_WORD;
3655 if (b == 0x68)
3656 val = insn_get(s, ot);
3657 else
3658 val = (int8_t)insn_get(s, OT_BYTE);
3659 gen_op_movl_T0_im(val);
3660 gen_push_T0(s);
3661 break;
3662 case 0x8f: /* pop Ev */
3663 if (CODE64(s)) {
3664 ot = dflag ? OT_QUAD : OT_WORD;
3665 } else {
3666 ot = dflag + OT_WORD;
3668 modrm = ldub_code(s->pc++);
3669 mod = (modrm >> 6) & 3;
3670 gen_pop_T0(s);
3671 if (mod == 3) {
3672 /* NOTE: order is important for pop %sp */
3673 gen_pop_update(s);
3674 rm = (modrm & 7) | REX_B(s);
3675 gen_op_mov_reg_T0[ot][rm]();
3676 } else {
3677 /* NOTE: order is important too for MMU exceptions */
3678 s->popl_esp_hack = 1 << ot;
3679 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3680 s->popl_esp_hack = 0;
3681 gen_pop_update(s);
3683 break;
3684 case 0xc8: /* enter */
3686 /* XXX: long mode support */
3687 int level;
3688 val = lduw_code(s->pc);
3689 s->pc += 2;
3690 level = ldub_code(s->pc++);
3691 gen_enter(s, val, level);
3693 break;
3694 case 0xc9: /* leave */
3695 /* XXX: exception not precise (ESP is updated before potential exception) */
3696 /* XXX: may be invalid for 16 bit in long mode */
3697 if (CODE64(s)) {
3698 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3699 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3700 } else if (s->ss32) {
3701 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3702 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3703 } else {
3704 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3705 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3707 gen_pop_T0(s);
3708 if (CODE64(s)) {
3709 ot = dflag ? OT_QUAD : OT_WORD;
3710 } else {
3711 ot = dflag + OT_WORD;
3713 gen_op_mov_reg_T0[ot][R_EBP]();
3714 gen_pop_update(s);
3715 break;
3716 case 0x06: /* push es */
3717 case 0x0e: /* push cs */
3718 case 0x16: /* push ss */
3719 case 0x1e: /* push ds */
3720 if (CODE64(s))
3721 goto illegal_op;
3722 gen_op_movl_T0_seg(b >> 3);
3723 gen_push_T0(s);
3724 break;
3725 case 0x1a0: /* push fs */
3726 case 0x1a8: /* push gs */
3727 gen_op_movl_T0_seg((b >> 3) & 7);
3728 gen_push_T0(s);
3729 break;
3730 case 0x07: /* pop es */
3731 case 0x17: /* pop ss */
3732 case 0x1f: /* pop ds */
3733 if (CODE64(s))
3734 goto illegal_op;
3735 reg = b >> 3;
3736 gen_pop_T0(s);
3737 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3738 gen_pop_update(s);
3739 if (reg == R_SS) {
3740 /* if reg == SS, inhibit interrupts/trace. */
3741 /* If several instructions disable interrupts, only the
3742 _first_ does it */
3743 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3744 gen_op_set_inhibit_irq();
3745 s->tf = 0;
3747 if (s->is_jmp) {
3748 gen_jmp_im(s->pc - s->cs_base);
3749 gen_eob(s);
3751 break;
3752 case 0x1a1: /* pop fs */
3753 case 0x1a9: /* pop gs */
3754 gen_pop_T0(s);
3755 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3756 gen_pop_update(s);
3757 if (s->is_jmp) {
3758 gen_jmp_im(s->pc - s->cs_base);
3759 gen_eob(s);
3761 break;
3763 /**************************/
3764 /* mov */
3765 case 0x88:
3766 case 0x89: /* mov Gv, Ev */
3767 if ((b & 1) == 0)
3768 ot = OT_BYTE;
3769 else
3770 ot = dflag + OT_WORD;
3771 modrm = ldub_code(s->pc++);
3772 reg = ((modrm >> 3) & 7) | rex_r;
3774 /* generate a generic store */
3775 gen_ldst_modrm(s, modrm, ot, reg, 1);
3776 break;
3777 case 0xc6:
3778 case 0xc7: /* mov Ev, Iv */
3779 if ((b & 1) == 0)
3780 ot = OT_BYTE;
3781 else
3782 ot = dflag + OT_WORD;
3783 modrm = ldub_code(s->pc++);
3784 mod = (modrm >> 6) & 3;
3785 if (mod != 3) {
3786 s->rip_offset = insn_const_size(ot);
3787 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3789 val = insn_get(s, ot);
3790 gen_op_movl_T0_im(val);
3791 if (mod != 3)
3792 gen_op_st_T0_A0[ot + s->mem_index]();
3793 else
3794 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3795 break;
3796 case 0x8a:
3797 case 0x8b: /* mov Ev, Gv */
3798 if ((b & 1) == 0)
3799 ot = OT_BYTE;
3800 else
3801 ot = OT_WORD + dflag;
3802 modrm = ldub_code(s->pc++);
3803 reg = ((modrm >> 3) & 7) | rex_r;
3805 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3806 gen_op_mov_reg_T0[ot][reg]();
3807 break;
3808 case 0x8e: /* mov seg, Gv */
3809 modrm = ldub_code(s->pc++);
3810 reg = (modrm >> 3) & 7;
3811 if (reg >= 6 || reg == R_CS)
3812 goto illegal_op;
3813 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3814 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3815 if (reg == R_SS) {
3816 /* if reg == SS, inhibit interrupts/trace */
3817 /* If several instructions disable interrupts, only the
3818 _first_ does it */
3819 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3820 gen_op_set_inhibit_irq();
3821 s->tf = 0;
3823 if (s->is_jmp) {
3824 gen_jmp_im(s->pc - s->cs_base);
3825 gen_eob(s);
3827 break;
3828 case 0x8c: /* mov Gv, seg */
3829 modrm = ldub_code(s->pc++);
3830 reg = (modrm >> 3) & 7;
3831 mod = (modrm >> 6) & 3;
3832 if (reg >= 6)
3833 goto illegal_op;
3834 gen_op_movl_T0_seg(reg);
3835 if (mod == 3)
3836 ot = OT_WORD + dflag;
3837 else
3838 ot = OT_WORD;
3839 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3840 break;
3842 case 0x1b6: /* movzbS Gv, Eb */
3843 case 0x1b7: /* movzwS Gv, Eb */
3844 case 0x1be: /* movsbS Gv, Eb */
3845 case 0x1bf: /* movswS Gv, Eb */
3847 int d_ot;
3848 /* d_ot is the size of destination */
3849 d_ot = dflag + OT_WORD;
3850 /* ot is the size of source */
3851 ot = (b & 1) + OT_BYTE;
3852 modrm = ldub_code(s->pc++);
3853 reg = ((modrm >> 3) & 7) | rex_r;
3854 mod = (modrm >> 6) & 3;
3855 rm = (modrm & 7) | REX_B(s);
3857 if (mod == 3) {
3858 gen_op_mov_TN_reg[ot][0][rm]();
3859 switch(ot | (b & 8)) {
3860 case OT_BYTE:
3861 gen_op_movzbl_T0_T0();
3862 break;
3863 case OT_BYTE | 8:
3864 gen_op_movsbl_T0_T0();
3865 break;
3866 case OT_WORD:
3867 gen_op_movzwl_T0_T0();
3868 break;
3869 default:
3870 case OT_WORD | 8:
3871 gen_op_movswl_T0_T0();
3872 break;
3874 gen_op_mov_reg_T0[d_ot][reg]();
3875 } else {
3876 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3877 if (b & 8) {
3878 gen_op_lds_T0_A0[ot + s->mem_index]();
3879 } else {
3880 gen_op_ldu_T0_A0[ot + s->mem_index]();
3882 gen_op_mov_reg_T0[d_ot][reg]();
3885 break;
3887 case 0x8d: /* lea */
3888 ot = dflag + OT_WORD;
3889 modrm = ldub_code(s->pc++);
3890 mod = (modrm >> 6) & 3;
3891 if (mod == 3)
3892 goto illegal_op;
3893 reg = ((modrm >> 3) & 7) | rex_r;
3894 /* we must ensure that no segment is added */
3895 s->override = -1;
3896 val = s->addseg;
3897 s->addseg = 0;
3898 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3899 s->addseg = val;
3900 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3901 break;
3903 case 0xa0: /* mov EAX, Ov */
3904 case 0xa1:
3905 case 0xa2: /* mov Ov, EAX */
3906 case 0xa3:
3908 target_ulong offset_addr;
3910 if ((b & 1) == 0)
3911 ot = OT_BYTE;
3912 else
3913 ot = dflag + OT_WORD;
3914 #ifdef TARGET_X86_64
3915 if (CODE64(s)) {
3916 offset_addr = ldq_code(s->pc);
3917 s->pc += 8;
3918 if (offset_addr == (int32_t)offset_addr)
3919 gen_op_movq_A0_im(offset_addr);
3920 else
3921 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3922 } else
3923 #endif
3925 if (s->aflag) {
3926 offset_addr = insn_get(s, OT_LONG);
3927 } else {
3928 offset_addr = insn_get(s, OT_WORD);
3930 gen_op_movl_A0_im(offset_addr);
3932 gen_add_A0_ds_seg(s);
3933 if ((b & 2) == 0) {
3934 gen_op_ld_T0_A0[ot + s->mem_index]();
3935 gen_op_mov_reg_T0[ot][R_EAX]();
3936 } else {
3937 gen_op_mov_TN_reg[ot][0][R_EAX]();
3938 gen_op_st_T0_A0[ot + s->mem_index]();
3941 break;
3942 case 0xd7: /* xlat */
3943 #ifdef TARGET_X86_64
3944 if (CODE64(s)) {
3945 gen_op_movq_A0_reg[R_EBX]();
3946 gen_op_addq_A0_AL();
3947 } else
3948 #endif
3950 gen_op_movl_A0_reg[R_EBX]();
3951 gen_op_addl_A0_AL();
3952 if (s->aflag == 0)
3953 gen_op_andl_A0_ffff();
3955 gen_add_A0_ds_seg(s);
3956 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
3957 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
3958 break;
3959 case 0xb0 ... 0xb7: /* mov R, Ib */
3960 val = insn_get(s, OT_BYTE);
3961 gen_op_movl_T0_im(val);
3962 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
3963 break;
3964 case 0xb8 ... 0xbf: /* mov R, Iv */
3965 #ifdef TARGET_X86_64
3966 if (dflag == 2) {
3967 uint64_t tmp;
3968 /* 64 bit case */
3969 tmp = ldq_code(s->pc);
3970 s->pc += 8;
3971 reg = (b & 7) | REX_B(s);
3972 gen_movtl_T0_im(tmp);
3973 gen_op_mov_reg_T0[OT_QUAD][reg]();
3974 } else
3975 #endif
3977 ot = dflag ? OT_LONG : OT_WORD;
3978 val = insn_get(s, ot);
3979 reg = (b & 7) | REX_B(s);
3980 gen_op_movl_T0_im(val);
3981 gen_op_mov_reg_T0[ot][reg]();
3983 break;
3985 case 0x91 ... 0x97: /* xchg R, EAX */
3986 ot = dflag + OT_WORD;
3987 reg = (b & 7) | REX_B(s);
3988 rm = R_EAX;
3989 goto do_xchg_reg;
3990 case 0x86:
3991 case 0x87: /* xchg Ev, Gv */
3992 if ((b & 1) == 0)
3993 ot = OT_BYTE;
3994 else
3995 ot = dflag + OT_WORD;
3996 modrm = ldub_code(s->pc++);
3997 reg = ((modrm >> 3) & 7) | rex_r;
3998 mod = (modrm >> 6) & 3;
3999 if (mod == 3) {
4000 rm = (modrm & 7) | REX_B(s);
4001 do_xchg_reg:
4002 gen_op_mov_TN_reg[ot][0][reg]();
4003 gen_op_mov_TN_reg[ot][1][rm]();
4004 gen_op_mov_reg_T0[ot][rm]();
4005 gen_op_mov_reg_T1[ot][reg]();
4006 } else {
4007 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4008 gen_op_mov_TN_reg[ot][0][reg]();
4009 /* for xchg, lock is implicit */
4010 if (!(prefixes & PREFIX_LOCK))
4011 gen_op_lock();
4012 gen_op_ld_T1_A0[ot + s->mem_index]();
4013 gen_op_st_T0_A0[ot + s->mem_index]();
4014 if (!(prefixes & PREFIX_LOCK))
4015 gen_op_unlock();
4016 gen_op_mov_reg_T1[ot][reg]();
4018 break;
4019 case 0xc4: /* les Gv */
4020 if (CODE64(s))
4021 goto illegal_op;
4022 op = R_ES;
4023 goto do_lxx;
4024 case 0xc5: /* lds Gv */
4025 if (CODE64(s))
4026 goto illegal_op;
4027 op = R_DS;
4028 goto do_lxx;
4029 case 0x1b2: /* lss Gv */
4030 op = R_SS;
4031 goto do_lxx;
4032 case 0x1b4: /* lfs Gv */
4033 op = R_FS;
4034 goto do_lxx;
4035 case 0x1b5: /* lgs Gv */
4036 op = R_GS;
4037 do_lxx:
4038 ot = dflag ? OT_LONG : OT_WORD;
4039 modrm = ldub_code(s->pc++);
4040 reg = ((modrm >> 3) & 7) | rex_r;
4041 mod = (modrm >> 6) & 3;
4042 if (mod == 3)
4043 goto illegal_op;
4044 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4045 gen_op_ld_T1_A0[ot + s->mem_index]();
4046 gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
4047 /* load the segment first to handle exceptions properly */
4048 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4049 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4050 /* then put the data */
4051 gen_op_mov_reg_T1[ot][reg]();
4052 if (s->is_jmp) {
4053 gen_jmp_im(s->pc - s->cs_base);
4054 gen_eob(s);
4056 break;
4058 /************************/
4059 /* shifts */
4060 case 0xc0:
4061 case 0xc1:
4062 /* shift Ev,Ib */
4063 shift = 2;
4064 grp2:
4066 if ((b & 1) == 0)
4067 ot = OT_BYTE;
4068 else
4069 ot = dflag + OT_WORD;
4071 modrm = ldub_code(s->pc++);
4072 mod = (modrm >> 6) & 3;
4073 op = (modrm >> 3) & 7;
4075 if (mod != 3) {
4076 if (shift == 2) {
4077 s->rip_offset = 1;
4079 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4080 opreg = OR_TMP0;
4081 } else {
4082 opreg = (modrm & 7) | REX_B(s);
4085 /* simpler op */
4086 if (shift == 0) {
4087 gen_shift(s, op, ot, opreg, OR_ECX);
4088 } else {
4089 if (shift == 2) {
4090 shift = ldub_code(s->pc++);
4092 gen_shifti(s, op, ot, opreg, shift);
4095 break;
4096 case 0xd0:
4097 case 0xd1:
4098 /* shift Ev,1 */
4099 shift = 1;
4100 goto grp2;
4101 case 0xd2:
4102 case 0xd3:
4103 /* shift Ev,cl */
4104 shift = 0;
4105 goto grp2;
4107 case 0x1a4: /* shld imm */
4108 op = 0;
4109 shift = 1;
4110 goto do_shiftd;
4111 case 0x1a5: /* shld cl */
4112 op = 0;
4113 shift = 0;
4114 goto do_shiftd;
4115 case 0x1ac: /* shrd imm */
4116 op = 1;
4117 shift = 1;
4118 goto do_shiftd;
4119 case 0x1ad: /* shrd cl */
4120 op = 1;
4121 shift = 0;
4122 do_shiftd:
4123 ot = dflag + OT_WORD;
4124 modrm = ldub_code(s->pc++);
4125 mod = (modrm >> 6) & 3;
4126 rm = (modrm & 7) | REX_B(s);
4127 reg = ((modrm >> 3) & 7) | rex_r;
4129 if (mod != 3) {
4130 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4131 gen_op_ld_T0_A0[ot + s->mem_index]();
4132 } else {
4133 gen_op_mov_TN_reg[ot][0][rm]();
4135 gen_op_mov_TN_reg[ot][1][reg]();
4137 if (shift) {
4138 val = ldub_code(s->pc++);
4139 if (ot == OT_QUAD)
4140 val &= 0x3f;
4141 else
4142 val &= 0x1f;
4143 if (val) {
4144 if (mod == 3)
4145 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4146 else
4147 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4148 if (op == 0 && ot != OT_WORD)
4149 s->cc_op = CC_OP_SHLB + ot;
4150 else
4151 s->cc_op = CC_OP_SARB + ot;
4153 } else {
4154 if (s->cc_op != CC_OP_DYNAMIC)
4155 gen_op_set_cc_op(s->cc_op);
4156 if (mod == 3)
4157 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4158 else
4159 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4160 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4162 if (mod == 3) {
4163 gen_op_mov_reg_T0[ot][rm]();
4165 break;
4167 /************************/
4168 /* floats */
4169 case 0xd8 ... 0xdf:
4170 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4171 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4172 /* XXX: what to do if illegal op ? */
4173 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4174 break;
4176 modrm = ldub_code(s->pc++);
4177 mod = (modrm >> 6) & 3;
4178 rm = modrm & 7;
4179 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4180 if (mod != 3) {
4181 /* memory op */
4182 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4183 switch(op) {
4184 case 0x00 ... 0x07: /* fxxxs */
4185 case 0x10 ... 0x17: /* fixxxl */
4186 case 0x20 ... 0x27: /* fxxxl */
4187 case 0x30 ... 0x37: /* fixxx */
4189 int op1;
4190 op1 = op & 7;
4192 switch(op >> 4) {
4193 case 0:
4194 gen_op_flds_FT0_A0();
4195 break;
4196 case 1:
4197 gen_op_fildl_FT0_A0();
4198 break;
4199 case 2:
4200 gen_op_fldl_FT0_A0();
4201 break;
4202 case 3:
4203 default:
4204 gen_op_fild_FT0_A0();
4205 break;
4208 gen_op_fp_arith_ST0_FT0[op1]();
4209 if (op1 == 3) {
4210 /* fcomp needs pop */
4211 gen_op_fpop();
4214 break;
4215 case 0x08: /* flds */
4216 case 0x0a: /* fsts */
4217 case 0x0b: /* fstps */
4218 case 0x18: /* fildl */
4219 case 0x1a: /* fistl */
4220 case 0x1b: /* fistpl */
4221 case 0x28: /* fldl */
4222 case 0x2a: /* fstl */
4223 case 0x2b: /* fstpl */
4224 case 0x38: /* filds */
4225 case 0x3a: /* fists */
4226 case 0x3b: /* fistps */
4228 switch(op & 7) {
4229 case 0:
4230 switch(op >> 4) {
4231 case 0:
4232 gen_op_flds_ST0_A0();
4233 break;
4234 case 1:
4235 gen_op_fildl_ST0_A0();
4236 break;
4237 case 2:
4238 gen_op_fldl_ST0_A0();
4239 break;
4240 case 3:
4241 default:
4242 gen_op_fild_ST0_A0();
4243 break;
4245 break;
4246 default:
4247 switch(op >> 4) {
4248 case 0:
4249 gen_op_fsts_ST0_A0();
4250 break;
4251 case 1:
4252 gen_op_fistl_ST0_A0();
4253 break;
4254 case 2:
4255 gen_op_fstl_ST0_A0();
4256 break;
4257 case 3:
4258 default:
4259 gen_op_fist_ST0_A0();
4260 break;
4262 if ((op & 7) == 3)
4263 gen_op_fpop();
4264 break;
4266 break;
4267 case 0x0c: /* fldenv mem */
4268 gen_op_fldenv_A0(s->dflag);
4269 break;
4270 case 0x0d: /* fldcw mem */
4271 gen_op_fldcw_A0();
4272 break;
4273 case 0x0e: /* fnstenv mem */
4274 gen_op_fnstenv_A0(s->dflag);
4275 break;
4276 case 0x0f: /* fnstcw mem */
4277 gen_op_fnstcw_A0();
4278 break;
4279 case 0x1d: /* fldt mem */
4280 gen_op_fldt_ST0_A0();
4281 break;
4282 case 0x1f: /* fstpt mem */
4283 gen_op_fstt_ST0_A0();
4284 gen_op_fpop();
4285 break;
4286 case 0x2c: /* frstor mem */
4287 gen_op_frstor_A0(s->dflag);
4288 break;
4289 case 0x2e: /* fnsave mem */
4290 gen_op_fnsave_A0(s->dflag);
4291 break;
4292 case 0x2f: /* fnstsw mem */
4293 gen_op_fnstsw_A0();
4294 break;
4295 case 0x3c: /* fbld */
4296 gen_op_fbld_ST0_A0();
4297 break;
4298 case 0x3e: /* fbstp */
4299 gen_op_fbst_ST0_A0();
4300 gen_op_fpop();
4301 break;
4302 case 0x3d: /* fildll */
4303 gen_op_fildll_ST0_A0();
4304 break;
4305 case 0x3f: /* fistpll */
4306 gen_op_fistll_ST0_A0();
4307 gen_op_fpop();
4308 break;
4309 default:
4310 goto illegal_op;
4312 } else {
4313 /* register float ops */
4314 opreg = rm;
4316 switch(op) {
4317 case 0x08: /* fld sti */
4318 gen_op_fpush();
4319 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4320 break;
4321 case 0x09: /* fxchg sti */
4322 case 0x29: /* fxchg4 sti, undocumented op */
4323 case 0x39: /* fxchg7 sti, undocumented op */
4324 gen_op_fxchg_ST0_STN(opreg);
4325 break;
4326 case 0x0a: /* grp d9/2 */
4327 switch(rm) {
4328 case 0: /* fnop */
4329 /* check exceptions (FreeBSD FPU probe) */
4330 if (s->cc_op != CC_OP_DYNAMIC)
4331 gen_op_set_cc_op(s->cc_op);
4332 gen_jmp_im(pc_start - s->cs_base);
4333 gen_op_fwait();
4334 break;
4335 default:
4336 goto illegal_op;
4338 break;
4339 case 0x0c: /* grp d9/4 */
4340 switch(rm) {
4341 case 0: /* fchs */
4342 gen_op_fchs_ST0();
4343 break;
4344 case 1: /* fabs */
4345 gen_op_fabs_ST0();
4346 break;
4347 case 4: /* ftst */
4348 gen_op_fldz_FT0();
4349 gen_op_fcom_ST0_FT0();
4350 break;
4351 case 5: /* fxam */
4352 gen_op_fxam_ST0();
4353 break;
4354 default:
4355 goto illegal_op;
4357 break;
4358 case 0x0d: /* grp d9/5 */
4360 switch(rm) {
4361 case 0:
4362 gen_op_fpush();
4363 gen_op_fld1_ST0();
4364 break;
4365 case 1:
4366 gen_op_fpush();
4367 gen_op_fldl2t_ST0();
4368 break;
4369 case 2:
4370 gen_op_fpush();
4371 gen_op_fldl2e_ST0();
4372 break;
4373 case 3:
4374 gen_op_fpush();
4375 gen_op_fldpi_ST0();
4376 break;
4377 case 4:
4378 gen_op_fpush();
4379 gen_op_fldlg2_ST0();
4380 break;
4381 case 5:
4382 gen_op_fpush();
4383 gen_op_fldln2_ST0();
4384 break;
4385 case 6:
4386 gen_op_fpush();
4387 gen_op_fldz_ST0();
4388 break;
4389 default:
4390 goto illegal_op;
4393 break;
4394 case 0x0e: /* grp d9/6 */
4395 switch(rm) {
4396 case 0: /* f2xm1 */
4397 gen_op_f2xm1();
4398 break;
4399 case 1: /* fyl2x */
4400 gen_op_fyl2x();
4401 break;
4402 case 2: /* fptan */
4403 gen_op_fptan();
4404 break;
4405 case 3: /* fpatan */
4406 gen_op_fpatan();
4407 break;
4408 case 4: /* fxtract */
4409 gen_op_fxtract();
4410 break;
4411 case 5: /* fprem1 */
4412 gen_op_fprem1();
4413 break;
4414 case 6: /* fdecstp */
4415 gen_op_fdecstp();
4416 break;
4417 default:
4418 case 7: /* fincstp */
4419 gen_op_fincstp();
4420 break;
4422 break;
4423 case 0x0f: /* grp d9/7 */
4424 switch(rm) {
4425 case 0: /* fprem */
4426 gen_op_fprem();
4427 break;
4428 case 1: /* fyl2xp1 */
4429 gen_op_fyl2xp1();
4430 break;
4431 case 2: /* fsqrt */
4432 gen_op_fsqrt();
4433 break;
4434 case 3: /* fsincos */
4435 gen_op_fsincos();
4436 break;
4437 case 5: /* fscale */
4438 gen_op_fscale();
4439 break;
4440 case 4: /* frndint */
4441 gen_op_frndint();
4442 break;
4443 case 6: /* fsin */
4444 gen_op_fsin();
4445 break;
4446 default:
4447 case 7: /* fcos */
4448 gen_op_fcos();
4449 break;
4451 break;
4452 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4453 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4454 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4456 int op1;
4458 op1 = op & 7;
4459 if (op >= 0x20) {
4460 gen_op_fp_arith_STN_ST0[op1](opreg);
4461 if (op >= 0x30)
4462 gen_op_fpop();
4463 } else {
4464 gen_op_fmov_FT0_STN(opreg);
4465 gen_op_fp_arith_ST0_FT0[op1]();
4468 break;
4469 case 0x02: /* fcom */
4470 case 0x22: /* fcom2, undocumented op */
4471 gen_op_fmov_FT0_STN(opreg);
4472 gen_op_fcom_ST0_FT0();
4473 break;
4474 case 0x03: /* fcomp */
4475 case 0x23: /* fcomp3, undocumented op */
4476 case 0x32: /* fcomp5, undocumented op */
4477 gen_op_fmov_FT0_STN(opreg);
4478 gen_op_fcom_ST0_FT0();
4479 gen_op_fpop();
4480 break;
4481 case 0x15: /* da/5 */
4482 switch(rm) {
4483 case 1: /* fucompp */
4484 gen_op_fmov_FT0_STN(1);
4485 gen_op_fucom_ST0_FT0();
4486 gen_op_fpop();
4487 gen_op_fpop();
4488 break;
4489 default:
4490 goto illegal_op;
4492 break;
4493 case 0x1c:
4494 switch(rm) {
4495 case 0: /* feni (287 only, just do nop here) */
4496 break;
4497 case 1: /* fdisi (287 only, just do nop here) */
4498 break;
4499 case 2: /* fclex */
4500 gen_op_fclex();
4501 break;
4502 case 3: /* fninit */
4503 gen_op_fninit();
4504 break;
4505 case 4: /* fsetpm (287 only, just do nop here) */
4506 break;
4507 default:
4508 goto illegal_op;
4510 break;
4511 case 0x1d: /* fucomi */
4512 if (s->cc_op != CC_OP_DYNAMIC)
4513 gen_op_set_cc_op(s->cc_op);
4514 gen_op_fmov_FT0_STN(opreg);
4515 gen_op_fucomi_ST0_FT0();
4516 s->cc_op = CC_OP_EFLAGS;
4517 break;
4518 case 0x1e: /* fcomi */
4519 if (s->cc_op != CC_OP_DYNAMIC)
4520 gen_op_set_cc_op(s->cc_op);
4521 gen_op_fmov_FT0_STN(opreg);
4522 gen_op_fcomi_ST0_FT0();
4523 s->cc_op = CC_OP_EFLAGS;
4524 break;
4525 case 0x28: /* ffree sti */
4526 gen_op_ffree_STN(opreg);
4527 break;
4528 case 0x2a: /* fst sti */
4529 gen_op_fmov_STN_ST0(opreg);
4530 break;
4531 case 0x2b: /* fstp sti */
4532 case 0x0b: /* fstp1 sti, undocumented op */
4533 case 0x3a: /* fstp8 sti, undocumented op */
4534 case 0x3b: /* fstp9 sti, undocumented op */
4535 gen_op_fmov_STN_ST0(opreg);
4536 gen_op_fpop();
4537 break;
4538 case 0x2c: /* fucom st(i) */
4539 gen_op_fmov_FT0_STN(opreg);
4540 gen_op_fucom_ST0_FT0();
4541 break;
4542 case 0x2d: /* fucomp st(i) */
4543 gen_op_fmov_FT0_STN(opreg);
4544 gen_op_fucom_ST0_FT0();
4545 gen_op_fpop();
4546 break;
4547 case 0x33: /* de/3 */
4548 switch(rm) {
4549 case 1: /* fcompp */
4550 gen_op_fmov_FT0_STN(1);
4551 gen_op_fcom_ST0_FT0();
4552 gen_op_fpop();
4553 gen_op_fpop();
4554 break;
4555 default:
4556 goto illegal_op;
4558 break;
4559 case 0x38: /* ffreep sti, undocumented op */
4560 gen_op_ffree_STN(opreg);
4561 gen_op_fpop();
4562 break;
4563 case 0x3c: /* df/4 */
4564 switch(rm) {
4565 case 0:
4566 gen_op_fnstsw_EAX();
4567 break;
4568 default:
4569 goto illegal_op;
4571 break;
4572 case 0x3d: /* fucomip */
4573 if (s->cc_op != CC_OP_DYNAMIC)
4574 gen_op_set_cc_op(s->cc_op);
4575 gen_op_fmov_FT0_STN(opreg);
4576 gen_op_fucomi_ST0_FT0();
4577 gen_op_fpop();
4578 s->cc_op = CC_OP_EFLAGS;
4579 break;
4580 case 0x3e: /* fcomip */
4581 if (s->cc_op != CC_OP_DYNAMIC)
4582 gen_op_set_cc_op(s->cc_op);
4583 gen_op_fmov_FT0_STN(opreg);
4584 gen_op_fcomi_ST0_FT0();
4585 gen_op_fpop();
4586 s->cc_op = CC_OP_EFLAGS;
4587 break;
4588 case 0x10 ... 0x13: /* fcmovxx */
4589 case 0x18 ... 0x1b:
4591 int op1;
4592 const static uint8_t fcmov_cc[8] = {
4593 (JCC_B << 1),
4594 (JCC_Z << 1),
4595 (JCC_BE << 1),
4596 (JCC_P << 1),
4598 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4599 gen_setcc(s, op1);
4600 gen_op_fcmov_ST0_STN_T0(opreg);
4602 break;
4603 default:
4604 goto illegal_op;
4607 #ifdef USE_CODE_COPY
4608 s->tb->cflags |= CF_TB_FP_USED;
4609 #endif
4610 break;
4611 /************************/
4612 /* string ops */
4614 case 0xa4: /* movsS */
4615 case 0xa5:
4616 if ((b & 1) == 0)
4617 ot = OT_BYTE;
4618 else
4619 ot = dflag + OT_WORD;
4621 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4622 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4623 } else {
4624 gen_movs(s, ot);
4626 break;
4628 case 0xaa: /* stosS */
4629 case 0xab:
4630 if ((b & 1) == 0)
4631 ot = OT_BYTE;
4632 else
4633 ot = dflag + OT_WORD;
4635 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4636 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4637 } else {
4638 gen_stos(s, ot);
4640 break;
4641 case 0xac: /* lodsS */
4642 case 0xad:
4643 if ((b & 1) == 0)
4644 ot = OT_BYTE;
4645 else
4646 ot = dflag + OT_WORD;
4647 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4648 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4649 } else {
4650 gen_lods(s, ot);
4652 break;
4653 case 0xae: /* scasS */
4654 case 0xaf:
4655 if ((b & 1) == 0)
4656 ot = OT_BYTE;
4657 else
4658 ot = dflag + OT_WORD;
4659 if (prefixes & PREFIX_REPNZ) {
4660 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4661 } else if (prefixes & PREFIX_REPZ) {
4662 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4663 } else {
4664 gen_scas(s, ot);
4665 s->cc_op = CC_OP_SUBB + ot;
4667 break;
4669 case 0xa6: /* cmpsS */
4670 case 0xa7:
4671 if ((b & 1) == 0)
4672 ot = OT_BYTE;
4673 else
4674 ot = dflag + OT_WORD;
4675 if (prefixes & PREFIX_REPNZ) {
4676 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4677 } else if (prefixes & PREFIX_REPZ) {
4678 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4679 } else {
4680 gen_cmps(s, ot);
4681 s->cc_op = CC_OP_SUBB + ot;
4683 break;
4684 case 0x6c: /* insS */
4685 case 0x6d:
4686 if ((b & 1) == 0)
4687 ot = OT_BYTE;
4688 else
4689 ot = dflag ? OT_LONG : OT_WORD;
4690 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4691 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4692 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4693 } else {
4694 gen_ins(s, ot);
4696 break;
4697 case 0x6e: /* outsS */
4698 case 0x6f:
4699 if ((b & 1) == 0)
4700 ot = OT_BYTE;
4701 else
4702 ot = dflag ? OT_LONG : OT_WORD;
4703 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4704 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4705 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4706 } else {
4707 gen_outs(s, ot);
4709 break;
4711 /************************/
4712 /* port I/O */
4713 case 0xe4:
4714 case 0xe5:
4715 if ((b & 1) == 0)
4716 ot = OT_BYTE;
4717 else
4718 ot = dflag ? OT_LONG : OT_WORD;
4719 val = ldub_code(s->pc++);
4720 gen_op_movl_T0_im(val);
4721 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4722 gen_op_in[ot]();
4723 gen_op_mov_reg_T1[ot][R_EAX]();
4724 break;
4725 case 0xe6:
4726 case 0xe7:
4727 if ((b & 1) == 0)
4728 ot = OT_BYTE;
4729 else
4730 ot = dflag ? OT_LONG : OT_WORD;
4731 val = ldub_code(s->pc++);
4732 gen_op_movl_T0_im(val);
4733 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4734 gen_op_mov_TN_reg[ot][1][R_EAX]();
4735 gen_op_out[ot]();
4736 break;
4737 case 0xec:
4738 case 0xed:
4739 if ((b & 1) == 0)
4740 ot = OT_BYTE;
4741 else
4742 ot = dflag ? OT_LONG : OT_WORD;
4743 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4744 gen_op_andl_T0_ffff();
4745 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4746 gen_op_in[ot]();
4747 gen_op_mov_reg_T1[ot][R_EAX]();
4748 break;
4749 case 0xee:
4750 case 0xef:
4751 if ((b & 1) == 0)
4752 ot = OT_BYTE;
4753 else
4754 ot = dflag ? OT_LONG : OT_WORD;
4755 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4756 gen_op_andl_T0_ffff();
4757 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4758 gen_op_mov_TN_reg[ot][1][R_EAX]();
4759 gen_op_out[ot]();
4760 break;
4762 /************************/
4763 /* control */
4764 case 0xc2: /* ret im */
4765 val = ldsw_code(s->pc);
4766 s->pc += 2;
4767 gen_pop_T0(s);
4768 gen_stack_update(s, val + (2 << s->dflag));
4769 if (s->dflag == 0)
4770 gen_op_andl_T0_ffff();
4771 gen_op_jmp_T0();
4772 gen_eob(s);
4773 break;
4774 case 0xc3: /* ret */
4775 gen_pop_T0(s);
4776 gen_pop_update(s);
4777 if (s->dflag == 0)
4778 gen_op_andl_T0_ffff();
4779 gen_op_jmp_T0();
4780 gen_eob(s);
4781 break;
4782 case 0xca: /* lret im */
4783 val = ldsw_code(s->pc);
4784 s->pc += 2;
4785 do_lret:
4786 if (s->pe && !s->vm86) {
4787 if (s->cc_op != CC_OP_DYNAMIC)
4788 gen_op_set_cc_op(s->cc_op);
4789 gen_jmp_im(pc_start - s->cs_base);
4790 gen_op_lret_protected(s->dflag, val);
4791 } else {
4792 gen_stack_A0(s);
4793 /* pop offset */
4794 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4795 if (s->dflag == 0)
4796 gen_op_andl_T0_ffff();
4797 /* NOTE: keeping EIP updated is not a problem in case of
4798 exception */
4799 gen_op_jmp_T0();
4800 /* pop selector */
4801 gen_op_addl_A0_im(2 << s->dflag);
4802 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4803 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4804 /* add stack offset */
4805 gen_stack_update(s, val + (4 << s->dflag));
4807 gen_eob(s);
4808 break;
4809 case 0xcb: /* lret */
4810 val = 0;
4811 goto do_lret;
4812 case 0xcf: /* iret */
4813 if (!s->pe) {
4814 /* real mode */
4815 gen_op_iret_real(s->dflag);
4816 s->cc_op = CC_OP_EFLAGS;
4817 } else if (s->vm86) {
4818 if (s->iopl != 3) {
4819 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4820 } else {
4821 gen_op_iret_real(s->dflag);
4822 s->cc_op = CC_OP_EFLAGS;
4824 } else {
4825 if (s->cc_op != CC_OP_DYNAMIC)
4826 gen_op_set_cc_op(s->cc_op);
4827 gen_jmp_im(pc_start - s->cs_base);
4828 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4829 s->cc_op = CC_OP_EFLAGS;
4831 gen_eob(s);
4832 break;
4833 case 0xe8: /* call im */
4835 if (dflag)
4836 tval = (int32_t)insn_get(s, OT_LONG);
4837 else
4838 tval = (int16_t)insn_get(s, OT_WORD);
4839 next_eip = s->pc - s->cs_base;
4840 tval += next_eip;
4841 if (s->dflag == 0)
4842 tval &= 0xffff;
4843 gen_movtl_T0_im(next_eip);
4844 gen_push_T0(s);
4845 gen_jmp(s, tval);
4847 break;
4848 case 0x9a: /* lcall im */
4850 unsigned int selector, offset;
4852 if (CODE64(s))
4853 goto illegal_op;
4854 ot = dflag ? OT_LONG : OT_WORD;
4855 offset = insn_get(s, ot);
4856 selector = insn_get(s, OT_WORD);
4858 gen_op_movl_T0_im(selector);
4859 gen_op_movl_T1_imu(offset);
4861 goto do_lcall;
4862 case 0xe9: /* jmp */
4863 if (dflag)
4864 tval = (int32_t)insn_get(s, OT_LONG);
4865 else
4866 tval = (int16_t)insn_get(s, OT_WORD);
4867 tval += s->pc - s->cs_base;
4868 if (s->dflag == 0)
4869 tval &= 0xffff;
4870 gen_jmp(s, tval);
4871 break;
4872 case 0xea: /* ljmp im */
4874 unsigned int selector, offset;
4876 if (CODE64(s))
4877 goto illegal_op;
4878 ot = dflag ? OT_LONG : OT_WORD;
4879 offset = insn_get(s, ot);
4880 selector = insn_get(s, OT_WORD);
4882 gen_op_movl_T0_im(selector);
4883 gen_op_movl_T1_imu(offset);
4885 goto do_ljmp;
4886 case 0xeb: /* jmp Jb */
4887 tval = (int8_t)insn_get(s, OT_BYTE);
4888 tval += s->pc - s->cs_base;
4889 if (s->dflag == 0)
4890 tval &= 0xffff;
4891 gen_jmp(s, tval);
4892 break;
4893 case 0x70 ... 0x7f: /* jcc Jb */
4894 tval = (int8_t)insn_get(s, OT_BYTE);
4895 goto do_jcc;
4896 case 0x180 ... 0x18f: /* jcc Jv */
4897 if (dflag) {
4898 tval = (int32_t)insn_get(s, OT_LONG);
4899 } else {
4900 tval = (int16_t)insn_get(s, OT_WORD);
4902 do_jcc:
4903 next_eip = s->pc - s->cs_base;
4904 tval += next_eip;
4905 if (s->dflag == 0)
4906 tval &= 0xffff;
4907 gen_jcc(s, b, tval, next_eip);
4908 break;
4910 case 0x190 ... 0x19f: /* setcc Gv */
4911 modrm = ldub_code(s->pc++);
4912 gen_setcc(s, b);
4913 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
4914 break;
4915 case 0x140 ... 0x14f: /* cmov Gv, Ev */
4916 ot = dflag + OT_WORD;
4917 modrm = ldub_code(s->pc++);
4918 reg = ((modrm >> 3) & 7) | rex_r;
4919 mod = (modrm >> 6) & 3;
4920 gen_setcc(s, b);
4921 if (mod != 3) {
4922 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4923 gen_op_ld_T1_A0[ot + s->mem_index]();
4924 } else {
4925 rm = (modrm & 7) | REX_B(s);
4926 gen_op_mov_TN_reg[ot][1][rm]();
4928 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
4929 break;
4931 /************************/
4932 /* flags */
4933 case 0x9c: /* pushf */
4934 if (s->vm86 && s->iopl != 3) {
4935 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4936 } else {
4937 if (s->cc_op != CC_OP_DYNAMIC)
4938 gen_op_set_cc_op(s->cc_op);
4939 gen_op_movl_T0_eflags();
4940 gen_push_T0(s);
4942 break;
4943 case 0x9d: /* popf */
4944 if (s->vm86 && s->iopl != 3) {
4945 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4946 } else {
4947 gen_pop_T0(s);
4948 if (s->cpl == 0) {
4949 if (s->dflag) {
4950 gen_op_movl_eflags_T0_cpl0();
4951 } else {
4952 gen_op_movw_eflags_T0_cpl0();
4954 } else {
4955 if (s->cpl <= s->iopl) {
4956 if (s->dflag) {
4957 gen_op_movl_eflags_T0_io();
4958 } else {
4959 gen_op_movw_eflags_T0_io();
4961 } else {
4962 if (s->dflag) {
4963 gen_op_movl_eflags_T0();
4964 } else {
4965 gen_op_movw_eflags_T0();
4969 gen_pop_update(s);
4970 s->cc_op = CC_OP_EFLAGS;
4971 /* abort translation because TF flag may change */
4972 gen_jmp_im(s->pc - s->cs_base);
4973 gen_eob(s);
4975 break;
4976 case 0x9e: /* sahf */
4977 if (CODE64(s))
4978 goto illegal_op;
4979 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
4980 if (s->cc_op != CC_OP_DYNAMIC)
4981 gen_op_set_cc_op(s->cc_op);
4982 gen_op_movb_eflags_T0();
4983 s->cc_op = CC_OP_EFLAGS;
4984 break;
4985 case 0x9f: /* lahf */
4986 if (CODE64(s))
4987 goto illegal_op;
4988 if (s->cc_op != CC_OP_DYNAMIC)
4989 gen_op_set_cc_op(s->cc_op);
4990 gen_op_movl_T0_eflags();
4991 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
4992 break;
4993 case 0xf5: /* cmc */
4994 if (s->cc_op != CC_OP_DYNAMIC)
4995 gen_op_set_cc_op(s->cc_op);
4996 gen_op_cmc();
4997 s->cc_op = CC_OP_EFLAGS;
4998 break;
4999 case 0xf8: /* clc */
5000 if (s->cc_op != CC_OP_DYNAMIC)
5001 gen_op_set_cc_op(s->cc_op);
5002 gen_op_clc();
5003 s->cc_op = CC_OP_EFLAGS;
5004 break;
5005 case 0xf9: /* stc */
5006 if (s->cc_op != CC_OP_DYNAMIC)
5007 gen_op_set_cc_op(s->cc_op);
5008 gen_op_stc();
5009 s->cc_op = CC_OP_EFLAGS;
5010 break;
5011 case 0xfc: /* cld */
5012 gen_op_cld();
5013 break;
5014 case 0xfd: /* std */
5015 gen_op_std();
5016 break;
5018 /************************/
5019 /* bit operations */
5020 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5021 ot = dflag + OT_WORD;
5022 modrm = ldub_code(s->pc++);
5023 op = ((modrm >> 3) & 7) | rex_r;
5024 mod = (modrm >> 6) & 3;
5025 rm = (modrm & 7) | REX_B(s);
5026 if (mod != 3) {
5027 s->rip_offset = 1;
5028 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5029 gen_op_ld_T0_A0[ot + s->mem_index]();
5030 } else {
5031 gen_op_mov_TN_reg[ot][0][rm]();
5033 /* load shift */
5034 val = ldub_code(s->pc++);
5035 gen_op_movl_T1_im(val);
5036 if (op < 4)
5037 goto illegal_op;
5038 op -= 4;
5039 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5040 s->cc_op = CC_OP_SARB + ot;
5041 if (op != 0) {
5042 if (mod != 3)
5043 gen_op_st_T0_A0[ot + s->mem_index]();
5044 else
5045 gen_op_mov_reg_T0[ot][rm]();
5046 gen_op_update_bt_cc();
5048 break;
5049 case 0x1a3: /* bt Gv, Ev */
5050 op = 0;
5051 goto do_btx;
5052 case 0x1ab: /* bts */
5053 op = 1;
5054 goto do_btx;
5055 case 0x1b3: /* btr */
5056 op = 2;
5057 goto do_btx;
5058 case 0x1bb: /* btc */
5059 op = 3;
5060 do_btx:
5061 ot = dflag + OT_WORD;
5062 modrm = ldub_code(s->pc++);
5063 reg = ((modrm >> 3) & 7) | rex_r;
5064 mod = (modrm >> 6) & 3;
5065 rm = (modrm & 7) | REX_B(s);
5066 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5067 if (mod != 3) {
5068 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5069 /* specific case: we need to add a displacement */
5070 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5071 gen_op_ld_T0_A0[ot + s->mem_index]();
5072 } else {
5073 gen_op_mov_TN_reg[ot][0][rm]();
5075 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5076 s->cc_op = CC_OP_SARB + ot;
5077 if (op != 0) {
5078 if (mod != 3)
5079 gen_op_st_T0_A0[ot + s->mem_index]();
5080 else
5081 gen_op_mov_reg_T0[ot][rm]();
5082 gen_op_update_bt_cc();
5084 break;
5085 case 0x1bc: /* bsf */
5086 case 0x1bd: /* bsr */
5087 ot = dflag + OT_WORD;
5088 modrm = ldub_code(s->pc++);
5089 reg = ((modrm >> 3) & 7) | rex_r;
5090 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5091 /* NOTE: in order to handle the 0 case, we must load the
5092 result. It could be optimized with a generated jump */
5093 gen_op_mov_TN_reg[ot][1][reg]();
5094 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5095 gen_op_mov_reg_T1[ot][reg]();
5096 s->cc_op = CC_OP_LOGICB + ot;
5097 break;
5098 /************************/
5099 /* bcd */
5100 case 0x27: /* daa */
5101 if (CODE64(s))
5102 goto illegal_op;
5103 if (s->cc_op != CC_OP_DYNAMIC)
5104 gen_op_set_cc_op(s->cc_op);
5105 gen_op_daa();
5106 s->cc_op = CC_OP_EFLAGS;
5107 break;
5108 case 0x2f: /* das */
5109 if (CODE64(s))
5110 goto illegal_op;
5111 if (s->cc_op != CC_OP_DYNAMIC)
5112 gen_op_set_cc_op(s->cc_op);
5113 gen_op_das();
5114 s->cc_op = CC_OP_EFLAGS;
5115 break;
5116 case 0x37: /* aaa */
5117 if (CODE64(s))
5118 goto illegal_op;
5119 if (s->cc_op != CC_OP_DYNAMIC)
5120 gen_op_set_cc_op(s->cc_op);
5121 gen_op_aaa();
5122 s->cc_op = CC_OP_EFLAGS;
5123 break;
5124 case 0x3f: /* aas */
5125 if (CODE64(s))
5126 goto illegal_op;
5127 if (s->cc_op != CC_OP_DYNAMIC)
5128 gen_op_set_cc_op(s->cc_op);
5129 gen_op_aas();
5130 s->cc_op = CC_OP_EFLAGS;
5131 break;
5132 case 0xd4: /* aam */
5133 if (CODE64(s))
5134 goto illegal_op;
5135 val = ldub_code(s->pc++);
5136 gen_op_aam(val);
5137 s->cc_op = CC_OP_LOGICB;
5138 break;
5139 case 0xd5: /* aad */
5140 if (CODE64(s))
5141 goto illegal_op;
5142 val = ldub_code(s->pc++);
5143 gen_op_aad(val);
5144 s->cc_op = CC_OP_LOGICB;
5145 break;
5146 /************************/
5147 /* misc */
5148 case 0x90: /* nop */
5149 /* XXX: xchg + rex handling */
5150 /* XXX: correct lock test for all insn */
5151 if (prefixes & PREFIX_LOCK)
5152 goto illegal_op;
5153 break;
5154 case 0x9b: /* fwait */
5155 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5156 (HF_MP_MASK | HF_TS_MASK)) {
5157 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5158 } else {
5159 if (s->cc_op != CC_OP_DYNAMIC)
5160 gen_op_set_cc_op(s->cc_op);
5161 gen_jmp_im(pc_start - s->cs_base);
5162 gen_op_fwait();
5164 break;
5165 case 0xcc: /* int3 */
5166 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5167 break;
5168 case 0xcd: /* int N */
5169 val = ldub_code(s->pc++);
5170 if (s->vm86 && s->iopl != 3) {
5171 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5172 } else {
5173 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5175 break;
5176 case 0xce: /* into */
5177 if (CODE64(s))
5178 goto illegal_op;
5179 if (s->cc_op != CC_OP_DYNAMIC)
5180 gen_op_set_cc_op(s->cc_op);
5181 gen_jmp_im(pc_start - s->cs_base);
5182 gen_op_into(s->pc - pc_start);
5183 break;
5184 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5185 gen_debug(s, pc_start - s->cs_base);
5186 break;
5187 case 0xfa: /* cli */
5188 if (!s->vm86) {
5189 if (s->cpl <= s->iopl) {
5190 gen_op_cli();
5191 } else {
5192 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5194 } else {
5195 if (s->iopl == 3) {
5196 gen_op_cli();
5197 } else {
5198 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5201 break;
5202 case 0xfb: /* sti */
5203 if (!s->vm86) {
5204 if (s->cpl <= s->iopl) {
5205 gen_sti:
5206 gen_op_sti();
5207 /* interruptions are enabled only the first insn after sti */
5208 /* If several instructions disable interrupts, only the
5209 _first_ does it */
5210 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5211 gen_op_set_inhibit_irq();
5212 /* give a chance to handle pending irqs */
5213 gen_jmp_im(s->pc - s->cs_base);
5214 gen_eob(s);
5215 } else {
5216 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5218 } else {
5219 if (s->iopl == 3) {
5220 goto gen_sti;
5221 } else {
5222 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5225 break;
5226 case 0x62: /* bound */
5227 if (CODE64(s))
5228 goto illegal_op;
5229 ot = dflag ? OT_LONG : OT_WORD;
5230 modrm = ldub_code(s->pc++);
5231 reg = (modrm >> 3) & 7;
5232 mod = (modrm >> 6) & 3;
5233 if (mod == 3)
5234 goto illegal_op;
5235 gen_op_mov_TN_reg[ot][0][reg]();
5236 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5237 gen_jmp_im(pc_start - s->cs_base);
5238 if (ot == OT_WORD)
5239 gen_op_boundw();
5240 else
5241 gen_op_boundl();
5242 break;
5243 case 0x1c8 ... 0x1cf: /* bswap reg */
5244 reg = (b & 7) | REX_B(s);
5245 #ifdef TARGET_X86_64
5246 if (dflag == 2) {
5247 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5248 gen_op_bswapq_T0();
5249 gen_op_mov_reg_T0[OT_QUAD][reg]();
5250 } else
5251 #endif
5253 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5254 gen_op_bswapl_T0();
5255 gen_op_mov_reg_T0[OT_LONG][reg]();
5257 break;
5258 case 0xd6: /* salc */
5259 if (CODE64(s))
5260 goto illegal_op;
5261 if (s->cc_op != CC_OP_DYNAMIC)
5262 gen_op_set_cc_op(s->cc_op);
5263 gen_op_salc();
5264 break;
5265 case 0xe0: /* loopnz */
5266 case 0xe1: /* loopz */
5267 if (s->cc_op != CC_OP_DYNAMIC)
5268 gen_op_set_cc_op(s->cc_op);
5269 /* FALL THRU */
5270 case 0xe2: /* loop */
5271 case 0xe3: /* jecxz */
5273 int l1, l2;
5275 tval = (int8_t)insn_get(s, OT_BYTE);
5276 next_eip = s->pc - s->cs_base;
5277 tval += next_eip;
5278 if (s->dflag == 0)
5279 tval &= 0xffff;
5281 l1 = gen_new_label();
5282 l2 = gen_new_label();
5283 b &= 3;
5284 if (b == 3) {
5285 gen_op_jz_ecx[s->aflag](l1);
5286 } else {
5287 gen_op_dec_ECX[s->aflag]();
5288 if (b <= 1)
5289 gen_op_mov_T0_cc();
5290 gen_op_loop[s->aflag][b](l1);
5293 gen_jmp_im(next_eip);
5294 gen_op_jmp_label(l2);
5295 gen_set_label(l1);
5296 gen_jmp_im(tval);
5297 gen_set_label(l2);
5298 gen_eob(s);
5300 break;
5301 case 0x130: /* wrmsr */
5302 case 0x132: /* rdmsr */
5303 if (s->cpl != 0) {
5304 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5305 } else {
5306 if (b & 2)
5307 gen_op_rdmsr();
5308 else
5309 gen_op_wrmsr();
5311 break;
5312 case 0x131: /* rdtsc */
5313 gen_op_rdtsc();
5314 break;
5315 case 0x134: /* sysenter */
5316 if (CODE64(s))
5317 goto illegal_op;
5318 if (!s->pe) {
5319 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5320 } else {
5321 if (s->cc_op != CC_OP_DYNAMIC) {
5322 gen_op_set_cc_op(s->cc_op);
5323 s->cc_op = CC_OP_DYNAMIC;
5325 gen_jmp_im(pc_start - s->cs_base);
5326 gen_op_sysenter();
5327 gen_eob(s);
5329 break;
5330 case 0x135: /* sysexit */
5331 if (CODE64(s))
5332 goto illegal_op;
5333 if (!s->pe) {
5334 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5335 } else {
5336 if (s->cc_op != CC_OP_DYNAMIC) {
5337 gen_op_set_cc_op(s->cc_op);
5338 s->cc_op = CC_OP_DYNAMIC;
5340 gen_jmp_im(pc_start - s->cs_base);
5341 gen_op_sysexit();
5342 gen_eob(s);
5344 break;
5345 #ifdef TARGET_X86_64
5346 case 0x105: /* syscall */
5347 /* XXX: is it usable in real mode ? */
5348 if (s->cc_op != CC_OP_DYNAMIC) {
5349 gen_op_set_cc_op(s->cc_op);
5350 s->cc_op = CC_OP_DYNAMIC;
5352 gen_jmp_im(pc_start - s->cs_base);
5353 gen_op_syscall(s->pc - pc_start);
5354 gen_eob(s);
5355 break;
5356 case 0x107: /* sysret */
5357 if (!s->pe) {
5358 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5359 } else {
5360 if (s->cc_op != CC_OP_DYNAMIC) {
5361 gen_op_set_cc_op(s->cc_op);
5362 s->cc_op = CC_OP_DYNAMIC;
5364 gen_jmp_im(pc_start - s->cs_base);
5365 gen_op_sysret(s->dflag);
5366 gen_eob(s);
5368 break;
5369 #endif
5370 case 0x1a2: /* cpuid */
5371 gen_op_cpuid();
5372 break;
5373 case 0xf4: /* hlt */
5374 if (s->cpl != 0) {
5375 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5376 } else {
5377 if (s->cc_op != CC_OP_DYNAMIC)
5378 gen_op_set_cc_op(s->cc_op);
5379 gen_jmp_im(s->pc - s->cs_base);
5380 gen_op_hlt();
5381 s->is_jmp = 3;
5383 break;
5384 case 0x100:
5385 modrm = ldub_code(s->pc++);
5386 mod = (modrm >> 6) & 3;
5387 op = (modrm >> 3) & 7;
5388 switch(op) {
5389 case 0: /* sldt */
5390 if (!s->pe || s->vm86)
5391 goto illegal_op;
5392 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5393 ot = OT_WORD;
5394 if (mod == 3)
5395 ot += s->dflag;
5396 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5397 break;
5398 case 2: /* lldt */
5399 if (!s->pe || s->vm86)
5400 goto illegal_op;
5401 if (s->cpl != 0) {
5402 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5403 } else {
5404 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5405 gen_jmp_im(pc_start - s->cs_base);
5406 gen_op_lldt_T0();
5408 break;
5409 case 1: /* str */
5410 if (!s->pe || s->vm86)
5411 goto illegal_op;
5412 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5413 ot = OT_WORD;
5414 if (mod == 3)
5415 ot += s->dflag;
5416 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5417 break;
5418 case 3: /* ltr */
5419 if (!s->pe || s->vm86)
5420 goto illegal_op;
5421 if (s->cpl != 0) {
5422 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5423 } else {
5424 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5425 gen_jmp_im(pc_start - s->cs_base);
5426 gen_op_ltr_T0();
5428 break;
5429 case 4: /* verr */
5430 case 5: /* verw */
5431 if (!s->pe || s->vm86)
5432 goto illegal_op;
5433 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5434 if (s->cc_op != CC_OP_DYNAMIC)
5435 gen_op_set_cc_op(s->cc_op);
5436 if (op == 4)
5437 gen_op_verr();
5438 else
5439 gen_op_verw();
5440 s->cc_op = CC_OP_EFLAGS;
5441 break;
5442 default:
5443 goto illegal_op;
5445 break;
5446 case 0x101:
5447 modrm = ldub_code(s->pc++);
5448 mod = (modrm >> 6) & 3;
5449 op = (modrm >> 3) & 7;
5450 switch(op) {
5451 case 0: /* sgdt */
5452 case 1: /* sidt */
5453 if (mod == 3)
5454 goto illegal_op;
5455 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5456 if (op == 0)
5457 gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
5458 else
5459 gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
5460 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5461 #ifdef TARGET_X86_64
5462 if (CODE64(s))
5463 gen_op_addq_A0_im(2);
5464 else
5465 #endif
5466 gen_op_addl_A0_im(2);
5467 if (op == 0)
5468 gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
5469 else
5470 gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
5471 if (!s->dflag)
5472 gen_op_andl_T0_im(0xffffff);
5473 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5474 break;
5475 case 2: /* lgdt */
5476 case 3: /* lidt */
5477 if (mod == 3)
5478 goto illegal_op;
5479 if (s->cpl != 0) {
5480 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5481 } else {
5482 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5483 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5484 #ifdef TARGET_X86_64
5485 if (CODE64(s))
5486 gen_op_addq_A0_im(2);
5487 else
5488 #endif
5489 gen_op_addl_A0_im(2);
5490 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5491 if (!s->dflag)
5492 gen_op_andl_T0_im(0xffffff);
5493 if (op == 2) {
5494 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5495 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5496 } else {
5497 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5498 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5501 break;
5502 case 4: /* smsw */
5503 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5504 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5505 break;
5506 case 6: /* lmsw */
5507 if (s->cpl != 0) {
5508 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5509 } else {
5510 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5511 gen_op_lmsw_T0();
5512 gen_jmp_im(s->pc - s->cs_base);
5513 gen_eob(s);
5515 break;
5516 case 7: /* invlpg */
5517 if (s->cpl != 0) {
5518 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5519 } else {
5520 if (mod == 3) {
5521 #ifdef TARGET_X86_64
5522 if (CODE64(s) && (modrm & 7) == 0) {
5523 /* swapgs */
5524 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5525 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5526 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5527 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5528 } else
5529 #endif
5531 goto illegal_op;
5533 } else {
5534 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5535 gen_op_invlpg_A0();
5536 gen_jmp_im(s->pc - s->cs_base);
5537 gen_eob(s);
5540 break;
5541 default:
5542 goto illegal_op;
5544 break;
5545 case 0x108: /* invd */
5546 case 0x109: /* wbinvd */
5547 if (s->cpl != 0) {
5548 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5549 } else {
5550 /* nothing to do */
5552 break;
5553 case 0x63: /* arpl or movslS (x86_64) */
5554 #ifdef TARGET_X86_64
5555 if (CODE64(s)) {
5556 int d_ot;
5557 /* d_ot is the size of destination */
5558 d_ot = dflag + OT_WORD;
5560 modrm = ldub_code(s->pc++);
5561 reg = ((modrm >> 3) & 7) | rex_r;
5562 mod = (modrm >> 6) & 3;
5563 rm = (modrm & 7) | REX_B(s);
5565 if (mod == 3) {
5566 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5567 /* sign extend */
5568 if (d_ot == OT_QUAD)
5569 gen_op_movslq_T0_T0();
5570 gen_op_mov_reg_T0[d_ot][reg]();
5571 } else {
5572 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5573 if (d_ot == OT_QUAD) {
5574 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5575 } else {
5576 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5578 gen_op_mov_reg_T0[d_ot][reg]();
5580 } else
5581 #endif
5583 if (!s->pe || s->vm86)
5584 goto illegal_op;
5585 ot = dflag ? OT_LONG : OT_WORD;
5586 modrm = ldub_code(s->pc++);
5587 reg = (modrm >> 3) & 7;
5588 mod = (modrm >> 6) & 3;
5589 rm = modrm & 7;
5590 if (mod != 3) {
5591 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5592 gen_op_ld_T0_A0[ot + s->mem_index]();
5593 } else {
5594 gen_op_mov_TN_reg[ot][0][rm]();
5596 if (s->cc_op != CC_OP_DYNAMIC)
5597 gen_op_set_cc_op(s->cc_op);
5598 gen_op_arpl();
5599 s->cc_op = CC_OP_EFLAGS;
5600 if (mod != 3) {
5601 gen_op_st_T0_A0[ot + s->mem_index]();
5602 } else {
5603 gen_op_mov_reg_T0[ot][rm]();
5605 gen_op_arpl_update();
5607 break;
5608 case 0x102: /* lar */
5609 case 0x103: /* lsl */
5610 if (!s->pe || s->vm86)
5611 goto illegal_op;
5612 ot = dflag ? OT_LONG : OT_WORD;
5613 modrm = ldub_code(s->pc++);
5614 reg = ((modrm >> 3) & 7) | rex_r;
5615 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5616 gen_op_mov_TN_reg[ot][1][reg]();
5617 if (s->cc_op != CC_OP_DYNAMIC)
5618 gen_op_set_cc_op(s->cc_op);
5619 if (b == 0x102)
5620 gen_op_lar();
5621 else
5622 gen_op_lsl();
5623 s->cc_op = CC_OP_EFLAGS;
5624 gen_op_mov_reg_T1[ot][reg]();
5625 break;
5626 case 0x118:
5627 modrm = ldub_code(s->pc++);
5628 mod = (modrm >> 6) & 3;
5629 op = (modrm >> 3) & 7;
5630 switch(op) {
5631 case 0: /* prefetchnta */
5632 case 1: /* prefetchnt0 */
5633 case 2: /* prefetchnt0 */
5634 case 3: /* prefetchnt0 */
5635 if (mod == 3)
5636 goto illegal_op;
5637 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5638 /* nothing more to do */
5639 break;
5640 default:
5641 goto illegal_op;
5643 break;
5644 case 0x120: /* mov reg, crN */
5645 case 0x122: /* mov crN, reg */
5646 if (s->cpl != 0) {
5647 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5648 } else {
5649 modrm = ldub_code(s->pc++);
5650 if ((modrm & 0xc0) != 0xc0)
5651 goto illegal_op;
5652 rm = (modrm & 7) | REX_B(s);
5653 reg = ((modrm >> 3) & 7) | rex_r;
5654 if (CODE64(s))
5655 ot = OT_QUAD;
5656 else
5657 ot = OT_LONG;
5658 switch(reg) {
5659 case 0:
5660 case 2:
5661 case 3:
5662 case 4:
5663 case 8:
5664 if (b & 2) {
5665 gen_op_mov_TN_reg[ot][0][rm]();
5666 gen_op_movl_crN_T0(reg);
5667 gen_jmp_im(s->pc - s->cs_base);
5668 gen_eob(s);
5669 } else {
5670 #if !defined(CONFIG_USER_ONLY)
5671 if (reg == 8)
5672 gen_op_movtl_T0_cr8();
5673 else
5674 #endif
5675 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5676 gen_op_mov_reg_T0[ot][rm]();
5678 break;
5679 default:
5680 goto illegal_op;
5683 break;
5684 case 0x121: /* mov reg, drN */
5685 case 0x123: /* mov drN, reg */
5686 if (s->cpl != 0) {
5687 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5688 } else {
5689 modrm = ldub_code(s->pc++);
5690 if ((modrm & 0xc0) != 0xc0)
5691 goto illegal_op;
5692 rm = (modrm & 7) | REX_B(s);
5693 reg = ((modrm >> 3) & 7) | rex_r;
5694 if (CODE64(s))
5695 ot = OT_QUAD;
5696 else
5697 ot = OT_LONG;
5698 /* XXX: do it dynamically with CR4.DE bit */
5699 if (reg == 4 || reg == 5 || reg >= 8)
5700 goto illegal_op;
5701 if (b & 2) {
5702 gen_op_mov_TN_reg[ot][0][rm]();
5703 gen_op_movl_drN_T0(reg);
5704 gen_jmp_im(s->pc - s->cs_base);
5705 gen_eob(s);
5706 } else {
5707 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5708 gen_op_mov_reg_T0[ot][rm]();
5711 break;
5712 case 0x106: /* clts */
5713 if (s->cpl != 0) {
5714 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5715 } else {
5716 gen_op_clts();
5717 /* abort block because static cpu state changed */
5718 gen_jmp_im(s->pc - s->cs_base);
5719 gen_eob(s);
5721 break;
5722 /* MMX/SSE/SSE2/PNI support */
5723 case 0x1c3: /* MOVNTI reg, mem */
5724 if (!(s->cpuid_features & CPUID_SSE2))
5725 goto illegal_op;
5726 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5727 modrm = ldub_code(s->pc++);
5728 mod = (modrm >> 6) & 3;
5729 if (mod == 3)
5730 goto illegal_op;
5731 reg = ((modrm >> 3) & 7) | rex_r;
5732 /* generate a generic store */
5733 gen_ldst_modrm(s, modrm, ot, reg, 1);
5734 break;
5735 case 0x1ae:
5736 modrm = ldub_code(s->pc++);
5737 mod = (modrm >> 6) & 3;
5738 op = (modrm >> 3) & 7;
5739 switch(op) {
5740 case 0: /* fxsave */
5741 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5742 goto illegal_op;
5743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5744 gen_op_fxsave_A0((s->dflag == 2));
5745 break;
5746 case 1: /* fxrstor */
5747 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5748 goto illegal_op;
5749 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5750 gen_op_fxrstor_A0((s->dflag == 2));
5751 break;
5752 case 2: /* ldmxcsr */
5753 case 3: /* stmxcsr */
5754 if (s->flags & HF_TS_MASK) {
5755 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5756 break;
5758 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
5759 mod == 3)
5760 goto illegal_op;
5761 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5762 if (op == 2) {
5763 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5764 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
5765 } else {
5766 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
5767 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
5769 break;
5770 case 5: /* lfence */
5771 case 6: /* mfence */
5772 case 7: /* sfence */
5773 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
5774 goto illegal_op;
5775 break;
5776 default:
5777 goto illegal_op;
5779 break;
5780 case 0x110 ... 0x117:
5781 case 0x128 ... 0x12f:
5782 case 0x150 ... 0x177:
5783 case 0x17c ... 0x17f:
5784 case 0x1c2:
5785 case 0x1c4 ... 0x1c6:
5786 case 0x1d0 ... 0x1fe:
5787 gen_sse(s, b, pc_start, rex_r);
5788 break;
5789 default:
5790 goto illegal_op;
5792 /* lock generation */
5793 if (s->prefix & PREFIX_LOCK)
5794 gen_op_unlock();
5795 return s->pc;
5796 illegal_op:
5797 if (s->prefix & PREFIX_LOCK)
5798 gen_op_unlock();
5799 /* XXX: ensure that no lock was generated */
5800 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
5801 return s->pc;
/* Convenience masks over the CC_* flag bits (cpu.h): all six arithmetic
   flags, and the same set minus carry. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation: for each micro-op index, the set of
   EFLAGS condition-code bits the op consumes.  optimize_flags() walks
   the generated op stream backwards and uses this table to compute
   flag liveness.  Ops not listed read no flags (implicit 0). */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps on the result of a subtract */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc variants reading the dynamically-computed flags */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc variants specialised on a subtract result */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* Generates the entries for one memory-access variant (raw / kernel /
   user / default): adc/sbb consume the incoming carry, rcl/rcr rotate
   through it. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
/* flags written by an operation: for each micro-op index, the set of
   EFLAGS condition-code bits the op (re)defines.  During the backward
   liveness scan in optimize_flags(), an op whose written flags are all
   dead may be replaced by its opc_simpler[] counterpart. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Generates the flag-writing entries for one memory-access variant
   (raw / kernel / user / default) of the read-modify-write ops. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
/* simpler form of an operation if no flags need to be generated:
   maps a flag-computing micro-op to an equivalent op that skips the
   flag computation.  Entries left at 0 here are made identity maps by
   optimize_flags_init(); optimize_flags() applies the substitution
   when the written flags are dead. */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* Generates the rol/ror simplification entries for one memory-access
   variant (raw / kernel / user / default). */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6133 void optimize_flags_init(void)
6135 int i;
6136 /* put default values in arrays */
6137 for(i = 0; i < NB_OPS; i++) {
6138 if (opc_simpler[i] == 0)
6139 opc_simpler[i] = i;
6143 /* CPU flags computation optimization: we move backward thru the
6144 generated code to see which flags are needed. The operation is
6145 modified if suitable */
6146 static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6148 uint16_t *opc_ptr;
6149 int live_flags, write_flags, op;
6151 opc_ptr = opc_buf + opc_buf_len;
6152 /* live_flags contains the flags needed by the next instructions
6153 in the code. At the end of the bloc, we consider that all the
6154 flags are live. */
6155 live_flags = CC_OSZAPC;
6156 while (opc_ptr > opc_buf) {
6157 op = *--opc_ptr;
6158 /* if none of the flags written by the instruction is used,
6159 then we can try to find a simpler instruction */
6160 write_flags = opc_write_flags[op];
6161 if ((live_flags & write_flags) == 0) {
6162 *opc_ptr = opc_simpler[op];
6164 /* compute the live flags before the instruction */
6165 live_flags &= ~write_flags;
6166 live_flags |= opc_read_flags[op];
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (gen_opc_pc /
   gen_opc_cc_op / gen_opc_instr_start), used to recover the guest
   state at an arbitrary host PC.  Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the static CPU state baked into tb->flags into the
       disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: mem_index picks the softmmu
       access variant (user vs kernel); 0 means direct (no softmmu) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is disabled when single-stepping or when
       irqs were inhibited (and, without softmmu, for softmmu blocks) */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the global op/param output buffers for this block */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;  /* index of the last op with recorded PC info */

    for(;;) {
        /* emit a debug exception at any breakpointed PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for the first op of this
               insn; intermediate ops are marked as non-starts */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* tb->size is only meaningful for a fresh translation, not for a
       PC-search pass over an existing one */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
/* Translate the basic block 'tb': normal path, no per-op PC
   information is recorded (search_pc = 0). */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
/* Re-translate 'tb' with per-op PC information (search_pc = 1), so
   the caller can map a host PC back to the guest PC and cc_op. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}