4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 /* XXX: move that elsewhere */
/* Current write position in the micro-op (opcode) output buffer
   used by the dyngen code generator. */
33 static uint16_t *gen_opc_ptr
;
/* Current write position in the parallel micro-op parameter buffer. */
34 static uint32_t *gen_opparam_ptr
;
/* Bit flags accumulated while decoding the prefix bytes of one x86
   instruction (rep/repz, repnz, lock, operand-size and address-size
   overrides, going by the names). */
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
61 static int x86_64_hregs
;
64 #ifdef USE_DIRECT_JUMP
67 #define TBPARAM(x) (long)(x)
70 typedef struct DisasContext
{
71 /* current insn context */
72 int override
; /* -1 if no override */
75 target_ulong pc
; /* pc = eip + cs_base */
76 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base
; /* base of CS segment */
80 int pe
; /* protected mode */
81 int code32
; /* 32 bit code segment */
83 int lma
; /* long mode active */
84 int code64
; /* 64 bit code segment */
87 int ss32
; /* 32 bit stack segment */
88 int cc_op
; /* current CC operation */
89 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st
; /* currently unused */
91 int vm86
; /* vm86 mode */
94 int tf
; /* TF cpu flag */
95 int singlestep_enabled
; /* "hardware" single step enabled */
96 int jmp_opt
; /* use direct block chaining for direct jumps */
97 int mem_index
; /* select memory access functions */
98 int flags
; /* all execution flags */
99 struct TranslationBlock
*tb
;
100 int popl_esp_hack
; /* for correct popl with esp base handling */
101 int rip_offset
; /* only used in x86_64, but left for simplicity */
103 int cpuid_ext_features
;
106 static void gen_eob(DisasContext
*s
);
107 static void gen_jmp(DisasContext
*s
, target_ulong eip
);
108 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
);
110 /* i386 arith/logic operations */
130 OP_SHL1
, /* undocumented */
135 #define DEF(s, n, copy_size) INDEX_op_ ## s,
152 /* I386 int registers */
153 OR_EAX
, /* MUST be even numbered */
162 OR_TMP0
= 16, /* temporary operand register */
164 OR_A0
, /* temporary register used when doing address evaluation */
169 #define NB_OP_SIZES 4
171 #define DEF_REGS(prefix, suffix) \
172 prefix ## EAX ## suffix,\
173 prefix ## ECX ## suffix,\
174 prefix ## EDX ## suffix,\
175 prefix ## EBX ## suffix,\
176 prefix ## ESP ## suffix,\
177 prefix ## EBP ## suffix,\
178 prefix ## ESI ## suffix,\
179 prefix ## EDI ## suffix,\
180 prefix ## R8 ## suffix,\
181 prefix ## R9 ## suffix,\
182 prefix ## R10 ## suffix,\
183 prefix ## R11 ## suffix,\
184 prefix ## R12 ## suffix,\
185 prefix ## R13 ## suffix,\
186 prefix ## R14 ## suffix,\
187 prefix ## R15 ## suffix,
189 #define DEF_BREGS(prefixb, prefixh, suffix) \
191 static void prefixb ## ESP ## suffix ## _wrapper(void) \
194 prefixb ## ESP ## suffix (); \
196 prefixh ## EAX ## suffix (); \
199 static void prefixb ## EBP ## suffix ## _wrapper(void) \
202 prefixb ## EBP ## suffix (); \
204 prefixh ## ECX ## suffix (); \
207 static void prefixb ## ESI ## suffix ## _wrapper(void) \
210 prefixb ## ESI ## suffix (); \
212 prefixh ## EDX ## suffix (); \
215 static void prefixb ## EDI ## suffix ## _wrapper(void) \
218 prefixb ## EDI ## suffix (); \
220 prefixh ## EBX ## suffix (); \
223 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T0
)
224 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T1
)
225 DEF_BREGS(gen_op_movl_T0_
, gen_op_movh_T0_
, )
226 DEF_BREGS(gen_op_movl_T1_
, gen_op_movh_T1_
, )
228 #else /* !TARGET_X86_64 */
230 #define NB_OP_SIZES 3
232 #define DEF_REGS(prefix, suffix) \
233 prefix ## EAX ## suffix,\
234 prefix ## ECX ## suffix,\
235 prefix ## EDX ## suffix,\
236 prefix ## EBX ## suffix,\
237 prefix ## ESP ## suffix,\
238 prefix ## EBP ## suffix,\
239 prefix ## ESI ## suffix,\
240 prefix ## EDI ## suffix,
242 #endif /* !TARGET_X86_64 */
244 static GenOpFunc
*gen_op_mov_reg_T0
[NB_OP_SIZES
][CPU_NB_REGS
] = {
251 gen_op_movb_ESP_T0_wrapper
,
252 gen_op_movb_EBP_T0_wrapper
,
253 gen_op_movb_ESI_T0_wrapper
,
254 gen_op_movb_EDI_T0_wrapper
,
271 DEF_REGS(gen_op_movw_
, _T0
)
274 DEF_REGS(gen_op_movl_
, _T0
)
278 DEF_REGS(gen_op_movq_
, _T0
)
283 static GenOpFunc
*gen_op_mov_reg_T1
[NB_OP_SIZES
][CPU_NB_REGS
] = {
290 gen_op_movb_ESP_T1_wrapper
,
291 gen_op_movb_EBP_T1_wrapper
,
292 gen_op_movb_ESI_T1_wrapper
,
293 gen_op_movb_EDI_T1_wrapper
,
310 DEF_REGS(gen_op_movw_
, _T1
)
313 DEF_REGS(gen_op_movl_
, _T1
)
317 DEF_REGS(gen_op_movq_
, _T1
)
322 static GenOpFunc
*gen_op_mov_reg_A0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
324 DEF_REGS(gen_op_movw_
, _A0
)
327 DEF_REGS(gen_op_movl_
, _A0
)
331 DEF_REGS(gen_op_movq_
, _A0
)
336 static GenOpFunc
*gen_op_mov_TN_reg
[NB_OP_SIZES
][2][CPU_NB_REGS
] =
345 gen_op_movl_T0_ESP_wrapper
,
346 gen_op_movl_T0_EBP_wrapper
,
347 gen_op_movl_T0_ESI_wrapper
,
348 gen_op_movl_T0_EDI_wrapper
,
370 gen_op_movl_T1_ESP_wrapper
,
371 gen_op_movl_T1_EBP_wrapper
,
372 gen_op_movl_T1_ESI_wrapper
,
373 gen_op_movl_T1_EDI_wrapper
,
392 DEF_REGS(gen_op_movl_T0_
, )
395 DEF_REGS(gen_op_movl_T1_
, )
400 DEF_REGS(gen_op_movl_T0_
, )
403 DEF_REGS(gen_op_movl_T1_
, )
409 DEF_REGS(gen_op_movl_T0_
, )
412 DEF_REGS(gen_op_movl_T1_
, )
418 static GenOpFunc
*gen_op_movl_A0_reg
[CPU_NB_REGS
] = {
419 DEF_REGS(gen_op_movl_A0_
, )
422 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][CPU_NB_REGS
] = {
424 DEF_REGS(gen_op_addl_A0_
, )
427 DEF_REGS(gen_op_addl_A0_
, _s1
)
430 DEF_REGS(gen_op_addl_A0_
, _s2
)
433 DEF_REGS(gen_op_addl_A0_
, _s3
)
438 static GenOpFunc
*gen_op_movq_A0_reg
[CPU_NB_REGS
] = {
439 DEF_REGS(gen_op_movq_A0_
, )
442 static GenOpFunc
*gen_op_addq_A0_reg_sN
[4][CPU_NB_REGS
] = {
444 DEF_REGS(gen_op_addq_A0_
, )
447 DEF_REGS(gen_op_addq_A0_
, _s1
)
450 DEF_REGS(gen_op_addq_A0_
, _s2
)
453 DEF_REGS(gen_op_addq_A0_
, _s3
)
458 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
460 DEF_REGS(gen_op_cmovw_
, _T1_T0
)
463 DEF_REGS(gen_op_cmovl_
, _T1_T0
)
467 DEF_REGS(gen_op_cmovq_
, _T1_T0
)
472 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
483 #define DEF_ARITHC(SUFFIX)\
485 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
486 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
489 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
490 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
493 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
494 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
497 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
498 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
501 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[4][2] = {
505 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[3 * 4][2] = {
507 #ifndef CONFIG_USER_ONLY
513 static const int cc_op_arithb
[8] = {
524 #define DEF_CMPXCHG(SUFFIX)\
525 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
527 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
528 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
530 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[4] = {
534 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[3 * 4] = {
536 #ifndef CONFIG_USER_ONLY
542 #define DEF_SHIFT(SUFFIX)\
544 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
551 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
561 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
564 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
567 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
570 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
571 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
574 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
581 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
584 static GenOpFunc
*gen_op_shift_T0_T1_cc
[4][8] = {
588 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[3 * 4][8] = {
590 #ifndef CONFIG_USER_ONLY
596 #define DEF_SHIFTD(SUFFIX, op)\
602 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
610 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
614 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[4][2] = {
618 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[4][2] = {
622 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[3 * 4][2] = {
624 #ifndef CONFIG_USER_ONLY
625 DEF_SHIFTD(_kernel
, im
)
626 DEF_SHIFTD(_user
, im
)
630 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[3 * 4][2] = {
631 DEF_SHIFTD(_raw
, ECX
)
632 #ifndef CONFIG_USER_ONLY
633 DEF_SHIFTD(_kernel
, ECX
)
634 DEF_SHIFTD(_user
, ECX
)
638 static GenOpFunc
*gen_op_btx_T0_T1_cc
[3][4] = {
641 gen_op_btsw_T0_T1_cc
,
642 gen_op_btrw_T0_T1_cc
,
643 gen_op_btcw_T0_T1_cc
,
647 gen_op_btsl_T0_T1_cc
,
648 gen_op_btrl_T0_T1_cc
,
649 gen_op_btcl_T0_T1_cc
,
654 gen_op_btsq_T0_T1_cc
,
655 gen_op_btrq_T0_T1_cc
,
656 gen_op_btcq_T0_T1_cc
,
661 static GenOpFunc
*gen_op_add_bit_A0_T1
[3] = {
662 gen_op_add_bitw_A0_T1
,
663 gen_op_add_bitl_A0_T1
,
664 X86_64_ONLY(gen_op_add_bitq_A0_T1
),
667 static GenOpFunc
*gen_op_bsx_T0_cc
[3][2] = {
684 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 4] = {
685 gen_op_ldsb_raw_T0_A0
,
686 gen_op_ldsw_raw_T0_A0
,
687 X86_64_ONLY(gen_op_ldsl_raw_T0_A0
),
689 #ifndef CONFIG_USER_ONLY
690 gen_op_ldsb_kernel_T0_A0
,
691 gen_op_ldsw_kernel_T0_A0
,
692 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0
),
695 gen_op_ldsb_user_T0_A0
,
696 gen_op_ldsw_user_T0_A0
,
697 X86_64_ONLY(gen_op_ldsl_user_T0_A0
),
702 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 4] = {
703 gen_op_ldub_raw_T0_A0
,
704 gen_op_lduw_raw_T0_A0
,
708 #ifndef CONFIG_USER_ONLY
709 gen_op_ldub_kernel_T0_A0
,
710 gen_op_lduw_kernel_T0_A0
,
714 gen_op_ldub_user_T0_A0
,
715 gen_op_lduw_user_T0_A0
,
721 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
722 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 4] = {
723 gen_op_ldub_raw_T0_A0
,
724 gen_op_lduw_raw_T0_A0
,
725 gen_op_ldl_raw_T0_A0
,
726 X86_64_ONLY(gen_op_ldq_raw_T0_A0
),
728 #ifndef CONFIG_USER_ONLY
729 gen_op_ldub_kernel_T0_A0
,
730 gen_op_lduw_kernel_T0_A0
,
731 gen_op_ldl_kernel_T0_A0
,
732 X86_64_ONLY(gen_op_ldq_kernel_T0_A0
),
734 gen_op_ldub_user_T0_A0
,
735 gen_op_lduw_user_T0_A0
,
736 gen_op_ldl_user_T0_A0
,
737 X86_64_ONLY(gen_op_ldq_user_T0_A0
),
741 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 4] = {
742 gen_op_ldub_raw_T1_A0
,
743 gen_op_lduw_raw_T1_A0
,
744 gen_op_ldl_raw_T1_A0
,
745 X86_64_ONLY(gen_op_ldq_raw_T1_A0
),
747 #ifndef CONFIG_USER_ONLY
748 gen_op_ldub_kernel_T1_A0
,
749 gen_op_lduw_kernel_T1_A0
,
750 gen_op_ldl_kernel_T1_A0
,
751 X86_64_ONLY(gen_op_ldq_kernel_T1_A0
),
753 gen_op_ldub_user_T1_A0
,
754 gen_op_lduw_user_T1_A0
,
755 gen_op_ldl_user_T1_A0
,
756 X86_64_ONLY(gen_op_ldq_user_T1_A0
),
760 static GenOpFunc
*gen_op_st_T0_A0
[3 * 4] = {
761 gen_op_stb_raw_T0_A0
,
762 gen_op_stw_raw_T0_A0
,
763 gen_op_stl_raw_T0_A0
,
764 X86_64_ONLY(gen_op_stq_raw_T0_A0
),
766 #ifndef CONFIG_USER_ONLY
767 gen_op_stb_kernel_T0_A0
,
768 gen_op_stw_kernel_T0_A0
,
769 gen_op_stl_kernel_T0_A0
,
770 X86_64_ONLY(gen_op_stq_kernel_T0_A0
),
772 gen_op_stb_user_T0_A0
,
773 gen_op_stw_user_T0_A0
,
774 gen_op_stl_user_T0_A0
,
775 X86_64_ONLY(gen_op_stq_user_T0_A0
),
779 static GenOpFunc
*gen_op_st_T1_A0
[3 * 4] = {
781 gen_op_stw_raw_T1_A0
,
782 gen_op_stl_raw_T1_A0
,
783 X86_64_ONLY(gen_op_stq_raw_T1_A0
),
785 #ifndef CONFIG_USER_ONLY
787 gen_op_stw_kernel_T1_A0
,
788 gen_op_stl_kernel_T1_A0
,
789 X86_64_ONLY(gen_op_stq_kernel_T1_A0
),
792 gen_op_stw_user_T1_A0
,
793 gen_op_stl_user_T1_A0
,
794 X86_64_ONLY(gen_op_stq_user_T1_A0
),
/* Load the immediate target 'pc' into EIP, picking the narrowest
   encoding that can represent it. NOTE(review): several #ifdef/#else
   lines are elided in this extract; the branch structure below is
   read from the surviving lines. */
798 static inline void gen_jmp_im(target_ulong pc
)
/* value fits in 32 bits zero-extended: 32-bit immediate move */
801 if (pc
== (uint32_t)pc
) {
802 gen_op_movl_eip_im(pc
);
/* value fits in 32 bits sign-extended */
803 } else if (pc
== (int32_t)pc
) {
804 gen_op_movq_eip_im(pc
);
/* full 64-bit value: pass the high and low halves separately */
806 gen_op_movq_eip_im64(pc
>> 32, pc
);
/* presumably the !TARGET_X86_64 path (elided #else) — 32-bit move only */
809 gen_op_movl_eip_im(pc
);
813 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
817 override
= s
->override
;
821 gen_op_movq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
822 gen_op_addq_A0_reg_sN
[0][R_ESI
]();
824 gen_op_movq_A0_reg
[R_ESI
]();
830 if (s
->addseg
&& override
< 0)
833 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
834 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
836 gen_op_movl_A0_reg
[R_ESI
]();
839 /* 16 address, always override */
842 gen_op_movl_A0_reg
[R_ESI
]();
843 gen_op_andl_A0_ffff();
844 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
848 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
852 gen_op_movq_A0_reg
[R_EDI
]();
857 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
858 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
860 gen_op_movl_A0_reg
[R_EDI
]();
863 gen_op_movl_A0_reg
[R_EDI
]();
864 gen_op_andl_A0_ffff();
865 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
869 static GenOpFunc
*gen_op_movl_T0_Dshift
[4] = {
870 gen_op_movl_T0_Dshiftb
,
871 gen_op_movl_T0_Dshiftw
,
872 gen_op_movl_T0_Dshiftl
,
873 X86_64_ONLY(gen_op_movl_T0_Dshiftq
),
876 static GenOpFunc1
*gen_op_jnz_ecx
[3] = {
879 X86_64_ONLY(gen_op_jnz_ecxq
),
882 static GenOpFunc1
*gen_op_jz_ecx
[3] = {
885 X86_64_ONLY(gen_op_jz_ecxq
),
888 static GenOpFunc
*gen_op_dec_ECX
[3] = {
891 X86_64_ONLY(gen_op_decq_ECX
),
894 static GenOpFunc1
*gen_op_string_jnz_sub
[2][4] = {
899 X86_64_ONLY(gen_op_jnz_subq
),
905 X86_64_ONLY(gen_op_jz_subq
),
909 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
915 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
921 static GenOpFunc
*gen_op_in
[3] = {
927 static GenOpFunc
*gen_op_out
[3] = {
933 static GenOpFunc
*gen_check_io_T0
[3] = {
939 static GenOpFunc
*gen_check_io_DX
[3] = {
/* Emit a run-time I/O permission check before an in/out insn when the
   guest may lack the privilege: protected mode with CPL > IOPL, or
   vm86 mode. 'use_dx' selects whether the port comes from DX or T0. */
945 static void gen_check_io(DisasContext
*s
, int ot
, int use_dx
, target_ulong cur_eip
)
947 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
/* flush statically-known cc state before the helper may fault */
948 if (s
->cc_op
!= CC_OP_DYNAMIC
)
949 gen_op_set_cc_op(s
->cc_op
);
/* port number taken from DX */
952 gen_check_io_DX
[ot
]();
/* port number taken from T0 */
954 gen_check_io_T0
[ot
]();
/* Emit one MOVS iteration: load element from [seg:ESI], store to
   [ES:EDI], then advance ESI/EDI by the signed element size
   (Dshift — presumably derived from EFLAGS.DF). */
958 static inline void gen_movs(DisasContext
*s
, int ot
)
960 gen_string_movl_A0_ESI(s
);
/* T0 = element at A0, sized by 'ot', via this context's mem functions */
961 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
962 gen_string_movl_A0_EDI(s
);
963 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = +/- element size (direction-dependent step) */
964 gen_op_movl_T0_Dshift
[ot
]();
/* 64-bit address size: quad-wide index updates (elided #ifdef) */
967 gen_op_addq_ESI_T0();
968 gen_op_addq_EDI_T0();
/* 32-bit address size */
972 gen_op_addl_ESI_T0();
973 gen_op_addl_EDI_T0();
/* 16-bit address size */
975 gen_op_addw_ESI_T0();
976 gen_op_addw_EDI_T0();
/* If the condition-code operation is statically known, materialize it
   into the CPU state and mark the translator's view as dynamic. */
980 static inline void gen_update_cc_op(DisasContext
*s
)
982 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
983 gen_op_set_cc_op(s
->cc_op
);
984 s
->cc_op
= CC_OP_DYNAMIC
;
988 /* XXX: does not work with gdbstub "ice" single step - not a
990 static int gen_jz_ecx_string(DisasContext
*s
, target_ulong next_eip
)
994 l1
= gen_new_label();
995 l2
= gen_new_label();
996 gen_op_jnz_ecx
[s
->aflag
](l1
);
998 gen_jmp_tb(s
, next_eip
, 1);
/* Emit one STOS iteration: store EAX (loaded into T0) to [ES:EDI],
   then step EDI by the direction-dependent element size. */
1003 static inline void gen_stos(DisasContext
*s
, int ot
)
/* T0 = EAX (always read as a long; store below truncates to 'ot') */
1005 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1006 gen_string_movl_A0_EDI(s
);
1007 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = +/- element size */
1008 gen_op_movl_T0_Dshift
[ot
]();
1009 #ifdef TARGET_X86_64
/* aflag == 2 means 64-bit address size */
1010 if (s
->aflag
== 2) {
1011 gen_op_addq_EDI_T0();
/* 32-bit address size */
1015 gen_op_addl_EDI_T0();
/* 16-bit address size */
1017 gen_op_addw_EDI_T0();
/* Emit one LODS iteration: load element from [seg:ESI] into EAX,
   then step ESI by the direction-dependent element size. */
1021 static inline void gen_lods(DisasContext
*s
, int ot
)
1023 gen_string_movl_A0_ESI(s
);
1024 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* EAX = T0 at operand size 'ot' */
1025 gen_op_mov_reg_T0
[ot
][R_EAX
]();
/* T0 = +/- element size */
1026 gen_op_movl_T0_Dshift
[ot
]();
1027 #ifdef TARGET_X86_64
/* 64-bit address size */
1028 if (s
->aflag
== 2) {
1029 gen_op_addq_ESI_T0();
/* 32-bit address size */
1033 gen_op_addl_ESI_T0();
/* 16-bit address size */
1035 gen_op_addw_ESI_T0();
/* Emit one SCAS iteration: compare EAX (T0) against the element at
   [ES:EDI] (T1), set flags via cmp, then step EDI. */
1039 static inline void gen_scas(DisasContext
*s
, int ot
)
/* T0 = EAX */
1041 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1042 gen_string_movl_A0_EDI(s
);
1043 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
/* compute flags of T0 - T1 */
1044 gen_op_cmpl_T0_T1_cc();
/* T0 = +/- element size */
1045 gen_op_movl_T0_Dshift
[ot
]();
1046 #ifdef TARGET_X86_64
/* 64-bit address size */
1047 if (s
->aflag
== 2) {
1048 gen_op_addq_EDI_T0();
/* 32-bit address size */
1052 gen_op_addl_EDI_T0();
/* 16-bit address size */
1054 gen_op_addw_EDI_T0();
/* Emit one CMPS iteration: compare element at [seg:ESI] (T0) against
   element at [ES:EDI] (T1), set flags, then step both ESI and EDI. */
1058 static inline void gen_cmps(DisasContext
*s
, int ot
)
1060 gen_string_movl_A0_ESI(s
);
1061 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1062 gen_string_movl_A0_EDI(s
);
1063 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
/* compute flags of T0 - T1 */
1064 gen_op_cmpl_T0_T1_cc();
/* T0 = +/- element size */
1065 gen_op_movl_T0_Dshift
[ot
]();
1066 #ifdef TARGET_X86_64
/* 64-bit address size */
1067 if (s
->aflag
== 2) {
1068 gen_op_addq_ESI_T0();
1069 gen_op_addq_EDI_T0();
/* 32-bit address size */
1073 gen_op_addl_ESI_T0();
1074 gen_op_addl_EDI_T0();
/* 16-bit address size */
1076 gen_op_addw_ESI_T0();
1077 gen_op_addw_EDI_T0();
/* Emit one INS iteration: read a value from port DX into T0 and store
   it at [ES:EDI], then step EDI. NOTE(review): the store appears both
   before and after the port read here; intervening lines are elided in
   this extract, so the exact sequencing rationale cannot be confirmed. */
1081 static inline void gen_ins(DisasContext
*s
, int ot
)
1083 gen_string_movl_A0_EDI(s
);
1085 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = input from port DX */
1086 gen_op_in_DX_T0
[ot
]();
1087 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* T0 = +/- element size */
1088 gen_op_movl_T0_Dshift
[ot
]();
1089 #ifdef TARGET_X86_64
/* 64-bit address size */
1090 if (s
->aflag
== 2) {
1091 gen_op_addq_EDI_T0();
/* 32-bit address size */
1095 gen_op_addl_EDI_T0();
/* 16-bit address size */
1097 gen_op_addw_EDI_T0();
/* Emit one OUTS iteration: load element from [seg:ESI] into T0, write
   it to port DX, then step ESI. */
1101 static inline void gen_outs(DisasContext
*s
, int ot
)
1103 gen_string_movl_A0_ESI(s
);
1104 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
/* output T0 to port DX */
1105 gen_op_out_DX_T0
[ot
]();
/* T0 = +/- element size */
1106 gen_op_movl_T0_Dshift
[ot
]();
1107 #ifdef TARGET_X86_64
/* 64-bit address size */
1108 if (s
->aflag
== 2) {
1109 gen_op_addq_ESI_T0();
/* 32-bit address size */
1113 gen_op_addl_ESI_T0();
/* 16-bit address size */
1115 gen_op_addw_ESI_T0();
1119 /* same method as Valgrind : we generate jumps to current or next
1121 #define GEN_REPZ(op) \
1122 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1123 target_ulong cur_eip, target_ulong next_eip) \
1126 gen_update_cc_op(s); \
1127 l2 = gen_jz_ecx_string(s, next_eip); \
1128 gen_ ## op(s, ot); \
1129 gen_op_dec_ECX[s->aflag](); \
1130 /* a loop would cause two single step exceptions if ECX = 1 \
1131 before rep string_insn */ \
1133 gen_op_jz_ecx[s->aflag](l2); \
1134 gen_jmp(s, cur_eip); \
1137 #define GEN_REPZ2(op) \
1138 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1139 target_ulong cur_eip, \
1140 target_ulong next_eip, \
1144 gen_update_cc_op(s); \
1145 l2 = gen_jz_ecx_string(s, next_eip); \
1146 gen_ ## op(s, ot); \
1147 gen_op_dec_ECX[s->aflag](); \
1148 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1149 gen_op_string_jnz_sub[nz][ot](l2);\
1151 gen_op_jz_ecx[s->aflag](l2); \
1152 gen_jmp(s, cur_eip); \
1174 static GenOpFunc1
*gen_jcc_sub
[4][8] = {
1205 #ifdef TARGET_X86_64
1208 BUGGY_64(gen_op_jb_subq
),
1210 BUGGY_64(gen_op_jbe_subq
),
1213 BUGGY_64(gen_op_jl_subq
),
1214 BUGGY_64(gen_op_jle_subq
),
1218 static GenOpFunc1
*gen_op_loop
[3][4] = {
1229 #ifdef TARGET_X86_64
1238 static GenOpFunc
*gen_setcc_slow
[8] = {
1249 static GenOpFunc
*gen_setcc_sub
[4][8] = {
1252 gen_op_setb_T0_subb
,
1253 gen_op_setz_T0_subb
,
1254 gen_op_setbe_T0_subb
,
1255 gen_op_sets_T0_subb
,
1257 gen_op_setl_T0_subb
,
1258 gen_op_setle_T0_subb
,
1262 gen_op_setb_T0_subw
,
1263 gen_op_setz_T0_subw
,
1264 gen_op_setbe_T0_subw
,
1265 gen_op_sets_T0_subw
,
1267 gen_op_setl_T0_subw
,
1268 gen_op_setle_T0_subw
,
1272 gen_op_setb_T0_subl
,
1273 gen_op_setz_T0_subl
,
1274 gen_op_setbe_T0_subl
,
1275 gen_op_sets_T0_subl
,
1277 gen_op_setl_T0_subl
,
1278 gen_op_setle_T0_subl
,
1280 #ifdef TARGET_X86_64
1283 gen_op_setb_T0_subq
,
1284 gen_op_setz_T0_subq
,
1285 gen_op_setbe_T0_subq
,
1286 gen_op_sets_T0_subq
,
1288 gen_op_setl_T0_subq
,
1289 gen_op_setle_T0_subq
,
1294 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1295 gen_op_fadd_ST0_FT0
,
1296 gen_op_fmul_ST0_FT0
,
1297 gen_op_fcom_ST0_FT0
,
1298 gen_op_fcom_ST0_FT0
,
1299 gen_op_fsub_ST0_FT0
,
1300 gen_op_fsubr_ST0_FT0
,
1301 gen_op_fdiv_ST0_FT0
,
1302 gen_op_fdivr_ST0_FT0
,
1305 /* NOTE the exception in "r" op ordering */
1306 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1307 gen_op_fadd_STN_ST0
,
1308 gen_op_fmul_STN_ST0
,
1311 gen_op_fsubr_STN_ST0
,
1312 gen_op_fsub_STN_ST0
,
1313 gen_op_fdivr_STN_ST0
,
1314 gen_op_fdiv_STN_ST0
,
1317 /* if d == OR_TMP0, it means memory operand (address in A0) */
1318 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1320 GenOpFunc
*gen_update_cc
;
1323 gen_op_mov_TN_reg
[ot
][0][d
]();
1325 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1330 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1331 gen_op_set_cc_op(s1
->cc_op
);
1333 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1334 gen_op_mov_reg_T0
[ot
][d
]();
1336 gen_op_arithc_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
- OP_ADCL
]();
1338 s1
->cc_op
= CC_OP_DYNAMIC
;
1341 gen_op_addl_T0_T1();
1342 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1343 gen_update_cc
= gen_op_update2_cc
;
1346 gen_op_subl_T0_T1();
1347 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1348 gen_update_cc
= gen_op_update2_cc
;
1354 gen_op_arith_T0_T1_cc
[op
]();
1355 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1356 gen_update_cc
= gen_op_update1_cc
;
1359 gen_op_cmpl_T0_T1_cc();
1360 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1361 gen_update_cc
= NULL
;
1364 if (op
!= OP_CMPL
) {
1366 gen_op_mov_reg_T0
[ot
][d
]();
1368 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1370 /* the flags update must happen after the memory write (precise
1371 exception support) */
1377 /* if d == OR_TMP0, it means memory operand (address in A0) */
1378 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1381 gen_op_mov_TN_reg
[ot
][0][d
]();
1383 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1384 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1385 gen_op_set_cc_op(s1
->cc_op
);
1388 s1
->cc_op
= CC_OP_INCB
+ ot
;
1391 s1
->cc_op
= CC_OP_DECB
+ ot
;
1394 gen_op_mov_reg_T0
[ot
][d
]();
1396 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1397 gen_op_update_inc_cc();
1400 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1403 gen_op_mov_TN_reg
[ot
][0][d
]();
1405 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1407 gen_op_mov_TN_reg
[ot
][1][s
]();
1408 /* for zero counts, flags are not updated, so must do it dynamically */
1409 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1410 gen_op_set_cc_op(s1
->cc_op
);
1413 gen_op_shift_T0_T1_cc
[ot
][op
]();
1415 gen_op_shift_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
]();
1417 gen_op_mov_reg_T0
[ot
][d
]();
1418 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
/* Shift by an immediate count 'c': load the count into T1 and reuse
   the generic register-count shift path (gen_shift with OR_TMP1). */
1421 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1423 /* currently not optimized */
1424 gen_op_movl_T1_im(c
);
1425 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1428 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1436 int mod
, rm
, code
, override
, must_add_seg
;
1438 override
= s
->override
;
1439 must_add_seg
= s
->addseg
;
1442 mod
= (modrm
>> 6) & 3;
1454 code
= ldub_code(s
->pc
++);
1455 scale
= (code
>> 6) & 3;
1456 index
= ((code
>> 3) & 7) | REX_X(s
);
1463 if ((base
& 7) == 5) {
1465 disp
= (int32_t)ldl_code(s
->pc
);
1467 if (CODE64(s
) && !havesib
) {
1468 disp
+= s
->pc
+ s
->rip_offset
;
1475 disp
= (int8_t)ldub_code(s
->pc
++);
1479 disp
= ldl_code(s
->pc
);
1485 /* for correct popl handling with esp */
1486 if (base
== 4 && s
->popl_esp_hack
)
1487 disp
+= s
->popl_esp_hack
;
1488 #ifdef TARGET_X86_64
1489 if (s
->aflag
== 2) {
1490 gen_op_movq_A0_reg
[base
]();
1492 if ((int32_t)disp
== disp
)
1493 gen_op_addq_A0_im(disp
);
1495 gen_op_addq_A0_im64(disp
>> 32, disp
);
1500 gen_op_movl_A0_reg
[base
]();
1502 gen_op_addl_A0_im(disp
);
1505 #ifdef TARGET_X86_64
1506 if (s
->aflag
== 2) {
1507 if ((int32_t)disp
== disp
)
1508 gen_op_movq_A0_im(disp
);
1510 gen_op_movq_A0_im64(disp
>> 32, disp
);
1514 gen_op_movl_A0_im(disp
);
1517 /* XXX: index == 4 is always invalid */
1518 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1519 #ifdef TARGET_X86_64
1520 if (s
->aflag
== 2) {
1521 gen_op_addq_A0_reg_sN
[scale
][index
]();
1525 gen_op_addl_A0_reg_sN
[scale
][index
]();
1530 if (base
== R_EBP
|| base
== R_ESP
)
1535 #ifdef TARGET_X86_64
1536 if (s
->aflag
== 2) {
1537 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1541 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1548 disp
= lduw_code(s
->pc
);
1550 gen_op_movl_A0_im(disp
);
1551 rm
= 0; /* avoid SS override */
1558 disp
= (int8_t)ldub_code(s
->pc
++);
1562 disp
= lduw_code(s
->pc
);
1568 gen_op_movl_A0_reg
[R_EBX
]();
1569 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1572 gen_op_movl_A0_reg
[R_EBX
]();
1573 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1576 gen_op_movl_A0_reg
[R_EBP
]();
1577 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1580 gen_op_movl_A0_reg
[R_EBP
]();
1581 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1584 gen_op_movl_A0_reg
[R_ESI
]();
1587 gen_op_movl_A0_reg
[R_EDI
]();
1590 gen_op_movl_A0_reg
[R_EBP
]();
1594 gen_op_movl_A0_reg
[R_EBX
]();
1598 gen_op_addl_A0_im(disp
);
1599 gen_op_andl_A0_ffff();
1603 if (rm
== 2 || rm
== 3 || rm
== 6)
1608 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1618 static void gen_nop_modrm(DisasContext
*s
, int modrm
)
1620 int mod
, rm
, base
, code
;
1622 mod
= (modrm
>> 6) & 3;
1632 code
= ldub_code(s
->pc
++);
1668 /* used for LEA and MOV AX, mem */
1669 static void gen_add_A0_ds_seg(DisasContext
*s
)
1671 int override
, must_add_seg
;
1672 must_add_seg
= s
->addseg
;
1674 if (s
->override
>= 0) {
1675 override
= s
->override
;
1681 #ifdef TARGET_X86_64
1683 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1687 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1692 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1694 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1696 int mod
, rm
, opreg
, disp
;
1698 mod
= (modrm
>> 6) & 3;
1699 rm
= (modrm
& 7) | REX_B(s
);
1703 gen_op_mov_TN_reg
[ot
][0][reg
]();
1704 gen_op_mov_reg_T0
[ot
][rm
]();
1706 gen_op_mov_TN_reg
[ot
][0][rm
]();
1708 gen_op_mov_reg_T0
[ot
][reg
]();
1711 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1714 gen_op_mov_TN_reg
[ot
][0][reg
]();
1715 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1717 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1719 gen_op_mov_reg_T0
[ot
][reg
]();
/* Fetch an immediate operand of size 'ot' from the instruction stream
   at s->pc (byte / word / long loads; switch scaffolding and the pc
   advance are elided in this extract). */
1724 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
/* byte immediate */
1730 ret
= ldub_code(s
->pc
);
/* word immediate */
1734 ret
= lduw_code(s
->pc
);
/* long immediate */
1739 ret
= ldl_code(s
->pc
);
1746 static inline int insn_const_size(unsigned int ot
)
1754 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong eip
)
1756 TranslationBlock
*tb
;
1759 pc
= s
->cs_base
+ eip
;
1761 /* NOTE: we handle the case where the TB spans two pages here */
1762 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
1763 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
1764 /* jump to same page: we can use a direct jump */
1766 gen_op_goto_tb0(TBPARAM(tb
));
1768 gen_op_goto_tb1(TBPARAM(tb
));
1770 gen_op_movl_T0_im((long)tb
+ tb_num
);
1773 /* jump to another page: currently not optimized */
1779 static inline void gen_jcc(DisasContext
*s
, int b
,
1780 target_ulong val
, target_ulong next_eip
)
1782 TranslationBlock
*tb
;
1789 jcc_op
= (b
>> 1) & 7;
1793 /* we optimize the cmp/jcc case */
1798 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1801 /* some jumps are easy to compute */
1843 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1846 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1858 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1859 gen_op_set_cc_op(s
->cc_op
);
1860 s
->cc_op
= CC_OP_DYNAMIC
;
1864 gen_setcc_slow
[jcc_op
]();
1865 func
= gen_op_jnz_T0_label
;
1875 l1
= gen_new_label();
1878 gen_goto_tb(s
, 0, next_eip
);
1881 gen_goto_tb(s
, 1, val
);
1886 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1887 gen_op_set_cc_op(s
->cc_op
);
1888 s
->cc_op
= CC_OP_DYNAMIC
;
1890 gen_setcc_slow
[jcc_op
]();
1896 l1
= gen_new_label();
1897 l2
= gen_new_label();
1898 gen_op_jnz_T0_label(l1
);
1899 gen_jmp_im(next_eip
);
1900 gen_op_jmp_label(l2
);
1908 static void gen_setcc(DisasContext
*s
, int b
)
1914 jcc_op
= (b
>> 1) & 7;
1916 /* we optimize the cmp/jcc case */
1921 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1926 /* some jumps are easy to compute */
1953 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1956 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1964 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1965 gen_op_set_cc_op(s
->cc_op
);
1966 func
= gen_setcc_slow
[jcc_op
];
1975 /* move T0 to seg_reg and compute if the CPU state may change. Never
1976 call this function with seg_reg == R_CS */
1977 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, target_ulong cur_eip
)
1979 if (s
->pe
&& !s
->vm86
) {
1980 /* XXX: optimize by finding processor state dynamically */
1981 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1982 gen_op_set_cc_op(s
->cc_op
);
1983 gen_jmp_im(cur_eip
);
1984 gen_op_movl_seg_T0(seg_reg
);
1985 /* abort translation because the addseg value may change or
1986 because ss32 may change. For R_SS, translation must always
1987 stop as a special handling must be done to disable hardware
1988 interrupts for the next instruction */
1989 if (seg_reg
== R_SS
|| (s
->code32
&& seg_reg
< R_FS
))
1992 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1993 if (seg_reg
== R_SS
)
1998 static inline void gen_stack_update(DisasContext
*s
, int addend
)
2000 #ifdef TARGET_X86_64
2003 gen_op_addq_ESP_8();
2005 gen_op_addq_ESP_im(addend
);
2010 gen_op_addl_ESP_2();
2011 else if (addend
== 4)
2012 gen_op_addl_ESP_4();
2014 gen_op_addl_ESP_im(addend
);
2017 gen_op_addw_ESP_2();
2018 else if (addend
== 4)
2019 gen_op_addw_ESP_4();
2021 gen_op_addw_ESP_im(addend
);
2025 /* generate a push. It depends on ss32, addseg and dflag */
2026 static void gen_push_T0(DisasContext
*s
)
2028 #ifdef TARGET_X86_64
2030 gen_op_movq_A0_reg
[R_ESP
]();
2033 gen_op_st_T0_A0
[OT_QUAD
+ s
->mem_index
]();
2036 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
2038 gen_op_movq_ESP_A0();
2042 gen_op_movl_A0_reg
[R_ESP
]();
2049 gen_op_movl_T1_A0();
2050 gen_op_addl_A0_SS();
2053 gen_op_andl_A0_ffff();
2054 gen_op_movl_T1_A0();
2055 gen_op_addl_A0_SS();
2057 gen_op_st_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2058 if (s
->ss32
&& !s
->addseg
)
2059 gen_op_movl_ESP_A0();
2061 gen_op_mov_reg_T1
[s
->ss32
+ 1][R_ESP
]();
2065 /* generate a push. It depends on ss32, addseg and dflag */
2066 /* slower version for T1, only used for call Ev */
2067 static void gen_push_T1(DisasContext
*s
)
2069 #ifdef TARGET_X86_64
2071 gen_op_movq_A0_reg
[R_ESP
]();
2074 gen_op_st_T1_A0
[OT_QUAD
+ s
->mem_index
]();
2077 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
2079 gen_op_movq_ESP_A0();
2083 gen_op_movl_A0_reg
[R_ESP
]();
2090 gen_op_addl_A0_SS();
2093 gen_op_andl_A0_ffff();
2094 gen_op_addl_A0_SS();
2096 gen_op_st_T1_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2098 if (s
->ss32
&& !s
->addseg
)
2099 gen_op_movl_ESP_A0();
2101 gen_stack_update(s
, (-2) << s
->dflag
);
2105 /* two step pop is necessary for precise exceptions */
/*
 * gen_pop_T0: load the value at the top of the guest stack into T0.
 * ESP is NOT updated here — gen_pop_update() does that in a second
 * step (hence "two step pop" above).
 * NOTE(review): extraction dropped the CODE64/ss32/addseg branch lines.
 */
2106 static void gen_pop_T0(DisasContext
*s
)
2108 #ifdef TARGET_X86_64
2110 gen_op_movq_A0_reg
[R_ESP
]();
/* quad or word load depending on dflag */
2111 gen_op_ld_T0_A0
[(s
->dflag
? OT_QUAD
: OT_WORD
) + s
->mem_index
]();
/* legacy mode: SS-relative address computation */
2115 gen_op_movl_A0_reg
[R_ESP
]();
2118 gen_op_addl_A0_SS();
/* 16-bit stack: mask the offset to 64K first */
2120 gen_op_andl_A0_ffff();
2121 gen_op_addl_A0_SS();
/* load size selected by dflag (word/long) plus MMU index */
2123 gen_op_ld_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
/*
 * gen_pop_update: second half of a pop — advance the stack pointer
 * past the value that gen_pop_T0() already loaded.
 */
2127 static void gen_pop_update(DisasContext
*s
)
2129 #ifdef TARGET_X86_64
/* 64-bit mode with dflag set pops 8 bytes */
2130 if (CODE64(s
) && s
->dflag
) {
2131 gen_stack_update(s
, 8);
/* otherwise 2 or 4 bytes according to dflag */
2135 gen_stack_update(s
, 2 << s
->dflag
);
/*
 * gen_stack_A0: compute the current stack address into A0 (and keep
 * the raw ESP offset in T1).  The 64K mask and SS-base add apply to
 * the non-flat cases; the dropped branch lines selected between them.
 */
2139 static void gen_stack_A0(DisasContext
*s
)
2141 gen_op_movl_A0_ESP();
2143 gen_op_andl_A0_ffff();
2144 gen_op_movl_T1_A0();
2146 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2149 /* NOTE: wrap around in 16 bit not fully handled */
/*
 * gen_pusha: emit ops for PUSHA/PUSHAD — push all eight general
 * registers (EAX..EDI, stored from index 7 down to 0).
 */
2150 static void gen_pusha(DisasContext
*s
)
2153 gen_op_movl_A0_ESP();
/* pre-decrement by the total frame: 8 regs * (2 << dflag) bytes */
2154 gen_op_addl_A0_im(-16 << s
->dflag
);
2156 gen_op_andl_A0_ffff();
/* T1 keeps the final ESP offset for the write-back below */
2157 gen_op_movl_T1_A0();
2159 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2160 for(i
= 0;i
< 8; i
++) {
/* registers are stored highest-index first (EDI last pushed) */
2161 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
2162 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2163 gen_op_addl_A0_im(2 << s
->dflag
);
/* write the new stack pointer back (word or long per ss32) */
2165 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2168 /* NOTE: wrap around in 16 bit not fully handled */
/*
 * gen_popa: emit ops for POPA/POPAD — pop all eight general registers
 * (loaded into index 7 down to 0); the popped ESP value is discarded.
 */
2169 static void gen_popa(DisasContext
*s
)
2172 gen_op_movl_A0_ESP();
2174 gen_op_andl_A0_ffff();
/* T1 = ESP + total frame size, the value written back at the end */
2175 gen_op_movl_T1_A0();
2176 gen_op_addl_T1_im(16 << s
->dflag
);
2178 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
))
;
2179 for(i
= 0;i
< 8; i
++) {
2180 /* ESP is not reloaded */
2182 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2183 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
2185 gen_op_addl_A0_im(2 << s
->dflag
);
/* write the new stack pointer back (word or long per ss32) */
2187 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
/*
 * gen_enter: emit ops for the ENTER instruction — push EBP, allocate
 * esp_addend bytes of locals, and (via the enter_level helpers) copy
 * 'level' nested frame pointers.
 * NOTE(review): extraction dropped the CODE64 branch skeleton; the
 * first half below is the TARGET_X86_64 path, the second the legacy one.
 */
2190 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
2195 #ifdef TARGET_X86_64
/* 64-bit path: operand size is quad or word per dflag */
2197 ot
= s
->dflag
? OT_QUAD
: OT_WORD
;
2200 gen_op_movl_A0_ESP();
2201 gen_op_addq_A0_im(-opsize
);
/* T1 keeps the new frame pointer value */
2202 gen_op_movl_T1_A0();
/* push the old RBP */
2205 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2206 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
/* copy 'level' outer frame pointers */
2208 gen_op_enter64_level(level
, (ot
== OT_QUAD
));
2210 gen_op_mov_reg_T1
[ot
][R_EBP
]();
/* RSP = new EBP - esp_addend - level*opsize */
2211 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2212 gen_op_mov_reg_T1
[OT_QUAD
][R_ESP
]();
/* legacy path: word/long per dflag */
2216 ot
= s
->dflag
+ OT_WORD
;
2217 opsize
= 2 << s
->dflag
;
2219 gen_op_movl_A0_ESP();
2220 gen_op_addl_A0_im(-opsize
);
/* 16-bit stack wraps at 64K before adding the SS base */
2222 gen_op_andl_A0_ffff();
2223 gen_op_movl_T1_A0();
2225 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
/* push the old EBP */
2227 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2228 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2230 gen_op_enter_level(level
, s
->dflag
);
2232 gen_op_mov_reg_T1
[ot
][R_EBP
]();
/* ESP = new EBP - esp_addend - level*opsize */
2233 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2234 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
/*
 * gen_exception: flush lazy condition codes, reload EIP, and raise
 * guest exception 'trapno' at translation time.
 * NOTE(review): the original also stopped translation after raising
 * (an s->is_jmp assignment); that line was dropped by this extraction.
 */
2238 static void gen_exception(DisasContext
*s
, int trapno
, target_ulong cur_eip
)
/* materialize the lazily-tracked condition codes first */
2240 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2241 gen_op_set_cc_op(s
->cc_op
);
2242 gen_jmp_im(cur_eip
);
2243 gen_op_raise_exception(trapno
);
2247 /* an interrupt is different from an exception because of the
2248 privilege checks */
/*
 * gen_interrupt: flush lazy condition codes, reload EIP, and raise a
 * guest software interrupt; the next_eip - cur_eip delta gives the
 * instruction length needed to compute the return address.
 */
2249 static void gen_interrupt(DisasContext
*s
, int intno
,
2250 target_ulong cur_eip
, target_ulong next_eip
)
2252 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2253 gen_op_set_cc_op(s
->cc_op
);
2254 gen_jmp_im(cur_eip
);
2255 gen_op_raise_interrupt(intno
, (int)(next_eip
- cur_eip
));
/*
 * gen_debug: flush lazy condition codes and reload EIP before a debug
 * stop.  NOTE(review): the call that actually raises the debug trap
 * (and the is_jmp assignment) was dropped by this extraction.
 */
2259 static void gen_debug(DisasContext
*s
, target_ulong cur_eip
)
2261 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2262 gen_op_set_cc_op(s
->cc_op
);
2263 gen_jmp_im(cur_eip
);
2268 /* generate a generic end of block. Trace exception is also generated
/*
 * gen_eob: standard end-of-translation-block epilogue — flush lazy
 * condition codes, clear the IRQ-inhibit flag if the block was entered
 * with interrupts inhibited, and raise #DB when single-stepping.
 */
2270 static void gen_eob(DisasContext
*s
)
2272 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2273 gen_op_set_cc_op(s
->cc_op
);
/* the inhibit-IRQ state lasts exactly one instruction */
2274 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
2275 gen_op_reset_inhibit_irq();
/* debugger single-step: raise the trap exception */
2277 if (s
->singlestep_enabled
) {
2280 gen_op_raise_exception(EXCP01_SSTP
);
2288 /* generate a jump to eip. No segment change must happen before as a
2289 direct call to the next block may occur */
/*
 * gen_jmp_tb: emit a (possibly direct-chained) jump to 'eip' using TB
 * slot 'tb_num'.  NOTE(review): the slow-path else branch (gen_jmp_im +
 * gen_eob when chaining is not allowed) was dropped by this extraction.
 */
2290 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
)
/* flush lazy condition codes before leaving the block */
2293 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2294 gen_op_set_cc_op(s
->cc_op
);
2295 s
->cc_op
= CC_OP_DYNAMIC
;
2297 gen_goto_tb(s
, tb_num
, eip
);
/* gen_jmp: jump to 'eip' through TB-chaining slot 0. */
2305 static void gen_jmp(DisasContext
*s
, target_ulong eip
)
2307 gen_jmp_tb(s
, eip
, 0);
/*
 * gen_movtl_T0_im: load a target_ulong immediate into T0.  On x86_64,
 * values that fit in a sign-extended 32-bit immediate use the short
 * form; otherwise a 64-bit immediate op (split into two halves) is used.
 */
2310 static void gen_movtl_T0_im(target_ulong val
)
2312 #ifdef TARGET_X86_64
2313 if ((int32_t)val
== val
) {
2314 gen_op_movl_T0_im(val
);
/* full 64-bit immediate: high half, low half */
2316 gen_op_movq_T0_im64(val
>> 32, val
);
/* 32-bit target: always the long form */
2319 gen_op_movl_T0_im(val
);
/*
 * gen_movtl_T1_im: load a target_ulong immediate into T1; same scheme
 * as gen_movtl_T0_im (short form for sign-extended 32-bit values).
 */
2323 static void gen_movtl_T1_im(target_ulong val
)
2325 #ifdef TARGET_X86_64
2326 if ((int32_t)val
== val
) {
2327 gen_op_movl_T1_im(val
);
/* full 64-bit immediate: high half, low half */
2329 gen_op_movq_T1_im64(val
>> 32, val
);
/* 32-bit target: always the long form */
2332 gen_op_movl_T1_im(val
);
/*
 * gen_add_A0_im: add an immediate to the address temporary A0, using
 * the quad op in 64-bit address mode and the long op otherwise.
 */
2336 static void gen_add_A0_im(DisasContext
*s
, int val
)
2338 #ifdef TARGET_X86_64
2340 gen_op_addq_A0_im(val
);
2343 gen_op_addl_A0_im(val
);
/*
 * gen_ldq_env_A0: 64-bit env-offset loads from [A0], indexed by MMU
 * mode (raw; plus kernel/user in system builds).
 */
2346 static GenOpFunc1
*gen_ldq_env_A0
[3] = {
2347 gen_op_ldq_raw_env_A0
,
2348 #ifndef CONFIG_USER_ONLY
2349 gen_op_ldq_kernel_env_A0
,
2350 gen_op_ldq_user_env_A0
,
/*
 * gen_stq_env_A0: 64-bit env-offset stores to [A0], indexed by MMU
 * mode (raw; plus kernel/user in system builds).
 */
2354 static GenOpFunc1
*gen_stq_env_A0
[3] = {
2355 gen_op_stq_raw_env_A0
,
2356 #ifndef CONFIG_USER_ONLY
2357 gen_op_stq_kernel_env_A0
,
2358 gen_op_stq_user_env_A0
,
/*
 * gen_ldo_env_A0: 128-bit (octet/XMM-sized) env-offset loads from [A0],
 * indexed by MMU mode (raw; plus kernel/user in system builds).
 */
2362 static GenOpFunc1
*gen_ldo_env_A0
[3] = {
2363 gen_op_ldo_raw_env_A0
,
2364 #ifndef CONFIG_USER_ONLY
2365 gen_op_ldo_kernel_env_A0
,
2366 gen_op_ldo_user_env_A0
,
/*
 * gen_sto_env_A0: 128-bit (octet/XMM-sized) env-offset stores to [A0],
 * indexed by MMU mode (raw; plus kernel/user in system builds).
 */
2370 static GenOpFunc1
*gen_sto_env_A0
[3] = {
2371 gen_op_sto_raw_env_A0
,
2372 #ifndef CONFIG_USER_ONLY
2373 gen_op_sto_kernel_env_A0
,
2374 gen_op_sto_user_env_A0
,
2378 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2380 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2381 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2382 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2384 static GenOpFunc2
*sse_op_table1
[256][4] = {
2385 /* pure SSE operations */
2386 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2387 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2388 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd, movsldup, movddup */
2389 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2390 [0x14] = { gen_op_punpckldq_xmm
, gen_op_punpcklqdq_xmm
},
2391 [0x15] = { gen_op_punpckhdq_xmm
, gen_op_punpckhqdq_xmm
},
2392 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2393 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2395 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2396 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2397 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2398 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2399 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2400 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2401 [0x2e] = { gen_op_ucomiss
, gen_op_ucomisd
},
2402 [0x2f] = { gen_op_comiss
, gen_op_comisd
},
2403 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2404 [0x51] = SSE_FOP(sqrt
),
2405 [0x52] = { gen_op_rsqrtps
, NULL
, gen_op_rsqrtss
, NULL
},
2406 [0x53] = { gen_op_rcpps
, NULL
, gen_op_rcpss
, NULL
},
2407 [0x54] = { gen_op_pand_xmm
, gen_op_pand_xmm
}, /* andps, andpd */
2408 [0x55] = { gen_op_pandn_xmm
, gen_op_pandn_xmm
}, /* andnps, andnpd */
2409 [0x56] = { gen_op_por_xmm
, gen_op_por_xmm
}, /* orps, orpd */
2410 [0x57] = { gen_op_pxor_xmm
, gen_op_pxor_xmm
}, /* xorps, xorpd */
2411 [0x58] = SSE_FOP(add
),
2412 [0x59] = SSE_FOP(mul
),
2413 [0x5a] = { gen_op_cvtps2pd
, gen_op_cvtpd2ps
,
2414 gen_op_cvtss2sd
, gen_op_cvtsd2ss
},
2415 [0x5b] = { gen_op_cvtdq2ps
, gen_op_cvtps2dq
, gen_op_cvttps2dq
},
2416 [0x5c] = SSE_FOP(sub
),
2417 [0x5d] = SSE_FOP(min
),
2418 [0x5e] = SSE_FOP(div
),
2419 [0x5f] = SSE_FOP(max
),
2421 [0xc2] = SSE_FOP(cmpeq
),
2422 [0xc6] = { (GenOpFunc2
*)gen_op_shufps
, (GenOpFunc2
*)gen_op_shufpd
},
2424 /* MMX ops and their SSE extensions */
2425 [0x60] = MMX_OP2(punpcklbw
),
2426 [0x61] = MMX_OP2(punpcklwd
),
2427 [0x62] = MMX_OP2(punpckldq
),
2428 [0x63] = MMX_OP2(packsswb
),
2429 [0x64] = MMX_OP2(pcmpgtb
),
2430 [0x65] = MMX_OP2(pcmpgtw
),
2431 [0x66] = MMX_OP2(pcmpgtl
),
2432 [0x67] = MMX_OP2(packuswb
),
2433 [0x68] = MMX_OP2(punpckhbw
),
2434 [0x69] = MMX_OP2(punpckhwd
),
2435 [0x6a] = MMX_OP2(punpckhdq
),
2436 [0x6b] = MMX_OP2(packssdw
),
2437 [0x6c] = { NULL
, gen_op_punpcklqdq_xmm
},
2438 [0x6d] = { NULL
, gen_op_punpckhqdq_xmm
},
2439 [0x6e] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movd mm, ea */
2440 [0x6f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, , movqdu */
2441 [0x70] = { (GenOpFunc2
*)gen_op_pshufw_mmx
,
2442 (GenOpFunc2
*)gen_op_pshufd_xmm
,
2443 (GenOpFunc2
*)gen_op_pshufhw_xmm
,
2444 (GenOpFunc2
*)gen_op_pshuflw_xmm
},
2445 [0x71] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftw */
2446 [0x72] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftd */
2447 [0x73] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftq */
2448 [0x74] = MMX_OP2(pcmpeqb
),
2449 [0x75] = MMX_OP2(pcmpeqw
),
2450 [0x76] = MMX_OP2(pcmpeql
),
2451 [0x77] = { SSE_SPECIAL
}, /* emms */
2452 [0x7c] = { NULL
, gen_op_haddpd
, NULL
, gen_op_haddps
},
2453 [0x7d] = { NULL
, gen_op_hsubpd
, NULL
, gen_op_hsubps
},
2454 [0x7e] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movd, movd, , movq */
2455 [0x7f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, movdqu */
2456 [0xc4] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pinsrw */
2457 [0xc5] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pextrw */
2458 [0xd0] = { NULL
, gen_op_addsubpd
, NULL
, gen_op_addsubps
},
2459 [0xd1] = MMX_OP2(psrlw
),
2460 [0xd2] = MMX_OP2(psrld
),
2461 [0xd3] = MMX_OP2(psrlq
),
2462 [0xd4] = MMX_OP2(paddq
),
2463 [0xd5] = MMX_OP2(pmullw
),
2464 [0xd6] = { NULL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
},
2465 [0xd7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pmovmskb */
2466 [0xd8] = MMX_OP2(psubusb
),
2467 [0xd9] = MMX_OP2(psubusw
),
2468 [0xda] = MMX_OP2(pminub
),
2469 [0xdb] = MMX_OP2(pand
),
2470 [0xdc] = MMX_OP2(paddusb
),
2471 [0xdd] = MMX_OP2(paddusw
),
2472 [0xde] = MMX_OP2(pmaxub
),
2473 [0xdf] = MMX_OP2(pandn
),
2474 [0xe0] = MMX_OP2(pavgb
),
2475 [0xe1] = MMX_OP2(psraw
),
2476 [0xe2] = MMX_OP2(psrad
),
2477 [0xe3] = MMX_OP2(pavgw
),
2478 [0xe4] = MMX_OP2(pmulhuw
),
2479 [0xe5] = MMX_OP2(pmulhw
),
2480 [0xe6] = { NULL
, gen_op_cvttpd2dq
, gen_op_cvtdq2pd
, gen_op_cvtpd2dq
},
2481 [0xe7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntq, movntq */
2482 [0xe8] = MMX_OP2(psubsb
),
2483 [0xe9] = MMX_OP2(psubsw
),
2484 [0xea] = MMX_OP2(pminsw
),
2485 [0xeb] = MMX_OP2(por
),
2486 [0xec] = MMX_OP2(paddsb
),
2487 [0xed] = MMX_OP2(paddsw
),
2488 [0xee] = MMX_OP2(pmaxsw
),
2489 [0xef] = MMX_OP2(pxor
),
2490 [0xf0] = { NULL
, NULL
, NULL
, SSE_SPECIAL
}, /* lddqu */
2491 [0xf1] = MMX_OP2(psllw
),
2492 [0xf2] = MMX_OP2(pslld
),
2493 [0xf3] = MMX_OP2(psllq
),
2494 [0xf4] = MMX_OP2(pmuludq
),
2495 [0xf5] = MMX_OP2(pmaddwd
),
2496 [0xf6] = MMX_OP2(psadbw
),
2497 [0xf7] = MMX_OP2(maskmov
),
2498 [0xf8] = MMX_OP2(psubb
),
2499 [0xf9] = MMX_OP2(psubw
),
2500 [0xfa] = MMX_OP2(psubl
),
2501 [0xfb] = MMX_OP2(psubq
),
2502 [0xfc] = MMX_OP2(paddb
),
2503 [0xfd] = MMX_OP2(paddw
),
2504 [0xfe] = MMX_OP2(paddl
),
2507 static GenOpFunc2
*sse_op_table2
[3 * 8][2] = {
2508 [0 + 2] = MMX_OP2(psrlw
),
2509 [0 + 4] = MMX_OP2(psraw
),
2510 [0 + 6] = MMX_OP2(psllw
),
2511 [8 + 2] = MMX_OP2(psrld
),
2512 [8 + 4] = MMX_OP2(psrad
),
2513 [8 + 6] = MMX_OP2(pslld
),
2514 [16 + 2] = MMX_OP2(psrlq
),
2515 [16 + 3] = { NULL
, gen_op_psrldq_xmm
},
2516 [16 + 6] = MMX_OP2(psllq
),
2517 [16 + 7] = { NULL
, gen_op_pslldq_xmm
},
2520 static GenOpFunc1
*sse_op_table3
[4 * 3] = {
2523 X86_64_ONLY(gen_op_cvtsq2ss
),
2524 X86_64_ONLY(gen_op_cvtsq2sd
),
2528 X86_64_ONLY(gen_op_cvttss2sq
),
2529 X86_64_ONLY(gen_op_cvttsd2sq
),
2533 X86_64_ONLY(gen_op_cvtss2sq
),
2534 X86_64_ONLY(gen_op_cvtsd2sq
),
2537 static GenOpFunc2
*sse_op_table4
[8][4] = {
2548 static void gen_sse(DisasContext
*s
, int b
, target_ulong pc_start
, int rex_r
)
2550 int b1
, op1_offset
, op2_offset
, is_xmm
, val
, ot
;
2551 int modrm
, mod
, rm
, reg
, reg_addr
, offset_addr
;
2552 GenOpFunc2
*sse_op2
;
2553 GenOpFunc3
*sse_op3
;
2556 if (s
->prefix
& PREFIX_DATA
)
2558 else if (s
->prefix
& PREFIX_REPZ
)
2560 else if (s
->prefix
& PREFIX_REPNZ
)
2564 sse_op2
= sse_op_table1
[b
][b1
];
2567 if (b
<= 0x5f || b
== 0xc6 || b
== 0xc2) {
2577 /* simple MMX/SSE operation */
2578 if (s
->flags
& HF_TS_MASK
) {
2579 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
2582 if (s
->flags
& HF_EM_MASK
) {
2584 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
2587 if (is_xmm
&& !(s
->flags
& HF_OSFXSR_MASK
))
2594 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2595 the static cpu state) */
2600 modrm
= ldub_code(s
->pc
++);
2601 reg
= ((modrm
>> 3) & 7);
2604 mod
= (modrm
>> 6) & 3;
2605 if (sse_op2
== SSE_SPECIAL
) {
2608 case 0x0e7: /* movntq */
2611 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2612 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2614 case 0x1e7: /* movntdq */
2615 case 0x02b: /* movntps */
2616 case 0x12b: /* movntps */
2617 case 0x3f0: /* lddqu */
2620 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2621 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2623 case 0x6e: /* movd mm, ea */
2624 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2625 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2627 case 0x16e: /* movd xmm, ea */
2628 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2629 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2631 case 0x6f: /* movq mm, ea */
2633 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2634 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2637 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
].mmx
),
2638 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2641 case 0x010: /* movups */
2642 case 0x110: /* movupd */
2643 case 0x028: /* movaps */
2644 case 0x128: /* movapd */
2645 case 0x16f: /* movdqa xmm, ea */
2646 case 0x26f: /* movdqu xmm, ea */
2648 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2649 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2651 rm
= (modrm
& 7) | REX_B(s
);
2652 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[reg
]),
2653 offsetof(CPUX86State
,xmm_regs
[rm
]));
2656 case 0x210: /* movss xmm, ea */
2658 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2659 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2660 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2662 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2663 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2664 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2666 rm
= (modrm
& 7) | REX_B(s
);
2667 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2668 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2671 case 0x310: /* movsd xmm, ea */
2673 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2674 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2676 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2677 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2679 rm
= (modrm
& 7) | REX_B(s
);
2680 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2681 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2684 case 0x012: /* movlps */
2685 case 0x112: /* movlpd */
2687 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2688 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2691 rm
= (modrm
& 7) | REX_B(s
);
2692 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2693 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2696 case 0x212: /* movsldup */
2698 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2699 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2701 rm
= (modrm
& 7) | REX_B(s
);
2702 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2703 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2704 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2705 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(2)));
2707 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2708 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2709 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2710 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2712 case 0x312: /* movddup */
2714 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2715 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2717 rm
= (modrm
& 7) | REX_B(s
);
2718 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2719 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2721 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2722 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2724 case 0x016: /* movhps */
2725 case 0x116: /* movhpd */
2727 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2728 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2731 rm
= (modrm
& 7) | REX_B(s
);
2732 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2733 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2736 case 0x216: /* movshdup */
2738 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2739 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2741 rm
= (modrm
& 7) | REX_B(s
);
2742 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2743 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(1)));
2744 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2745 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(3)));
2747 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2748 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2749 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2750 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2752 case 0x7e: /* movd ea, mm */
2753 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2754 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2756 case 0x17e: /* movd ea, xmm */
2757 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2758 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2760 case 0x27e: /* movq xmm, ea */
2762 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2763 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2765 rm
= (modrm
& 7) | REX_B(s
);
2766 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2767 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2769 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2771 case 0x7f: /* movq ea, mm */
2773 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2774 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2777 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2778 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2781 case 0x011: /* movups */
2782 case 0x111: /* movupd */
2783 case 0x029: /* movaps */
2784 case 0x129: /* movapd */
2785 case 0x17f: /* movdqa ea, xmm */
2786 case 0x27f: /* movdqu ea, xmm */
2788 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2789 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2791 rm
= (modrm
& 7) | REX_B(s
);
2792 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
2793 offsetof(CPUX86State
,xmm_regs
[reg
]));
2796 case 0x211: /* movss ea, xmm */
2798 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2799 gen_op_movl_T0_env(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2800 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
2802 rm
= (modrm
& 7) | REX_B(s
);
2803 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
2804 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2807 case 0x311: /* movsd ea, xmm */
2809 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2810 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2812 rm
= (modrm
& 7) | REX_B(s
);
2813 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2814 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2817 case 0x013: /* movlps */
2818 case 0x113: /* movlpd */
2820 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2821 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2826 case 0x017: /* movhps */
2827 case 0x117: /* movhpd */
2829 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2830 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2835 case 0x71: /* shift mm, im */
2838 case 0x171: /* shift xmm, im */
2841 val
= ldub_code(s
->pc
++);
2843 gen_op_movl_T0_im(val
);
2844 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2846 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
2847 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
2849 gen_op_movl_T0_im(val
);
2850 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
2852 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
2853 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
2855 sse_op2
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
2859 rm
= (modrm
& 7) | REX_B(s
);
2860 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2863 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2865 sse_op2(op2_offset
, op1_offset
);
2867 case 0x050: /* movmskps */
2868 rm
= (modrm
& 7) | REX_B(s
);
2869 gen_op_movmskps(offsetof(CPUX86State
,xmm_regs
[rm
]));
2870 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2872 case 0x150: /* movmskpd */
2873 rm
= (modrm
& 7) | REX_B(s
);
2874 gen_op_movmskpd(offsetof(CPUX86State
,xmm_regs
[rm
]));
2875 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2877 case 0x02a: /* cvtpi2ps */
2878 case 0x12a: /* cvtpi2pd */
2881 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2882 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2883 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
2886 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2888 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2891 gen_op_cvtpi2ps(op1_offset
, op2_offset
);
2895 gen_op_cvtpi2pd(op1_offset
, op2_offset
);
2899 case 0x22a: /* cvtsi2ss */
2900 case 0x32a: /* cvtsi2sd */
2901 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2902 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2903 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2904 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2)](op1_offset
);
2906 case 0x02c: /* cvttps2pi */
2907 case 0x12c: /* cvttpd2pi */
2908 case 0x02d: /* cvtps2pi */
2909 case 0x12d: /* cvtpd2pi */
2912 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2913 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2914 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
2916 rm
= (modrm
& 7) | REX_B(s
);
2917 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2919 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
2922 gen_op_cvttps2pi(op1_offset
, op2_offset
);
2925 gen_op_cvttpd2pi(op1_offset
, op2_offset
);
2928 gen_op_cvtps2pi(op1_offset
, op2_offset
);
2931 gen_op_cvtpd2pi(op1_offset
, op2_offset
);
2935 case 0x22c: /* cvttss2si */
2936 case 0x32c: /* cvttsd2si */
2937 case 0x22d: /* cvtss2si */
2938 case 0x32d: /* cvtsd2si */
2939 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2941 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2943 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_Q(0)));
2945 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2946 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2948 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2950 rm
= (modrm
& 7) | REX_B(s
);
2951 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2953 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2) + 4 +
2954 (b
& 1) * 4](op2_offset
);
2955 gen_op_mov_reg_T0
[ot
][reg
]();
2957 case 0xc4: /* pinsrw */
2960 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
2961 val
= ldub_code(s
->pc
++);
2964 gen_op_pinsrw_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]), val
);
2967 gen_op_pinsrw_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
), val
);
2970 case 0xc5: /* pextrw */
2974 val
= ldub_code(s
->pc
++);
2977 rm
= (modrm
& 7) | REX_B(s
);
2978 gen_op_pextrw_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]), val
);
2982 gen_op_pextrw_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
), val
);
2984 reg
= ((modrm
>> 3) & 7) | rex_r
;
2985 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
2987 case 0x1d6: /* movq ea, xmm */
2989 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2990 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2992 rm
= (modrm
& 7) | REX_B(s
);
2993 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2994 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2995 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2998 case 0x2d6: /* movq2dq */
3001 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3002 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3003 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3005 case 0x3d6: /* movdq2q */
3007 rm
= (modrm
& 7) | REX_B(s
);
3008 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
),
3009 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3011 case 0xd7: /* pmovmskb */
3016 rm
= (modrm
& 7) | REX_B(s
);
3017 gen_op_pmovmskb_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]));
3020 gen_op_pmovmskb_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3022 reg
= ((modrm
>> 3) & 7) | rex_r
;
3023 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
3029 /* generic MMX or SSE operation */
3032 /* maskmov : we must prepare A0 */
3035 #ifdef TARGET_X86_64
3036 if (s
->aflag
== 2) {
3037 gen_op_movq_A0_reg
[R_EDI
]();
3041 gen_op_movl_A0_reg
[R_EDI
]();
3043 gen_op_andl_A0_ffff();
3045 gen_add_A0_ds_seg(s
);
3047 case 0x70: /* pshufx insn */
3048 case 0xc6: /* pshufx insn */
3049 case 0xc2: /* compare insns */
3056 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3058 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3059 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3060 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f && b
!= 0x5b) ||
3062 /* specific case for SSE single instructions */
3065 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
3066 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3069 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
3072 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
3075 rm
= (modrm
& 7) | REX_B(s
);
3076 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3079 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
3081 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3082 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3083 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
3086 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3090 case 0x70: /* pshufx insn */
3091 case 0xc6: /* pshufx insn */
3092 val
= ldub_code(s
->pc
++);
3093 sse_op3
= (GenOpFunc3
*)sse_op2
;
3094 sse_op3(op1_offset
, op2_offset
, val
);
3098 val
= ldub_code(s
->pc
++);
3101 sse_op2
= sse_op_table4
[val
][b1
];
3102 sse_op2(op1_offset
, op2_offset
);
3105 sse_op2(op1_offset
, op2_offset
);
3108 if (b
== 0x2e || b
== 0x2f) {
3109 s
->cc_op
= CC_OP_EFLAGS
;
3115 /* convert one instruction. s->is_jmp is set if the translation must
3116 be stopped. Return the next pc value */
3117 static target_ulong
disas_insn(DisasContext
*s
, target_ulong pc_start
)
3119 int b
, prefixes
, aflag
, dflag
;
3121 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
3122 target_ulong next_eip
, tval
;
3132 #ifdef TARGET_X86_64
3137 s
->rip_offset
= 0; /* for relative ip address */
3139 b
= ldub_code(s
->pc
);
3141 /* check prefixes */
3142 #ifdef TARGET_X86_64
3146 prefixes
|= PREFIX_REPZ
;
3149 prefixes
|= PREFIX_REPNZ
;
3152 prefixes
|= PREFIX_LOCK
;
3173 prefixes
|= PREFIX_DATA
;
3176 prefixes
|= PREFIX_ADR
;
3180 rex_w
= (b
>> 3) & 1;
3181 rex_r
= (b
& 0x4) << 1;
3182 s
->rex_x
= (b
& 0x2) << 2;
3183 REX_B(s
) = (b
& 0x1) << 3;
3184 x86_64_hregs
= 1; /* select uniform byte register addressing */
3188 /* 0x66 is ignored if rex.w is set */
3191 if (prefixes
& PREFIX_DATA
)
3194 if (!(prefixes
& PREFIX_ADR
))
3201 prefixes
|= PREFIX_REPZ
;
3204 prefixes
|= PREFIX_REPNZ
;
3207 prefixes
|= PREFIX_LOCK
;
3228 prefixes
|= PREFIX_DATA
;
3231 prefixes
|= PREFIX_ADR
;
3234 if (prefixes
& PREFIX_DATA
)
3236 if (prefixes
& PREFIX_ADR
)
3240 s
->prefix
= prefixes
;
3244 /* lock generation */
3245 if (prefixes
& PREFIX_LOCK
)
3248 /* now check op code */
3252 /**************************/
3253 /* extended op code */
3254 b
= ldub_code(s
->pc
++) | 0x100;
3257 /**************************/
3275 ot
= dflag
+ OT_WORD
;
3278 case 0: /* OP Ev, Gv */
3279 modrm
= ldub_code(s
->pc
++);
3280 reg
= ((modrm
>> 3) & 7) | rex_r
;
3281 mod
= (modrm
>> 6) & 3;
3282 rm
= (modrm
& 7) | REX_B(s
);
3284 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3286 } else if (op
== OP_XORL
&& rm
== reg
) {
3288 /* xor reg, reg optimisation */
3290 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3291 gen_op_mov_reg_T0
[ot
][reg
]();
3292 gen_op_update1_cc();
3297 gen_op_mov_TN_reg
[ot
][1][reg
]();
3298 gen_op(s
, op
, ot
, opreg
);
3300 case 1: /* OP Gv, Ev */
3301 modrm
= ldub_code(s
->pc
++);
3302 mod
= (modrm
>> 6) & 3;
3303 reg
= ((modrm
>> 3) & 7) | rex_r
;
3304 rm
= (modrm
& 7) | REX_B(s
);
3306 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3307 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3308 } else if (op
== OP_XORL
&& rm
== reg
) {
3311 gen_op_mov_TN_reg
[ot
][1][rm
]();
3313 gen_op(s
, op
, ot
, reg
);
3315 case 2: /* OP A, Iv */
3316 val
= insn_get(s
, ot
);
3317 gen_op_movl_T1_im(val
);
3318 gen_op(s
, op
, ot
, OR_EAX
);
3324 case 0x80: /* GRP1 */
3334 ot
= dflag
+ OT_WORD
;
3336 modrm
= ldub_code(s
->pc
++);
3337 mod
= (modrm
>> 6) & 3;
3338 rm
= (modrm
& 7) | REX_B(s
);
3339 op
= (modrm
>> 3) & 7;
3345 s
->rip_offset
= insn_const_size(ot
);
3346 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3357 val
= insn_get(s
, ot
);
3360 val
= (int8_t)insn_get(s
, OT_BYTE
);
3363 gen_op_movl_T1_im(val
);
3364 gen_op(s
, op
, ot
, opreg
);
3368 /**************************/
3369 /* inc, dec, and other misc arith */
3370 case 0x40 ... 0x47: /* inc Gv */
3371 ot
= dflag
? OT_LONG
: OT_WORD
;
3372 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
3374 case 0x48 ... 0x4f: /* dec Gv */
3375 ot
= dflag
? OT_LONG
: OT_WORD
;
3376 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
3378 case 0xf6: /* GRP3 */
3383 ot
= dflag
+ OT_WORD
;
3385 modrm
= ldub_code(s
->pc
++);
3386 mod
= (modrm
>> 6) & 3;
3387 rm
= (modrm
& 7) | REX_B(s
);
3388 op
= (modrm
>> 3) & 7;
3391 s
->rip_offset
= insn_const_size(ot
);
3392 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3393 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3395 gen_op_mov_TN_reg
[ot
][0][rm
]();
3400 val
= insn_get(s
, ot
);
3401 gen_op_movl_T1_im(val
);
3402 gen_op_testl_T0_T1_cc();
3403 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3408 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3410 gen_op_mov_reg_T0
[ot
][rm
]();
3416 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3418 gen_op_mov_reg_T0
[ot
][rm
]();
3420 gen_op_update_neg_cc();
3421 s
->cc_op
= CC_OP_SUBB
+ ot
;
3426 gen_op_mulb_AL_T0();
3427 s
->cc_op
= CC_OP_MULB
;
3430 gen_op_mulw_AX_T0();
3431 s
->cc_op
= CC_OP_MULW
;
3435 gen_op_mull_EAX_T0();
3436 s
->cc_op
= CC_OP_MULL
;
3438 #ifdef TARGET_X86_64
3440 gen_op_mulq_EAX_T0();
3441 s
->cc_op
= CC_OP_MULQ
;
3449 gen_op_imulb_AL_T0();
3450 s
->cc_op
= CC_OP_MULB
;
3453 gen_op_imulw_AX_T0();
3454 s
->cc_op
= CC_OP_MULW
;
3458 gen_op_imull_EAX_T0();
3459 s
->cc_op
= CC_OP_MULL
;
3461 #ifdef TARGET_X86_64
3463 gen_op_imulq_EAX_T0();
3464 s
->cc_op
= CC_OP_MULQ
;
3472 gen_jmp_im(pc_start
- s
->cs_base
);
3473 gen_op_divb_AL_T0();
3476 gen_jmp_im(pc_start
- s
->cs_base
);
3477 gen_op_divw_AX_T0();
3481 gen_jmp_im(pc_start
- s
->cs_base
);
3482 gen_op_divl_EAX_T0();
3484 #ifdef TARGET_X86_64
3486 gen_jmp_im(pc_start
- s
->cs_base
);
3487 gen_op_divq_EAX_T0();
3495 gen_jmp_im(pc_start
- s
->cs_base
);
3496 gen_op_idivb_AL_T0();
3499 gen_jmp_im(pc_start
- s
->cs_base
);
3500 gen_op_idivw_AX_T0();
3504 gen_jmp_im(pc_start
- s
->cs_base
);
3505 gen_op_idivl_EAX_T0();
3507 #ifdef TARGET_X86_64
3509 gen_jmp_im(pc_start
- s
->cs_base
);
3510 gen_op_idivq_EAX_T0();
3520 case 0xfe: /* GRP4 */
3521 case 0xff: /* GRP5 */
3525 ot
= dflag
+ OT_WORD
;
3527 modrm
= ldub_code(s
->pc
++);
3528 mod
= (modrm
>> 6) & 3;
3529 rm
= (modrm
& 7) | REX_B(s
);
3530 op
= (modrm
>> 3) & 7;
3531 if (op
>= 2 && b
== 0xfe) {
3535 if (op
== 2 || op
== 4) {
3536 /* operand size for jumps is 64 bit */
3538 } else if (op
== 3 || op
== 5) {
3539 /* for call calls, the operand is 16 or 32 bit, even
3541 ot
= dflag
? OT_LONG
: OT_WORD
;
3542 } else if (op
== 6) {
3543 /* default push size is 64 bit */
3544 ot
= dflag
? OT_QUAD
: OT_WORD
;
3548 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3549 if (op
>= 2 && op
!= 3 && op
!= 5)
3550 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3552 gen_op_mov_TN_reg
[ot
][0][rm
]();
3556 case 0: /* inc Ev */
3561 gen_inc(s
, ot
, opreg
, 1);
3563 case 1: /* dec Ev */
3568 gen_inc(s
, ot
, opreg
, -1);
3570 case 2: /* call Ev */
3571 /* XXX: optimize if memory (no 'and' is necessary) */
3573 gen_op_andl_T0_ffff();
3574 next_eip
= s
->pc
- s
->cs_base
;
3575 gen_movtl_T1_im(next_eip
);
3580 case 3: /* lcall Ev */
3581 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3582 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3583 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3585 if (s
->pe
&& !s
->vm86
) {
3586 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3587 gen_op_set_cc_op(s
->cc_op
);
3588 gen_jmp_im(pc_start
- s
->cs_base
);
3589 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- pc_start
);
3591 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3595 case 4: /* jmp Ev */
3597 gen_op_andl_T0_ffff();
3601 case 5: /* ljmp Ev */
3602 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3603 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3604 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
3606 if (s
->pe
&& !s
->vm86
) {
3607 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3608 gen_op_set_cc_op(s
->cc_op
);
3609 gen_jmp_im(pc_start
- s
->cs_base
);
3610 gen_op_ljmp_protected_T0_T1(s
->pc
- pc_start
);
3612 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3613 gen_op_movl_T0_T1();
3618 case 6: /* push Ev */
3626 case 0x84: /* test Ev, Gv */
3631 ot
= dflag
+ OT_WORD
;
3633 modrm
= ldub_code(s
->pc
++);
3634 mod
= (modrm
>> 6) & 3;
3635 rm
= (modrm
& 7) | REX_B(s
);
3636 reg
= ((modrm
>> 3) & 7) | rex_r
;
3638 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3639 gen_op_mov_TN_reg
[ot
][1][reg
]();
3640 gen_op_testl_T0_T1_cc();
3641 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3644 case 0xa8: /* test eAX, Iv */
3649 ot
= dflag
+ OT_WORD
;
3650 val
= insn_get(s
, ot
);
3652 gen_op_mov_TN_reg
[ot
][0][OR_EAX
]();
3653 gen_op_movl_T1_im(val
);
3654 gen_op_testl_T0_T1_cc();
3655 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3658 case 0x98: /* CWDE/CBW */
3659 #ifdef TARGET_X86_64
3661 gen_op_movslq_RAX_EAX();
3665 gen_op_movswl_EAX_AX();
3667 gen_op_movsbw_AX_AL();
3669 case 0x99: /* CDQ/CWD */
3670 #ifdef TARGET_X86_64
3672 gen_op_movsqo_RDX_RAX();
3676 gen_op_movslq_EDX_EAX();
3678 gen_op_movswl_DX_AX();
3680 case 0x1af: /* imul Gv, Ev */
3681 case 0x69: /* imul Gv, Ev, I */
3683 ot
= dflag
+ OT_WORD
;
3684 modrm
= ldub_code(s
->pc
++);
3685 reg
= ((modrm
>> 3) & 7) | rex_r
;
3687 s
->rip_offset
= insn_const_size(ot
);
3690 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3692 val
= insn_get(s
, ot
);
3693 gen_op_movl_T1_im(val
);
3694 } else if (b
== 0x6b) {
3695 val
= (int8_t)insn_get(s
, OT_BYTE
);
3696 gen_op_movl_T1_im(val
);
3698 gen_op_mov_TN_reg
[ot
][1][reg
]();
3701 #ifdef TARGET_X86_64
3702 if (ot
== OT_QUAD
) {
3703 gen_op_imulq_T0_T1();
3706 if (ot
== OT_LONG
) {
3707 gen_op_imull_T0_T1();
3709 gen_op_imulw_T0_T1();
3711 gen_op_mov_reg_T0
[ot
][reg
]();
3712 s
->cc_op
= CC_OP_MULB
+ ot
;
3715 case 0x1c1: /* xadd Ev, Gv */
3719 ot
= dflag
+ OT_WORD
;
3720 modrm
= ldub_code(s
->pc
++);
3721 reg
= ((modrm
>> 3) & 7) | rex_r
;
3722 mod
= (modrm
>> 6) & 3;
3724 rm
= (modrm
& 7) | REX_B(s
);
3725 gen_op_mov_TN_reg
[ot
][0][reg
]();
3726 gen_op_mov_TN_reg
[ot
][1][rm
]();
3727 gen_op_addl_T0_T1();
3728 gen_op_mov_reg_T1
[ot
][reg
]();
3729 gen_op_mov_reg_T0
[ot
][rm
]();
3731 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3732 gen_op_mov_TN_reg
[ot
][0][reg
]();
3733 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
3734 gen_op_addl_T0_T1();
3735 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3736 gen_op_mov_reg_T1
[ot
][reg
]();
3738 gen_op_update2_cc();
3739 s
->cc_op
= CC_OP_ADDB
+ ot
;
3742 case 0x1b1: /* cmpxchg Ev, Gv */
3746 ot
= dflag
+ OT_WORD
;
3747 modrm
= ldub_code(s
->pc
++);
3748 reg
= ((modrm
>> 3) & 7) | rex_r
;
3749 mod
= (modrm
>> 6) & 3;
3750 gen_op_mov_TN_reg
[ot
][1][reg
]();
3752 rm
= (modrm
& 7) | REX_B(s
);
3753 gen_op_mov_TN_reg
[ot
][0][rm
]();
3754 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
3755 gen_op_mov_reg_T0
[ot
][rm
]();
3757 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3758 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
3759 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
3761 s
->cc_op
= CC_OP_SUBB
+ ot
;
3763 case 0x1c7: /* cmpxchg8b */
3764 modrm
= ldub_code(s
->pc
++);
3765 mod
= (modrm
>> 6) & 3;
3768 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3769 gen_op_set_cc_op(s
->cc_op
);
3770 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3772 s
->cc_op
= CC_OP_EFLAGS
;
3775 /**************************/
3777 case 0x50 ... 0x57: /* push */
3778 gen_op_mov_TN_reg
[OT_LONG
][0][(b
& 7) | REX_B(s
)]();
3781 case 0x58 ... 0x5f: /* pop */
3783 ot
= dflag
? OT_QUAD
: OT_WORD
;
3785 ot
= dflag
+ OT_WORD
;
3788 /* NOTE: order is important for pop %sp */
3790 gen_op_mov_reg_T0
[ot
][(b
& 7) | REX_B(s
)]();
3792 case 0x60: /* pusha */
3797 case 0x61: /* popa */
3802 case 0x68: /* push Iv */
3805 ot
= dflag
? OT_QUAD
: OT_WORD
;
3807 ot
= dflag
+ OT_WORD
;
3810 val
= insn_get(s
, ot
);
3812 val
= (int8_t)insn_get(s
, OT_BYTE
);
3813 gen_op_movl_T0_im(val
);
3816 case 0x8f: /* pop Ev */
3818 ot
= dflag
? OT_QUAD
: OT_WORD
;
3820 ot
= dflag
+ OT_WORD
;
3822 modrm
= ldub_code(s
->pc
++);
3823 mod
= (modrm
>> 6) & 3;
3826 /* NOTE: order is important for pop %sp */
3828 rm
= (modrm
& 7) | REX_B(s
);
3829 gen_op_mov_reg_T0
[ot
][rm
]();
3831 /* NOTE: order is important too for MMU exceptions */
3832 s
->popl_esp_hack
= 1 << ot
;
3833 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3834 s
->popl_esp_hack
= 0;
3838 case 0xc8: /* enter */
3841 val
= lduw_code(s
->pc
);
3843 level
= ldub_code(s
->pc
++);
3844 gen_enter(s
, val
, level
);
3847 case 0xc9: /* leave */
3848 /* XXX: exception not precise (ESP is updated before potential exception) */
3850 gen_op_mov_TN_reg
[OT_QUAD
][0][R_EBP
]();
3851 gen_op_mov_reg_T0
[OT_QUAD
][R_ESP
]();
3852 } else if (s
->ss32
) {
3853 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
3854 gen_op_mov_reg_T0
[OT_LONG
][R_ESP
]();
3856 gen_op_mov_TN_reg
[OT_WORD
][0][R_EBP
]();
3857 gen_op_mov_reg_T0
[OT_WORD
][R_ESP
]();
3861 ot
= dflag
? OT_QUAD
: OT_WORD
;
3863 ot
= dflag
+ OT_WORD
;
3865 gen_op_mov_reg_T0
[ot
][R_EBP
]();
3868 case 0x06: /* push es */
3869 case 0x0e: /* push cs */
3870 case 0x16: /* push ss */
3871 case 0x1e: /* push ds */
3874 gen_op_movl_T0_seg(b
>> 3);
3877 case 0x1a0: /* push fs */
3878 case 0x1a8: /* push gs */
3879 gen_op_movl_T0_seg((b
>> 3) & 7);
3882 case 0x07: /* pop es */
3883 case 0x17: /* pop ss */
3884 case 0x1f: /* pop ds */
3889 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3892 /* if reg == SS, inhibit interrupts/trace. */
3893 /* If several instructions disable interrupts, only the
3895 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3896 gen_op_set_inhibit_irq();
3900 gen_jmp_im(s
->pc
- s
->cs_base
);
3904 case 0x1a1: /* pop fs */
3905 case 0x1a9: /* pop gs */
3907 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
3910 gen_jmp_im(s
->pc
- s
->cs_base
);
3915 /**************************/
3918 case 0x89: /* mov Gv, Ev */
3922 ot
= dflag
+ OT_WORD
;
3923 modrm
= ldub_code(s
->pc
++);
3924 reg
= ((modrm
>> 3) & 7) | rex_r
;
3926 /* generate a generic store */
3927 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
3930 case 0xc7: /* mov Ev, Iv */
3934 ot
= dflag
+ OT_WORD
;
3935 modrm
= ldub_code(s
->pc
++);
3936 mod
= (modrm
>> 6) & 3;
3938 s
->rip_offset
= insn_const_size(ot
);
3939 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3941 val
= insn_get(s
, ot
);
3942 gen_op_movl_T0_im(val
);
3944 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
3946 gen_op_mov_reg_T0
[ot
][(modrm
& 7) | REX_B(s
)]();
3949 case 0x8b: /* mov Ev, Gv */
3953 ot
= OT_WORD
+ dflag
;
3954 modrm
= ldub_code(s
->pc
++);
3955 reg
= ((modrm
>> 3) & 7) | rex_r
;
3957 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3958 gen_op_mov_reg_T0
[ot
][reg
]();
3960 case 0x8e: /* mov seg, Gv */
3961 modrm
= ldub_code(s
->pc
++);
3962 reg
= (modrm
>> 3) & 7;
3963 if (reg
>= 6 || reg
== R_CS
)
3965 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3966 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3968 /* if reg == SS, inhibit interrupts/trace */
3969 /* If several instructions disable interrupts, only the
3971 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3972 gen_op_set_inhibit_irq();
3976 gen_jmp_im(s
->pc
- s
->cs_base
);
3980 case 0x8c: /* mov Gv, seg */
3981 modrm
= ldub_code(s
->pc
++);
3982 reg
= (modrm
>> 3) & 7;
3983 mod
= (modrm
>> 6) & 3;
3986 gen_op_movl_T0_seg(reg
);
3988 ot
= OT_WORD
+ dflag
;
3991 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3994 case 0x1b6: /* movzbS Gv, Eb */
3995 case 0x1b7: /* movzwS Gv, Eb */
3996 case 0x1be: /* movsbS Gv, Eb */
3997 case 0x1bf: /* movswS Gv, Eb */
4000 /* d_ot is the size of destination */
4001 d_ot
= dflag
+ OT_WORD
;
4002 /* ot is the size of source */
4003 ot
= (b
& 1) + OT_BYTE
;
4004 modrm
= ldub_code(s
->pc
++);
4005 reg
= ((modrm
>> 3) & 7) | rex_r
;
4006 mod
= (modrm
>> 6) & 3;
4007 rm
= (modrm
& 7) | REX_B(s
);
4010 gen_op_mov_TN_reg
[ot
][0][rm
]();
4011 switch(ot
| (b
& 8)) {
4013 gen_op_movzbl_T0_T0();
4016 gen_op_movsbl_T0_T0();
4019 gen_op_movzwl_T0_T0();
4023 gen_op_movswl_T0_T0();
4026 gen_op_mov_reg_T0
[d_ot
][reg
]();
4028 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4030 gen_op_lds_T0_A0
[ot
+ s
->mem_index
]();
4032 gen_op_ldu_T0_A0
[ot
+ s
->mem_index
]();
4034 gen_op_mov_reg_T0
[d_ot
][reg
]();
4039 case 0x8d: /* lea */
4040 ot
= dflag
+ OT_WORD
;
4041 modrm
= ldub_code(s
->pc
++);
4042 mod
= (modrm
>> 6) & 3;
4045 reg
= ((modrm
>> 3) & 7) | rex_r
;
4046 /* we must ensure that no segment is added */
4050 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4052 gen_op_mov_reg_A0
[ot
- OT_WORD
][reg
]();
4055 case 0xa0: /* mov EAX, Ov */
4057 case 0xa2: /* mov Ov, EAX */
4060 target_ulong offset_addr
;
4065 ot
= dflag
+ OT_WORD
;
4066 #ifdef TARGET_X86_64
4067 if (s
->aflag
== 2) {
4068 offset_addr
= ldq_code(s
->pc
);
4070 if (offset_addr
== (int32_t)offset_addr
)
4071 gen_op_movq_A0_im(offset_addr
);
4073 gen_op_movq_A0_im64(offset_addr
>> 32, offset_addr
);
4078 offset_addr
= insn_get(s
, OT_LONG
);
4080 offset_addr
= insn_get(s
, OT_WORD
);
4082 gen_op_movl_A0_im(offset_addr
);
4084 gen_add_A0_ds_seg(s
);
4086 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4087 gen_op_mov_reg_T0
[ot
][R_EAX
]();
4089 gen_op_mov_TN_reg
[ot
][0][R_EAX
]();
4090 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
4094 case 0xd7: /* xlat */
4095 #ifdef TARGET_X86_64
4096 if (s
->aflag
== 2) {
4097 gen_op_movq_A0_reg
[R_EBX
]();
4098 gen_op_addq_A0_AL();
4102 gen_op_movl_A0_reg
[R_EBX
]();
4103 gen_op_addl_A0_AL();
4105 gen_op_andl_A0_ffff();
4107 gen_add_A0_ds_seg(s
);
4108 gen_op_ldu_T0_A0
[OT_BYTE
+ s
->mem_index
]();
4109 gen_op_mov_reg_T0
[OT_BYTE
][R_EAX
]();
4111 case 0xb0 ... 0xb7: /* mov R, Ib */
4112 val
= insn_get(s
, OT_BYTE
);
4113 gen_op_movl_T0_im(val
);
4114 gen_op_mov_reg_T0
[OT_BYTE
][(b
& 7) | REX_B(s
)]();
4116 case 0xb8 ... 0xbf: /* mov R, Iv */
4117 #ifdef TARGET_X86_64
4121 tmp
= ldq_code(s
->pc
);
4123 reg
= (b
& 7) | REX_B(s
);
4124 gen_movtl_T0_im(tmp
);
4125 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
4129 ot
= dflag
? OT_LONG
: OT_WORD
;
4130 val
= insn_get(s
, ot
);
4131 reg
= (b
& 7) | REX_B(s
);
4132 gen_op_movl_T0_im(val
);
4133 gen_op_mov_reg_T0
[ot
][reg
]();
4137 case 0x91 ... 0x97: /* xchg R, EAX */
4138 ot
= dflag
+ OT_WORD
;
4139 reg
= (b
& 7) | REX_B(s
);
4143 case 0x87: /* xchg Ev, Gv */
4147 ot
= dflag
+ OT_WORD
;
4148 modrm
= ldub_code(s
->pc
++);
4149 reg
= ((modrm
>> 3) & 7) | rex_r
;
4150 mod
= (modrm
>> 6) & 3;
4152 rm
= (modrm
& 7) | REX_B(s
);
4154 gen_op_mov_TN_reg
[ot
][0][reg
]();
4155 gen_op_mov_TN_reg
[ot
][1][rm
]();
4156 gen_op_mov_reg_T0
[ot
][rm
]();
4157 gen_op_mov_reg_T1
[ot
][reg
]();
4159 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4160 gen_op_mov_TN_reg
[ot
][0][reg
]();
4161 /* for xchg, lock is implicit */
4162 if (!(prefixes
& PREFIX_LOCK
))
4164 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4165 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
4166 if (!(prefixes
& PREFIX_LOCK
))
4168 gen_op_mov_reg_T1
[ot
][reg
]();
4171 case 0xc4: /* les Gv */
4176 case 0xc5: /* lds Gv */
4181 case 0x1b2: /* lss Gv */
4184 case 0x1b4: /* lfs Gv */
4187 case 0x1b5: /* lgs Gv */
4190 ot
= dflag
? OT_LONG
: OT_WORD
;
4191 modrm
= ldub_code(s
->pc
++);
4192 reg
= ((modrm
>> 3) & 7) | rex_r
;
4193 mod
= (modrm
>> 6) & 3;
4196 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4197 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
4198 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
4199 /* load the segment first to handle exceptions properly */
4200 gen_op_ldu_T0_A0
[OT_WORD
+ s
->mem_index
]();
4201 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
4202 /* then put the data */
4203 gen_op_mov_reg_T1
[ot
][reg
]();
4205 gen_jmp_im(s
->pc
- s
->cs_base
);
4210 /************************/
4221 ot
= dflag
+ OT_WORD
;
4223 modrm
= ldub_code(s
->pc
++);
4224 mod
= (modrm
>> 6) & 3;
4225 op
= (modrm
>> 3) & 7;
4231 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4234 opreg
= (modrm
& 7) | REX_B(s
);
4239 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
4242 shift
= ldub_code(s
->pc
++);
4244 gen_shifti(s
, op
, ot
, opreg
, shift
);
4259 case 0x1a4: /* shld imm */
4263 case 0x1a5: /* shld cl */
4267 case 0x1ac: /* shrd imm */
4271 case 0x1ad: /* shrd cl */
4275 ot
= dflag
+ OT_WORD
;
4276 modrm
= ldub_code(s
->pc
++);
4277 mod
= (modrm
>> 6) & 3;
4278 rm
= (modrm
& 7) | REX_B(s
);
4279 reg
= ((modrm
>> 3) & 7) | rex_r
;
4282 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4283 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
4285 gen_op_mov_TN_reg
[ot
][0][rm
]();
4287 gen_op_mov_TN_reg
[ot
][1][reg
]();
4290 val
= ldub_code(s
->pc
++);
4297 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
4299 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
4300 if (op
== 0 && ot
!= OT_WORD
)
4301 s
->cc_op
= CC_OP_SHLB
+ ot
;
4303 s
->cc_op
= CC_OP_SARB
+ ot
;
4306 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4307 gen_op_set_cc_op(s
->cc_op
);
4309 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
4311 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
4312 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
4315 gen_op_mov_reg_T0
[ot
][rm
]();
4319 /************************/
4322 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
4323 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4324 /* XXX: what to do if illegal op ? */
4325 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
4328 modrm
= ldub_code(s
->pc
++);
4329 mod
= (modrm
>> 6) & 3;
4331 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
4334 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4336 case 0x00 ... 0x07: /* fxxxs */
4337 case 0x10 ... 0x17: /* fixxxl */
4338 case 0x20 ... 0x27: /* fxxxl */
4339 case 0x30 ... 0x37: /* fixxx */
4346 gen_op_flds_FT0_A0();
4349 gen_op_fildl_FT0_A0();
4352 gen_op_fldl_FT0_A0();
4356 gen_op_fild_FT0_A0();
4360 gen_op_fp_arith_ST0_FT0
[op1
]();
4362 /* fcomp needs pop */
4367 case 0x08: /* flds */
4368 case 0x0a: /* fsts */
4369 case 0x0b: /* fstps */
4370 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4371 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4372 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4377 gen_op_flds_ST0_A0();
4380 gen_op_fildl_ST0_A0();
4383 gen_op_fldl_ST0_A0();
4387 gen_op_fild_ST0_A0();
4394 gen_op_fisttl_ST0_A0();
4397 gen_op_fisttll_ST0_A0();
4401 gen_op_fistt_ST0_A0();
4408 gen_op_fsts_ST0_A0();
4411 gen_op_fistl_ST0_A0();
4414 gen_op_fstl_ST0_A0();
4418 gen_op_fist_ST0_A0();
4426 case 0x0c: /* fldenv mem */
4427 gen_op_fldenv_A0(s
->dflag
);
4429 case 0x0d: /* fldcw mem */
4432 case 0x0e: /* fnstenv mem */
4433 gen_op_fnstenv_A0(s
->dflag
);
4435 case 0x0f: /* fnstcw mem */
4438 case 0x1d: /* fldt mem */
4439 gen_op_fldt_ST0_A0();
4441 case 0x1f: /* fstpt mem */
4442 gen_op_fstt_ST0_A0();
4445 case 0x2c: /* frstor mem */
4446 gen_op_frstor_A0(s
->dflag
);
4448 case 0x2e: /* fnsave mem */
4449 gen_op_fnsave_A0(s
->dflag
);
4451 case 0x2f: /* fnstsw mem */
4454 case 0x3c: /* fbld */
4455 gen_op_fbld_ST0_A0();
4457 case 0x3e: /* fbstp */
4458 gen_op_fbst_ST0_A0();
4461 case 0x3d: /* fildll */
4462 gen_op_fildll_ST0_A0();
4464 case 0x3f: /* fistpll */
4465 gen_op_fistll_ST0_A0();
4472 /* register float ops */
4476 case 0x08: /* fld sti */
4478 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
4480 case 0x09: /* fxchg sti */
4481 case 0x29: /* fxchg4 sti, undocumented op */
4482 case 0x39: /* fxchg7 sti, undocumented op */
4483 gen_op_fxchg_ST0_STN(opreg
);
4485 case 0x0a: /* grp d9/2 */
4488 /* check exceptions (FreeBSD FPU probe) */
4489 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4490 gen_op_set_cc_op(s
->cc_op
);
4491 gen_jmp_im(pc_start
- s
->cs_base
);
4498 case 0x0c: /* grp d9/4 */
4508 gen_op_fcom_ST0_FT0();
4517 case 0x0d: /* grp d9/5 */
4526 gen_op_fldl2t_ST0();
4530 gen_op_fldl2e_ST0();
4538 gen_op_fldlg2_ST0();
4542 gen_op_fldln2_ST0();
4553 case 0x0e: /* grp d9/6 */
4564 case 3: /* fpatan */
4567 case 4: /* fxtract */
4570 case 5: /* fprem1 */
4573 case 6: /* fdecstp */
4577 case 7: /* fincstp */
4582 case 0x0f: /* grp d9/7 */
4587 case 1: /* fyl2xp1 */
4593 case 3: /* fsincos */
4596 case 5: /* fscale */
4599 case 4: /* frndint */
4611 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4612 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4613 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4619 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
4623 gen_op_fmov_FT0_STN(opreg
);
4624 gen_op_fp_arith_ST0_FT0
[op1
]();
4628 case 0x02: /* fcom */
4629 case 0x22: /* fcom2, undocumented op */
4630 gen_op_fmov_FT0_STN(opreg
);
4631 gen_op_fcom_ST0_FT0();
4633 case 0x03: /* fcomp */
4634 case 0x23: /* fcomp3, undocumented op */
4635 case 0x32: /* fcomp5, undocumented op */
4636 gen_op_fmov_FT0_STN(opreg
);
4637 gen_op_fcom_ST0_FT0();
4640 case 0x15: /* da/5 */
4642 case 1: /* fucompp */
4643 gen_op_fmov_FT0_STN(1);
4644 gen_op_fucom_ST0_FT0();
4654 case 0: /* feni (287 only, just do nop here) */
4656 case 1: /* fdisi (287 only, just do nop here) */
4661 case 3: /* fninit */
4664 case 4: /* fsetpm (287 only, just do nop here) */
4670 case 0x1d: /* fucomi */
4671 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4672 gen_op_set_cc_op(s
->cc_op
);
4673 gen_op_fmov_FT0_STN(opreg
);
4674 gen_op_fucomi_ST0_FT0();
4675 s
->cc_op
= CC_OP_EFLAGS
;
4677 case 0x1e: /* fcomi */
4678 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4679 gen_op_set_cc_op(s
->cc_op
);
4680 gen_op_fmov_FT0_STN(opreg
);
4681 gen_op_fcomi_ST0_FT0();
4682 s
->cc_op
= CC_OP_EFLAGS
;
4684 case 0x28: /* ffree sti */
4685 gen_op_ffree_STN(opreg
);
4687 case 0x2a: /* fst sti */
4688 gen_op_fmov_STN_ST0(opreg
);
4690 case 0x2b: /* fstp sti */
4691 case 0x0b: /* fstp1 sti, undocumented op */
4692 case 0x3a: /* fstp8 sti, undocumented op */
4693 case 0x3b: /* fstp9 sti, undocumented op */
4694 gen_op_fmov_STN_ST0(opreg
);
4697 case 0x2c: /* fucom st(i) */
4698 gen_op_fmov_FT0_STN(opreg
);
4699 gen_op_fucom_ST0_FT0();
4701 case 0x2d: /* fucomp st(i) */
4702 gen_op_fmov_FT0_STN(opreg
);
4703 gen_op_fucom_ST0_FT0();
4706 case 0x33: /* de/3 */
4708 case 1: /* fcompp */
4709 gen_op_fmov_FT0_STN(1);
4710 gen_op_fcom_ST0_FT0();
4718 case 0x38: /* ffreep sti, undocumented op */
4719 gen_op_ffree_STN(opreg
);
4722 case 0x3c: /* df/4 */
4725 gen_op_fnstsw_EAX();
4731 case 0x3d: /* fucomip */
4732 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4733 gen_op_set_cc_op(s
->cc_op
);
4734 gen_op_fmov_FT0_STN(opreg
);
4735 gen_op_fucomi_ST0_FT0();
4737 s
->cc_op
= CC_OP_EFLAGS
;
4739 case 0x3e: /* fcomip */
4740 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4741 gen_op_set_cc_op(s
->cc_op
);
4742 gen_op_fmov_FT0_STN(opreg
);
4743 gen_op_fcomi_ST0_FT0();
4745 s
->cc_op
= CC_OP_EFLAGS
;
4747 case 0x10 ... 0x13: /* fcmovxx */
4751 const static uint8_t fcmov_cc
[8] = {
4757 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
4759 gen_op_fcmov_ST0_STN_T0(opreg
);
4766 #ifdef USE_CODE_COPY
4767 s
->tb
->cflags
|= CF_TB_FP_USED
;
4770 /************************/
4773 case 0xa4: /* movsS */
4778 ot
= dflag
+ OT_WORD
;
4780 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4781 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4787 case 0xaa: /* stosS */
4792 ot
= dflag
+ OT_WORD
;
4794 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4795 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4800 case 0xac: /* lodsS */
4805 ot
= dflag
+ OT_WORD
;
4806 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4807 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4812 case 0xae: /* scasS */
4817 ot
= dflag
+ OT_WORD
;
4818 if (prefixes
& PREFIX_REPNZ
) {
4819 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4820 } else if (prefixes
& PREFIX_REPZ
) {
4821 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4824 s
->cc_op
= CC_OP_SUBB
+ ot
;
4828 case 0xa6: /* cmpsS */
4833 ot
= dflag
+ OT_WORD
;
4834 if (prefixes
& PREFIX_REPNZ
) {
4835 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4836 } else if (prefixes
& PREFIX_REPZ
) {
4837 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4840 s
->cc_op
= CC_OP_SUBB
+ ot
;
4843 case 0x6c: /* insS */
4848 ot
= dflag
? OT_LONG
: OT_WORD
;
4849 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4850 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4851 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4856 case 0x6e: /* outsS */
4861 ot
= dflag
? OT_LONG
: OT_WORD
;
4862 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4863 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4864 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4870 /************************/
4877 ot
= dflag
? OT_LONG
: OT_WORD
;
4878 val
= ldub_code(s
->pc
++);
4879 gen_op_movl_T0_im(val
);
4880 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4882 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4889 ot
= dflag
? OT_LONG
: OT_WORD
;
4890 val
= ldub_code(s
->pc
++);
4891 gen_op_movl_T0_im(val
);
4892 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4893 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4901 ot
= dflag
? OT_LONG
: OT_WORD
;
4902 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4903 gen_op_andl_T0_ffff();
4904 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4906 gen_op_mov_reg_T1
[ot
][R_EAX
]();
4913 ot
= dflag
? OT_LONG
: OT_WORD
;
4914 gen_op_mov_TN_reg
[OT_WORD
][0][R_EDX
]();
4915 gen_op_andl_T0_ffff();
4916 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4917 gen_op_mov_TN_reg
[ot
][1][R_EAX
]();
4921 /************************/
4923 case 0xc2: /* ret im */
4924 val
= ldsw_code(s
->pc
);
4927 if (CODE64(s
) && s
->dflag
)
4929 gen_stack_update(s
, val
+ (2 << s
->dflag
));
4931 gen_op_andl_T0_ffff();
4935 case 0xc3: /* ret */
4939 gen_op_andl_T0_ffff();
4943 case 0xca: /* lret im */
4944 val
= ldsw_code(s
->pc
);
4947 if (s
->pe
&& !s
->vm86
) {
4948 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4949 gen_op_set_cc_op(s
->cc_op
);
4950 gen_jmp_im(pc_start
- s
->cs_base
);
4951 gen_op_lret_protected(s
->dflag
, val
);
4955 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4957 gen_op_andl_T0_ffff();
4958 /* NOTE: keeping EIP updated is not a problem in case of
4962 gen_op_addl_A0_im(2 << s
->dflag
);
4963 gen_op_ld_T0_A0
[1 + s
->dflag
+ s
->mem_index
]();
4964 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
4965 /* add stack offset */
4966 gen_stack_update(s
, val
+ (4 << s
->dflag
));
4970 case 0xcb: /* lret */
4973 case 0xcf: /* iret */
4976 gen_op_iret_real(s
->dflag
);
4977 s
->cc_op
= CC_OP_EFLAGS
;
4978 } else if (s
->vm86
) {
4980 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
4982 gen_op_iret_real(s
->dflag
);
4983 s
->cc_op
= CC_OP_EFLAGS
;
4986 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4987 gen_op_set_cc_op(s
->cc_op
);
4988 gen_jmp_im(pc_start
- s
->cs_base
);
4989 gen_op_iret_protected(s
->dflag
, s
->pc
- s
->cs_base
);
4990 s
->cc_op
= CC_OP_EFLAGS
;
4994 case 0xe8: /* call im */
4997 tval
= (int32_t)insn_get(s
, OT_LONG
);
4999 tval
= (int16_t)insn_get(s
, OT_WORD
);
5000 next_eip
= s
->pc
- s
->cs_base
;
5004 gen_movtl_T0_im(next_eip
);
5009 case 0x9a: /* lcall im */
5011 unsigned int selector
, offset
;
5015 ot
= dflag
? OT_LONG
: OT_WORD
;
5016 offset
= insn_get(s
, ot
);
5017 selector
= insn_get(s
, OT_WORD
);
5019 gen_op_movl_T0_im(selector
);
5020 gen_op_movl_T1_imu(offset
);
5023 case 0xe9: /* jmp im */
5025 tval
= (int32_t)insn_get(s
, OT_LONG
);
5027 tval
= (int16_t)insn_get(s
, OT_WORD
);
5028 tval
+= s
->pc
- s
->cs_base
;
5033 case 0xea: /* ljmp im */
5035 unsigned int selector
, offset
;
5039 ot
= dflag
? OT_LONG
: OT_WORD
;
5040 offset
= insn_get(s
, ot
);
5041 selector
= insn_get(s
, OT_WORD
);
5043 gen_op_movl_T0_im(selector
);
5044 gen_op_movl_T1_imu(offset
);
5047 case 0xeb: /* jmp Jb */
5048 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5049 tval
+= s
->pc
- s
->cs_base
;
5054 case 0x70 ... 0x7f: /* jcc Jb */
5055 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5057 case 0x180 ... 0x18f: /* jcc Jv */
5059 tval
= (int32_t)insn_get(s
, OT_LONG
);
5061 tval
= (int16_t)insn_get(s
, OT_WORD
);
5064 next_eip
= s
->pc
- s
->cs_base
;
5068 gen_jcc(s
, b
, tval
, next_eip
);
5071 case 0x190 ... 0x19f: /* setcc Gv */
5072 modrm
= ldub_code(s
->pc
++);
5074 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
5076 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5077 ot
= dflag
+ OT_WORD
;
5078 modrm
= ldub_code(s
->pc
++);
5079 reg
= ((modrm
>> 3) & 7) | rex_r
;
5080 mod
= (modrm
>> 6) & 3;
5083 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5084 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
5086 rm
= (modrm
& 7) | REX_B(s
);
5087 gen_op_mov_TN_reg
[ot
][1][rm
]();
5089 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
5092 /************************/
5094 case 0x9c: /* pushf */
5095 if (s
->vm86
&& s
->iopl
!= 3) {
5096 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5098 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5099 gen_op_set_cc_op(s
->cc_op
);
5100 gen_op_movl_T0_eflags();
5104 case 0x9d: /* popf */
5105 if (s
->vm86
&& s
->iopl
!= 3) {
5106 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5111 gen_op_movl_eflags_T0_cpl0();
5113 gen_op_movw_eflags_T0_cpl0();
5116 if (s
->cpl
<= s
->iopl
) {
5118 gen_op_movl_eflags_T0_io();
5120 gen_op_movw_eflags_T0_io();
5124 gen_op_movl_eflags_T0();
5126 gen_op_movw_eflags_T0();
5131 s
->cc_op
= CC_OP_EFLAGS
;
5132 /* abort translation because TF flag may change */
5133 gen_jmp_im(s
->pc
- s
->cs_base
);
5137 case 0x9e: /* sahf */
5140 gen_op_mov_TN_reg
[OT_BYTE
][0][R_AH
]();
5141 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5142 gen_op_set_cc_op(s
->cc_op
);
5143 gen_op_movb_eflags_T0();
5144 s
->cc_op
= CC_OP_EFLAGS
;
5146 case 0x9f: /* lahf */
5149 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5150 gen_op_set_cc_op(s
->cc_op
);
5151 gen_op_movl_T0_eflags();
5152 gen_op_mov_reg_T0
[OT_BYTE
][R_AH
]();
5154 case 0xf5: /* cmc */
5155 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5156 gen_op_set_cc_op(s
->cc_op
);
5158 s
->cc_op
= CC_OP_EFLAGS
;
5160 case 0xf8: /* clc */
5161 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5162 gen_op_set_cc_op(s
->cc_op
);
5164 s
->cc_op
= CC_OP_EFLAGS
;
5166 case 0xf9: /* stc */
5167 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5168 gen_op_set_cc_op(s
->cc_op
);
5170 s
->cc_op
= CC_OP_EFLAGS
;
5172 case 0xfc: /* cld */
5175 case 0xfd: /* std */
5179 /************************/
5180 /* bit operations */
5181 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5182 ot
= dflag
+ OT_WORD
;
5183 modrm
= ldub_code(s
->pc
++);
5184 op
= (modrm
>> 3) & 7;
5185 mod
= (modrm
>> 6) & 3;
5186 rm
= (modrm
& 7) | REX_B(s
);
5189 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5190 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5192 gen_op_mov_TN_reg
[ot
][0][rm
]();
5195 val
= ldub_code(s
->pc
++);
5196 gen_op_movl_T1_im(val
);
5200 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5201 s
->cc_op
= CC_OP_SARB
+ ot
;
5204 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5206 gen_op_mov_reg_T0
[ot
][rm
]();
5207 gen_op_update_bt_cc();
5210 case 0x1a3: /* bt Gv, Ev */
5213 case 0x1ab: /* bts */
5216 case 0x1b3: /* btr */
5219 case 0x1bb: /* btc */
5222 ot
= dflag
+ OT_WORD
;
5223 modrm
= ldub_code(s
->pc
++);
5224 reg
= ((modrm
>> 3) & 7) | rex_r
;
5225 mod
= (modrm
>> 6) & 3;
5226 rm
= (modrm
& 7) | REX_B(s
);
5227 gen_op_mov_TN_reg
[OT_LONG
][1][reg
]();
5229 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5230 /* specific case: we need to add a displacement */
5231 gen_op_add_bit_A0_T1
[ot
- OT_WORD
]();
5232 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5234 gen_op_mov_TN_reg
[ot
][0][rm
]();
5236 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5237 s
->cc_op
= CC_OP_SARB
+ ot
;
5240 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5242 gen_op_mov_reg_T0
[ot
][rm
]();
5243 gen_op_update_bt_cc();
5246 case 0x1bc: /* bsf */
5247 case 0x1bd: /* bsr */
5248 ot
= dflag
+ OT_WORD
;
5249 modrm
= ldub_code(s
->pc
++);
5250 reg
= ((modrm
>> 3) & 7) | rex_r
;
5251 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5252 /* NOTE: in order to handle the 0 case, we must load the
5253 result. It could be optimized with a generated jump */
5254 gen_op_mov_TN_reg
[ot
][1][reg
]();
5255 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
5256 gen_op_mov_reg_T1
[ot
][reg
]();
5257 s
->cc_op
= CC_OP_LOGICB
+ ot
;
5259 /************************/
5261 case 0x27: /* daa */
5264 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5265 gen_op_set_cc_op(s
->cc_op
);
5267 s
->cc_op
= CC_OP_EFLAGS
;
5269 case 0x2f: /* das */
5272 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5273 gen_op_set_cc_op(s
->cc_op
);
5275 s
->cc_op
= CC_OP_EFLAGS
;
5277 case 0x37: /* aaa */
5280 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5281 gen_op_set_cc_op(s
->cc_op
);
5283 s
->cc_op
= CC_OP_EFLAGS
;
5285 case 0x3f: /* aas */
5288 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5289 gen_op_set_cc_op(s
->cc_op
);
5291 s
->cc_op
= CC_OP_EFLAGS
;
5293 case 0xd4: /* aam */
5296 val
= ldub_code(s
->pc
++);
5298 s
->cc_op
= CC_OP_LOGICB
;
5300 case 0xd5: /* aad */
5303 val
= ldub_code(s
->pc
++);
5305 s
->cc_op
= CC_OP_LOGICB
;
5307 /************************/
5309 case 0x90: /* nop */
5310 /* XXX: xchg + rex handling */
5311 /* XXX: correct lock test for all insn */
5312 if (prefixes
& PREFIX_LOCK
)
5315 case 0x9b: /* fwait */
5316 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
5317 (HF_MP_MASK
| HF_TS_MASK
)) {
5318 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5320 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5321 gen_op_set_cc_op(s
->cc_op
);
5322 gen_jmp_im(pc_start
- s
->cs_base
);
5326 case 0xcc: /* int3 */
5327 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5329 case 0xcd: /* int N */
5330 val
= ldub_code(s
->pc
++);
5331 if (s
->vm86
&& s
->iopl
!= 3) {
5332 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5334 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5337 case 0xce: /* into */
5340 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5341 gen_op_set_cc_op(s
->cc_op
);
5342 gen_jmp_im(pc_start
- s
->cs_base
);
5343 gen_op_into(s
->pc
- pc_start
);
5345 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5347 gen_debug(s
, pc_start
- s
->cs_base
);
5350 tb_flush(cpu_single_env
);
5351 cpu_set_log(CPU_LOG_INT
| CPU_LOG_TB_IN_ASM
);
5354 case 0xfa: /* cli */
5356 if (s
->cpl
<= s
->iopl
) {
5359 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5365 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5369 case 0xfb: /* sti */
5371 if (s
->cpl
<= s
->iopl
) {
5374 /* interruptions are enabled only the first insn after sti */
5375 /* If several instructions disable interrupts, only the
5377 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5378 gen_op_set_inhibit_irq();
5379 /* give a chance to handle pending irqs */
5380 gen_jmp_im(s
->pc
- s
->cs_base
);
5383 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5389 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5393 case 0x62: /* bound */
5396 ot
= dflag
? OT_LONG
: OT_WORD
;
5397 modrm
= ldub_code(s
->pc
++);
5398 reg
= (modrm
>> 3) & 7;
5399 mod
= (modrm
>> 6) & 3;
5402 gen_op_mov_TN_reg
[ot
][0][reg
]();
5403 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5404 gen_jmp_im(pc_start
- s
->cs_base
);
5410 case 0x1c8 ... 0x1cf: /* bswap reg */
5411 reg
= (b
& 7) | REX_B(s
);
5412 #ifdef TARGET_X86_64
5414 gen_op_mov_TN_reg
[OT_QUAD
][0][reg
]();
5416 gen_op_mov_reg_T0
[OT_QUAD
][reg
]();
5420 gen_op_mov_TN_reg
[OT_LONG
][0][reg
]();
5422 gen_op_mov_reg_T0
[OT_LONG
][reg
]();
5425 case 0xd6: /* salc */
5428 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5429 gen_op_set_cc_op(s
->cc_op
);
5432 case 0xe0: /* loopnz */
5433 case 0xe1: /* loopz */
5434 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5435 gen_op_set_cc_op(s
->cc_op
);
5437 case 0xe2: /* loop */
5438 case 0xe3: /* jecxz */
5442 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5443 next_eip
= s
->pc
- s
->cs_base
;
5448 l1
= gen_new_label();
5449 l2
= gen_new_label();
5452 gen_op_jz_ecx
[s
->aflag
](l1
);
5454 gen_op_dec_ECX
[s
->aflag
]();
5457 gen_op_loop
[s
->aflag
][b
](l1
);
5460 gen_jmp_im(next_eip
);
5461 gen_op_jmp_label(l2
);
5468 case 0x130: /* wrmsr */
5469 case 0x132: /* rdmsr */
5471 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5479 case 0x131: /* rdtsc */
5480 gen_jmp_im(pc_start
- s
->cs_base
);
5483 case 0x134: /* sysenter */
5487 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5489 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5490 gen_op_set_cc_op(s
->cc_op
);
5491 s
->cc_op
= CC_OP_DYNAMIC
;
5493 gen_jmp_im(pc_start
- s
->cs_base
);
5498 case 0x135: /* sysexit */
5502 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5504 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5505 gen_op_set_cc_op(s
->cc_op
);
5506 s
->cc_op
= CC_OP_DYNAMIC
;
5508 gen_jmp_im(pc_start
- s
->cs_base
);
5513 #ifdef TARGET_X86_64
5514 case 0x105: /* syscall */
5515 /* XXX: is it usable in real mode ? */
5516 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5517 gen_op_set_cc_op(s
->cc_op
);
5518 s
->cc_op
= CC_OP_DYNAMIC
;
5520 gen_jmp_im(pc_start
- s
->cs_base
);
5521 gen_op_syscall(s
->pc
- pc_start
);
5524 case 0x107: /* sysret */
5526 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5528 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5529 gen_op_set_cc_op(s
->cc_op
);
5530 s
->cc_op
= CC_OP_DYNAMIC
;
5532 gen_jmp_im(pc_start
- s
->cs_base
);
5533 gen_op_sysret(s
->dflag
);
5534 /* condition codes are modified only in long mode */
5536 s
->cc_op
= CC_OP_EFLAGS
;
5541 case 0x1a2: /* cpuid */
5544 case 0xf4: /* hlt */
5546 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5548 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5549 gen_op_set_cc_op(s
->cc_op
);
5550 gen_jmp_im(s
->pc
- s
->cs_base
);
5556 modrm
= ldub_code(s
->pc
++);
5557 mod
= (modrm
>> 6) & 3;
5558 op
= (modrm
>> 3) & 7;
5561 if (!s
->pe
|| s
->vm86
)
5563 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
5567 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5570 if (!s
->pe
|| s
->vm86
)
5573 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5575 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5576 gen_jmp_im(pc_start
- s
->cs_base
);
5581 if (!s
->pe
|| s
->vm86
)
5583 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
5587 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5590 if (!s
->pe
|| s
->vm86
)
5593 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5595 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5596 gen_jmp_im(pc_start
- s
->cs_base
);
5602 if (!s
->pe
|| s
->vm86
)
5604 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5605 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5606 gen_op_set_cc_op(s
->cc_op
);
5611 s
->cc_op
= CC_OP_EFLAGS
;
5618 modrm
= ldub_code(s
->pc
++);
5619 mod
= (modrm
>> 6) & 3;
5620 op
= (modrm
>> 3) & 7;
5626 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5627 gen_op_movl_T0_env(offsetof(CPUX86State
, gdt
.limit
));
5628 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
5629 gen_add_A0_im(s
, 2);
5630 gen_op_movtl_T0_env(offsetof(CPUX86State
, gdt
.base
));
5632 gen_op_andl_T0_im(0xffffff);
5633 gen_op_st_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5638 case 0: /* monitor */
5639 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5642 gen_jmp_im(pc_start
- s
->cs_base
);
5643 #ifdef TARGET_X86_64
5644 if (s
->aflag
== 2) {
5645 gen_op_movq_A0_reg
[R_EBX
]();
5646 gen_op_addq_A0_AL();
5650 gen_op_movl_A0_reg
[R_EBX
]();
5651 gen_op_addl_A0_AL();
5653 gen_op_andl_A0_ffff();
5655 gen_add_A0_ds_seg(s
);
5659 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5662 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5663 gen_op_set_cc_op(s
->cc_op
);
5664 s
->cc_op
= CC_OP_DYNAMIC
;
5666 gen_jmp_im(s
->pc
- s
->cs_base
);
5674 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5675 gen_op_movl_T0_env(offsetof(CPUX86State
, idt
.limit
));
5676 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
5677 gen_add_A0_im(s
, 2);
5678 gen_op_movtl_T0_env(offsetof(CPUX86State
, idt
.base
));
5680 gen_op_andl_T0_im(0xffffff);
5681 gen_op_st_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5689 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5691 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5692 gen_op_ld_T1_A0
[OT_WORD
+ s
->mem_index
]();
5693 gen_add_A0_im(s
, 2);
5694 gen_op_ld_T0_A0
[CODE64(s
) + OT_LONG
+ s
->mem_index
]();
5696 gen_op_andl_T0_im(0xffffff);
5698 gen_op_movtl_env_T0(offsetof(CPUX86State
,gdt
.base
));
5699 gen_op_movl_env_T1(offsetof(CPUX86State
,gdt
.limit
));
5701 gen_op_movtl_env_T0(offsetof(CPUX86State
,idt
.base
));
5702 gen_op_movl_env_T1(offsetof(CPUX86State
,idt
.limit
));
5707 gen_op_movl_T0_env(offsetof(CPUX86State
,cr
[0]));
5708 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
5712 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5714 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5716 gen_jmp_im(s
->pc
- s
->cs_base
);
5720 case 7: /* invlpg */
5722 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5725 #ifdef TARGET_X86_64
5726 if (CODE64(s
) && rm
== 0) {
5728 gen_op_movtl_T0_env(offsetof(CPUX86State
,segs
[R_GS
].base
));
5729 gen_op_movtl_T1_env(offsetof(CPUX86State
,kernelgsbase
));
5730 gen_op_movtl_env_T1(offsetof(CPUX86State
,segs
[R_GS
].base
));
5731 gen_op_movtl_env_T0(offsetof(CPUX86State
,kernelgsbase
));
5738 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5740 gen_jmp_im(s
->pc
- s
->cs_base
);
5749 case 0x108: /* invd */
5750 case 0x109: /* wbinvd */
5752 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5757 case 0x63: /* arpl or movslS (x86_64) */
5758 #ifdef TARGET_X86_64
5761 /* d_ot is the size of destination */
5762 d_ot
= dflag
+ OT_WORD
;
5764 modrm
= ldub_code(s
->pc
++);
5765 reg
= ((modrm
>> 3) & 7) | rex_r
;
5766 mod
= (modrm
>> 6) & 3;
5767 rm
= (modrm
& 7) | REX_B(s
);
5770 gen_op_mov_TN_reg
[OT_LONG
][0][rm
]();
5772 if (d_ot
== OT_QUAD
)
5773 gen_op_movslq_T0_T0();
5774 gen_op_mov_reg_T0
[d_ot
][reg
]();
5776 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5777 if (d_ot
== OT_QUAD
) {
5778 gen_op_lds_T0_A0
[OT_LONG
+ s
->mem_index
]();
5780 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5782 gen_op_mov_reg_T0
[d_ot
][reg
]();
5787 if (!s
->pe
|| s
->vm86
)
5789 ot
= dflag
? OT_LONG
: OT_WORD
;
5790 modrm
= ldub_code(s
->pc
++);
5791 reg
= (modrm
>> 3) & 7;
5792 mod
= (modrm
>> 6) & 3;
5795 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5796 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
5798 gen_op_mov_TN_reg
[ot
][0][rm
]();
5800 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5801 gen_op_set_cc_op(s
->cc_op
);
5803 s
->cc_op
= CC_OP_EFLAGS
;
5805 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
5807 gen_op_mov_reg_T0
[ot
][rm
]();
5809 gen_op_arpl_update();
5812 case 0x102: /* lar */
5813 case 0x103: /* lsl */
5814 if (!s
->pe
|| s
->vm86
)
5816 ot
= dflag
? OT_LONG
: OT_WORD
;
5817 modrm
= ldub_code(s
->pc
++);
5818 reg
= ((modrm
>> 3) & 7) | rex_r
;
5819 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5820 gen_op_mov_TN_reg
[ot
][1][reg
]();
5821 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5822 gen_op_set_cc_op(s
->cc_op
);
5827 s
->cc_op
= CC_OP_EFLAGS
;
5828 gen_op_mov_reg_T1
[ot
][reg
]();
5831 modrm
= ldub_code(s
->pc
++);
5832 mod
= (modrm
>> 6) & 3;
5833 op
= (modrm
>> 3) & 7;
5835 case 0: /* prefetchnta */
5836 case 1: /* prefetchnt0 */
5837 case 2: /* prefetchnt0 */
5838 case 3: /* prefetchnt0 */
5841 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5842 /* nothing more to do */
5844 default: /* nop (multi byte) */
5845 gen_nop_modrm(s
, modrm
);
5849 case 0x119 ... 0x11f: /* nop (multi byte) */
5850 modrm
= ldub_code(s
->pc
++);
5851 gen_nop_modrm(s
, modrm
);
5853 case 0x120: /* mov reg, crN */
5854 case 0x122: /* mov crN, reg */
5856 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5858 modrm
= ldub_code(s
->pc
++);
5859 if ((modrm
& 0xc0) != 0xc0)
5861 rm
= (modrm
& 7) | REX_B(s
);
5862 reg
= ((modrm
>> 3) & 7) | rex_r
;
5874 gen_op_mov_TN_reg
[ot
][0][rm
]();
5875 gen_op_movl_crN_T0(reg
);
5876 gen_jmp_im(s
->pc
- s
->cs_base
);
5879 #if !defined(CONFIG_USER_ONLY)
5881 gen_op_movtl_T0_cr8();
5884 gen_op_movtl_T0_env(offsetof(CPUX86State
,cr
[reg
]));
5885 gen_op_mov_reg_T0
[ot
][rm
]();
5893 case 0x121: /* mov reg, drN */
5894 case 0x123: /* mov drN, reg */
5896 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5898 modrm
= ldub_code(s
->pc
++);
5899 if ((modrm
& 0xc0) != 0xc0)
5901 rm
= (modrm
& 7) | REX_B(s
);
5902 reg
= ((modrm
>> 3) & 7) | rex_r
;
5907 /* XXX: do it dynamically with CR4.DE bit */
5908 if (reg
== 4 || reg
== 5 || reg
>= 8)
5911 gen_op_mov_TN_reg
[ot
][0][rm
]();
5912 gen_op_movl_drN_T0(reg
);
5913 gen_jmp_im(s
->pc
- s
->cs_base
);
5916 gen_op_movtl_T0_env(offsetof(CPUX86State
,dr
[reg
]));
5917 gen_op_mov_reg_T0
[ot
][rm
]();
5921 case 0x106: /* clts */
5923 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5926 /* abort block because static cpu state changed */
5927 gen_jmp_im(s
->pc
- s
->cs_base
);
5931 /* MMX/SSE/SSE2/PNI support */
5932 case 0x1c3: /* MOVNTI reg, mem */
5933 if (!(s
->cpuid_features
& CPUID_SSE2
))
5935 ot
= s
->dflag
== 2 ? OT_QUAD
: OT_LONG
;
5936 modrm
= ldub_code(s
->pc
++);
5937 mod
= (modrm
>> 6) & 3;
5940 reg
= ((modrm
>> 3) & 7) | rex_r
;
5941 /* generate a generic store */
5942 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
5945 modrm
= ldub_code(s
->pc
++);
5946 mod
= (modrm
>> 6) & 3;
5947 op
= (modrm
>> 3) & 7;
5949 case 0: /* fxsave */
5950 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
) ||
5951 (s
->flags
& HF_EM_MASK
))
5953 if (s
->flags
& HF_TS_MASK
) {
5954 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5957 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5958 gen_op_fxsave_A0((s
->dflag
== 2));
5960 case 1: /* fxrstor */
5961 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
) ||
5962 (s
->flags
& HF_EM_MASK
))
5964 if (s
->flags
& HF_TS_MASK
) {
5965 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5968 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5969 gen_op_fxrstor_A0((s
->dflag
== 2));
5971 case 2: /* ldmxcsr */
5972 case 3: /* stmxcsr */
5973 if (s
->flags
& HF_TS_MASK
) {
5974 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5977 if ((s
->flags
& HF_EM_MASK
) || !(s
->flags
& HF_OSFXSR_MASK
) ||
5980 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5982 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
5983 gen_op_movl_env_T0(offsetof(CPUX86State
, mxcsr
));
5985 gen_op_movl_T0_env(offsetof(CPUX86State
, mxcsr
));
5986 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
5989 case 5: /* lfence */
5990 case 6: /* mfence */
5991 if ((modrm
& 0xc7) != 0xc0 || !(s
->cpuid_features
& CPUID_SSE
))
5994 case 7: /* sfence / clflush */
5995 if ((modrm
& 0xc7) == 0xc0) {
5997 if (!(s
->cpuid_features
& CPUID_SSE
))
6001 if (!(s
->cpuid_features
& CPUID_CLFLUSH
))
6003 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
6010 case 0x10d: /* prefetch */
6011 modrm
= ldub_code(s
->pc
++);
6012 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
6013 /* ignore for now */
6015 case 0x110 ... 0x117:
6016 case 0x128 ... 0x12f:
6017 case 0x150 ... 0x177:
6018 case 0x17c ... 0x17f:
6020 case 0x1c4 ... 0x1c6:
6021 case 0x1d0 ... 0x1fe:
6022 gen_sse(s
, b
, pc_start
, rex_r
);
6027 /* lock generation */
6028 if (s
->prefix
& PREFIX_LOCK
)
6032 if (s
->prefix
& PREFIX_LOCK
)
6034 /* XXX: ensure that no lock was generated */
6035 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
6039 #define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6040 #define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6042 /* flags read by an operation */
6043 static uint16_t opc_read_flags
[NB_OPS
] = {
6044 [INDEX_op_aas
] = CC_A
,
6045 [INDEX_op_aaa
] = CC_A
,
6046 [INDEX_op_das
] = CC_A
| CC_C
,
6047 [INDEX_op_daa
] = CC_A
| CC_C
,
6049 /* subtle: due to the incl/decl implementation, C is used */
6050 [INDEX_op_update_inc_cc
] = CC_C
,
6052 [INDEX_op_into
] = CC_O
,
6054 [INDEX_op_jb_subb
] = CC_C
,
6055 [INDEX_op_jb_subw
] = CC_C
,
6056 [INDEX_op_jb_subl
] = CC_C
,
6058 [INDEX_op_jz_subb
] = CC_Z
,
6059 [INDEX_op_jz_subw
] = CC_Z
,
6060 [INDEX_op_jz_subl
] = CC_Z
,
6062 [INDEX_op_jbe_subb
] = CC_Z
| CC_C
,
6063 [INDEX_op_jbe_subw
] = CC_Z
| CC_C
,
6064 [INDEX_op_jbe_subl
] = CC_Z
| CC_C
,
6066 [INDEX_op_js_subb
] = CC_S
,
6067 [INDEX_op_js_subw
] = CC_S
,
6068 [INDEX_op_js_subl
] = CC_S
,
6070 [INDEX_op_jl_subb
] = CC_O
| CC_S
,
6071 [INDEX_op_jl_subw
] = CC_O
| CC_S
,
6072 [INDEX_op_jl_subl
] = CC_O
| CC_S
,
6074 [INDEX_op_jle_subb
] = CC_O
| CC_S
| CC_Z
,
6075 [INDEX_op_jle_subw
] = CC_O
| CC_S
| CC_Z
,
6076 [INDEX_op_jle_subl
] = CC_O
| CC_S
| CC_Z
,
6078 [INDEX_op_loopnzw
] = CC_Z
,
6079 [INDEX_op_loopnzl
] = CC_Z
,
6080 [INDEX_op_loopzw
] = CC_Z
,
6081 [INDEX_op_loopzl
] = CC_Z
,
6083 [INDEX_op_seto_T0_cc
] = CC_O
,
6084 [INDEX_op_setb_T0_cc
] = CC_C
,
6085 [INDEX_op_setz_T0_cc
] = CC_Z
,
6086 [INDEX_op_setbe_T0_cc
] = CC_Z
| CC_C
,
6087 [INDEX_op_sets_T0_cc
] = CC_S
,
6088 [INDEX_op_setp_T0_cc
] = CC_P
,
6089 [INDEX_op_setl_T0_cc
] = CC_O
| CC_S
,
6090 [INDEX_op_setle_T0_cc
] = CC_O
| CC_S
| CC_Z
,
6092 [INDEX_op_setb_T0_subb
] = CC_C
,
6093 [INDEX_op_setb_T0_subw
] = CC_C
,
6094 [INDEX_op_setb_T0_subl
] = CC_C
,
6096 [INDEX_op_setz_T0_subb
] = CC_Z
,
6097 [INDEX_op_setz_T0_subw
] = CC_Z
,
6098 [INDEX_op_setz_T0_subl
] = CC_Z
,
6100 [INDEX_op_setbe_T0_subb
] = CC_Z
| CC_C
,
6101 [INDEX_op_setbe_T0_subw
] = CC_Z
| CC_C
,
6102 [INDEX_op_setbe_T0_subl
] = CC_Z
| CC_C
,
6104 [INDEX_op_sets_T0_subb
] = CC_S
,
6105 [INDEX_op_sets_T0_subw
] = CC_S
,
6106 [INDEX_op_sets_T0_subl
] = CC_S
,
6108 [INDEX_op_setl_T0_subb
] = CC_O
| CC_S
,
6109 [INDEX_op_setl_T0_subw
] = CC_O
| CC_S
,
6110 [INDEX_op_setl_T0_subl
] = CC_O
| CC_S
,
6112 [INDEX_op_setle_T0_subb
] = CC_O
| CC_S
| CC_Z
,
6113 [INDEX_op_setle_T0_subw
] = CC_O
| CC_S
| CC_Z
,
6114 [INDEX_op_setle_T0_subl
] = CC_O
| CC_S
| CC_Z
,
6116 [INDEX_op_movl_T0_eflags
] = CC_OSZAPC
,
6117 [INDEX_op_cmc
] = CC_C
,
6118 [INDEX_op_salc
] = CC_C
,
6120 /* needed for correct flag optimisation before string ops */
6121 [INDEX_op_jnz_ecxw
] = CC_OSZAPC
,
6122 [INDEX_op_jnz_ecxl
] = CC_OSZAPC
,
6123 [INDEX_op_jz_ecxw
] = CC_OSZAPC
,
6124 [INDEX_op_jz_ecxl
] = CC_OSZAPC
,
6126 #ifdef TARGET_X86_64
6127 [INDEX_op_jb_subq
] = CC_C
,
6128 [INDEX_op_jz_subq
] = CC_Z
,
6129 [INDEX_op_jbe_subq
] = CC_Z
| CC_C
,
6130 [INDEX_op_js_subq
] = CC_S
,
6131 [INDEX_op_jl_subq
] = CC_O
| CC_S
,
6132 [INDEX_op_jle_subq
] = CC_O
| CC_S
| CC_Z
,
6134 [INDEX_op_loopnzq
] = CC_Z
,
6135 [INDEX_op_loopzq
] = CC_Z
,
6137 [INDEX_op_setb_T0_subq
] = CC_C
,
6138 [INDEX_op_setz_T0_subq
] = CC_Z
,
6139 [INDEX_op_setbe_T0_subq
] = CC_Z
| CC_C
,
6140 [INDEX_op_sets_T0_subq
] = CC_S
,
6141 [INDEX_op_setl_T0_subq
] = CC_O
| CC_S
,
6142 [INDEX_op_setle_T0_subq
] = CC_O
| CC_S
| CC_Z
,
6144 [INDEX_op_jnz_ecxq
] = CC_OSZAPC
,
6145 [INDEX_op_jz_ecxq
] = CC_OSZAPC
,
6148 #define DEF_READF(SUFFIX)\
6149 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6150 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6151 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6152 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6153 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6154 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6155 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6156 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6158 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6159 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6160 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6161 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6162 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6163 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6164 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6165 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6169 #ifndef CONFIG_USER_ONLY
6175 /* flags written by an operation */
6176 static uint16_t opc_write_flags
[NB_OPS
] = {
6177 [INDEX_op_update2_cc
] = CC_OSZAPC
,
6178 [INDEX_op_update1_cc
] = CC_OSZAPC
,
6179 [INDEX_op_cmpl_T0_T1_cc
] = CC_OSZAPC
,
6180 [INDEX_op_update_neg_cc
] = CC_OSZAPC
,
6181 /* subtle: due to the incl/decl implementation, C is used */
6182 [INDEX_op_update_inc_cc
] = CC_OSZAPC
,
6183 [INDEX_op_testl_T0_T1_cc
] = CC_OSZAPC
,
6185 [INDEX_op_mulb_AL_T0
] = CC_OSZAPC
,
6186 [INDEX_op_mulw_AX_T0
] = CC_OSZAPC
,
6187 [INDEX_op_mull_EAX_T0
] = CC_OSZAPC
,
6188 X86_64_DEF([INDEX_op_mulq_EAX_T0
] = CC_OSZAPC
,)
6189 [INDEX_op_imulb_AL_T0
] = CC_OSZAPC
,
6190 [INDEX_op_imulw_AX_T0
] = CC_OSZAPC
,
6191 [INDEX_op_imull_EAX_T0
] = CC_OSZAPC
,
6192 X86_64_DEF([INDEX_op_imulq_EAX_T0
] = CC_OSZAPC
,)
6193 [INDEX_op_imulw_T0_T1
] = CC_OSZAPC
,
6194 [INDEX_op_imull_T0_T1
] = CC_OSZAPC
,
6195 X86_64_DEF([INDEX_op_imulq_T0_T1
] = CC_OSZAPC
,)
6198 [INDEX_op_ucomiss
] = CC_OSZAPC
,
6199 [INDEX_op_ucomisd
] = CC_OSZAPC
,
6200 [INDEX_op_comiss
] = CC_OSZAPC
,
6201 [INDEX_op_comisd
] = CC_OSZAPC
,
6204 [INDEX_op_aam
] = CC_OSZAPC
,
6205 [INDEX_op_aad
] = CC_OSZAPC
,
6206 [INDEX_op_aas
] = CC_OSZAPC
,
6207 [INDEX_op_aaa
] = CC_OSZAPC
,
6208 [INDEX_op_das
] = CC_OSZAPC
,
6209 [INDEX_op_daa
] = CC_OSZAPC
,
6211 [INDEX_op_movb_eflags_T0
] = CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
,
6212 [INDEX_op_movw_eflags_T0
] = CC_OSZAPC
,
6213 [INDEX_op_movl_eflags_T0
] = CC_OSZAPC
,
6214 [INDEX_op_movw_eflags_T0_io
] = CC_OSZAPC
,
6215 [INDEX_op_movl_eflags_T0_io
] = CC_OSZAPC
,
6216 [INDEX_op_movw_eflags_T0_cpl0
] = CC_OSZAPC
,
6217 [INDEX_op_movl_eflags_T0_cpl0
] = CC_OSZAPC
,
6218 [INDEX_op_clc
] = CC_C
,
6219 [INDEX_op_stc
] = CC_C
,
6220 [INDEX_op_cmc
] = CC_C
,
6222 [INDEX_op_btw_T0_T1_cc
] = CC_OSZAPC
,
6223 [INDEX_op_btl_T0_T1_cc
] = CC_OSZAPC
,
6224 X86_64_DEF([INDEX_op_btq_T0_T1_cc
] = CC_OSZAPC
,)
6225 [INDEX_op_btsw_T0_T1_cc
] = CC_OSZAPC
,
6226 [INDEX_op_btsl_T0_T1_cc
] = CC_OSZAPC
,
6227 X86_64_DEF([INDEX_op_btsq_T0_T1_cc
] = CC_OSZAPC
,)
6228 [INDEX_op_btrw_T0_T1_cc
] = CC_OSZAPC
,
6229 [INDEX_op_btrl_T0_T1_cc
] = CC_OSZAPC
,
6230 X86_64_DEF([INDEX_op_btrq_T0_T1_cc
] = CC_OSZAPC
,)
6231 [INDEX_op_btcw_T0_T1_cc
] = CC_OSZAPC
,
6232 [INDEX_op_btcl_T0_T1_cc
] = CC_OSZAPC
,
6233 X86_64_DEF([INDEX_op_btcq_T0_T1_cc
] = CC_OSZAPC
,)
6235 [INDEX_op_bsfw_T0_cc
] = CC_OSZAPC
,
6236 [INDEX_op_bsfl_T0_cc
] = CC_OSZAPC
,
6237 X86_64_DEF([INDEX_op_bsfq_T0_cc
] = CC_OSZAPC
,)
6238 [INDEX_op_bsrw_T0_cc
] = CC_OSZAPC
,
6239 [INDEX_op_bsrl_T0_cc
] = CC_OSZAPC
,
6240 X86_64_DEF([INDEX_op_bsrq_T0_cc
] = CC_OSZAPC
,)
6242 [INDEX_op_cmpxchgb_T0_T1_EAX_cc
] = CC_OSZAPC
,
6243 [INDEX_op_cmpxchgw_T0_T1_EAX_cc
] = CC_OSZAPC
,
6244 [INDEX_op_cmpxchgl_T0_T1_EAX_cc
] = CC_OSZAPC
,
6245 X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc
] = CC_OSZAPC
,)
6247 [INDEX_op_cmpxchg8b
] = CC_Z
,
6248 [INDEX_op_lar
] = CC_Z
,
6249 [INDEX_op_lsl
] = CC_Z
,
6250 [INDEX_op_verr
] = CC_Z
,
6251 [INDEX_op_verw
] = CC_Z
,
6252 [INDEX_op_fcomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
6253 [INDEX_op_fucomi_ST0_FT0
] = CC_Z
| CC_P
| CC_C
,
6255 #define DEF_WRITEF(SUFFIX)\
6256 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6257 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6258 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6259 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6260 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6261 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6262 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6263 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6265 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6266 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6267 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6268 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6269 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6270 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6271 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6272 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6274 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6275 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6276 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6277 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6278 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6279 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6280 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6281 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6283 [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6284 [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6285 [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6286 X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6288 [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6289 [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6290 [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6291 X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6293 [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6294 [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6295 [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6296 X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6298 [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6299 [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6300 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6301 [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6302 [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6303 X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6305 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6306 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6307 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6308 [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6309 [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6310 X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6312 [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6313 [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6314 [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6315 X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6320 #ifndef CONFIG_USER_ONLY
6326 /* simpler form of an operation if no flags need to be generated */
6327 static uint16_t opc_simpler
[NB_OPS
] = {
6328 [INDEX_op_update2_cc
] = INDEX_op_nop
,
6329 [INDEX_op_update1_cc
] = INDEX_op_nop
,
6330 [INDEX_op_update_neg_cc
] = INDEX_op_nop
,
6332 /* broken: CC_OP logic must be rewritten */
6333 [INDEX_op_update_inc_cc
] = INDEX_op_nop
,
6336 [INDEX_op_shlb_T0_T1_cc
] = INDEX_op_shlb_T0_T1
,
6337 [INDEX_op_shlw_T0_T1_cc
] = INDEX_op_shlw_T0_T1
,
6338 [INDEX_op_shll_T0_T1_cc
] = INDEX_op_shll_T0_T1
,
6339 X86_64_DEF([INDEX_op_shlq_T0_T1_cc
] = INDEX_op_shlq_T0_T1
,)
6341 [INDEX_op_shrb_T0_T1_cc
] = INDEX_op_shrb_T0_T1
,
6342 [INDEX_op_shrw_T0_T1_cc
] = INDEX_op_shrw_T0_T1
,
6343 [INDEX_op_shrl_T0_T1_cc
] = INDEX_op_shrl_T0_T1
,
6344 X86_64_DEF([INDEX_op_shrq_T0_T1_cc
] = INDEX_op_shrq_T0_T1
,)
6346 [INDEX_op_sarb_T0_T1_cc
] = INDEX_op_sarb_T0_T1
,
6347 [INDEX_op_sarw_T0_T1_cc
] = INDEX_op_sarw_T0_T1
,
6348 [INDEX_op_sarl_T0_T1_cc
] = INDEX_op_sarl_T0_T1
,
6349 X86_64_DEF([INDEX_op_sarq_T0_T1_cc
] = INDEX_op_sarq_T0_T1
,)
6351 #define DEF_SIMPLER(SUFFIX)\
6352 [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6353 [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6354 [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6355 X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6357 [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6358 [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6359 [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6360 X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6364 #ifndef CONFIG_USER_ONLY
6365 DEF_SIMPLER(_kernel
)
6370 void optimize_flags_init(void)
6373 /* put default values in arrays */
6374 for(i
= 0; i
< NB_OPS
; i
++) {
6375 if (opc_simpler
[i
] == 0)
6380 /* CPU flags computation optimization: we move backward thru the
6381 generated code to see which flags are needed. The operation is
6382 modified if suitable */
6383 static void optimize_flags(uint16_t *opc_buf
, int opc_buf_len
)
6386 int live_flags
, write_flags
, op
;
6388 opc_ptr
= opc_buf
+ opc_buf_len
;
6389 /* live_flags contains the flags needed by the next instructions
6390 in the code. At the end of the bloc, we consider that all the
6392 live_flags
= CC_OSZAPC
;
6393 while (opc_ptr
> opc_buf
) {
6395 /* if none of the flags written by the instruction is used,
6396 then we can try to find a simpler instruction */
6397 write_flags
= opc_write_flags
[op
];
6398 if ((live_flags
& write_flags
) == 0) {
6399 *opc_ptr
= opc_simpler
[op
];
6401 /* compute the live flags before the instruction */
6402 live_flags
&= ~write_flags
;
6403 live_flags
|= opc_read_flags
[op
];
6407 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6408 basic block 'tb'. If search_pc is TRUE, also generate PC
6409 information for each intermediate instruction. */
6410 static inline int gen_intermediate_code_internal(CPUState
*env
,
6411 TranslationBlock
*tb
,
6414 DisasContext dc1
, *dc
= &dc1
;
6415 target_ulong pc_ptr
;
6416 uint16_t *gen_opc_end
;
6417 int flags
, j
, lj
, cflags
;
6418 target_ulong pc_start
;
6419 target_ulong cs_base
;
6421 /* generate intermediate code */
6423 cs_base
= tb
->cs_base
;
6425 cflags
= tb
->cflags
;
6427 dc
->pe
= (flags
>> HF_PE_SHIFT
) & 1;
6428 dc
->code32
= (flags
>> HF_CS32_SHIFT
) & 1;
6429 dc
->ss32
= (flags
>> HF_SS32_SHIFT
) & 1;
6430 dc
->addseg
= (flags
>> HF_ADDSEG_SHIFT
) & 1;
6432 dc
->vm86
= (flags
>> VM_SHIFT
) & 1;
6433 dc
->cpl
= (flags
>> HF_CPL_SHIFT
) & 3;
6434 dc
->iopl
= (flags
>> IOPL_SHIFT
) & 3;
6435 dc
->tf
= (flags
>> TF_SHIFT
) & 1;
6436 dc
->singlestep_enabled
= env
->singlestep_enabled
;
6437 dc
->cc_op
= CC_OP_DYNAMIC
;
6438 dc
->cs_base
= cs_base
;
6440 dc
->popl_esp_hack
= 0;
6441 /* select memory access functions */
6443 if (flags
& HF_SOFTMMU_MASK
) {
6445 dc
->mem_index
= 2 * 4;
6447 dc
->mem_index
= 1 * 4;
6449 dc
->cpuid_features
= env
->cpuid_features
;
6450 dc
->cpuid_ext_features
= env
->cpuid_ext_features
;
6451 #ifdef TARGET_X86_64
6452 dc
->lma
= (flags
>> HF_LMA_SHIFT
) & 1;
6453 dc
->code64
= (flags
>> HF_CS64_SHIFT
) & 1;
6456 dc
->jmp_opt
= !(dc
->tf
|| env
->singlestep_enabled
||
6457 (flags
& HF_INHIBIT_IRQ_MASK
)
6458 #ifndef CONFIG_SOFTMMU
6459 || (flags
& HF_SOFTMMU_MASK
)
6463 /* check addseg logic */
6464 if (!dc
->addseg
&& (dc
->vm86
|| !dc
->pe
|| !dc
->code32
))
6465 printf("ERROR addseg\n");
6468 gen_opc_ptr
= gen_opc_buf
;
6469 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
6470 gen_opparam_ptr
= gen_opparam_buf
;
6473 dc
->is_jmp
= DISAS_NEXT
;
6478 if (env
->nb_breakpoints
> 0) {
6479 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
6480 if (env
->breakpoints
[j
] == pc_ptr
) {
6481 gen_debug(dc
, pc_ptr
- dc
->cs_base
);
6487 j
= gen_opc_ptr
- gen_opc_buf
;
6491 gen_opc_instr_start
[lj
++] = 0;
6493 gen_opc_pc
[lj
] = pc_ptr
;
6494 gen_opc_cc_op
[lj
] = dc
->cc_op
;
6495 gen_opc_instr_start
[lj
] = 1;
6497 pc_ptr
= disas_insn(dc
, pc_ptr
);
6498 /* stop translation if indicated */
6501 /* if single step mode, we generate only one instruction and
6502 generate an exception */
6503 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6504 the flag and abort the translation to give the irqs a
6505 change to be happen */
6506 if (dc
->tf
|| dc
->singlestep_enabled
||
6507 (flags
& HF_INHIBIT_IRQ_MASK
) ||
6508 (cflags
& CF_SINGLE_INSN
)) {
6509 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6513 /* if too long translation, stop generation too */
6514 if (gen_opc_ptr
>= gen_opc_end
||
6515 (pc_ptr
- pc_start
) >= (TARGET_PAGE_SIZE
- 32)) {
6516 gen_jmp_im(pc_ptr
- dc
->cs_base
);
6521 *gen_opc_ptr
= INDEX_op_end
;
6522 /* we don't forget to fill the last values */
6524 j
= gen_opc_ptr
- gen_opc_buf
;
6527 gen_opc_instr_start
[lj
++] = 0;
6531 if (loglevel
& CPU_LOG_TB_CPU
) {
6532 cpu_dump_state(env
, logfile
, fprintf
, X86_DUMP_CCOP
);
6534 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
6536 fprintf(logfile
, "----------------\n");
6537 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
6538 #ifdef TARGET_X86_64
6543 disas_flags
= !dc
->code32
;
6544 target_disas(logfile
, pc_start
, pc_ptr
- pc_start
, disas_flags
);
6545 fprintf(logfile
, "\n");
6546 if (loglevel
& CPU_LOG_TB_OP
) {
6547 fprintf(logfile
, "OP:\n");
6548 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6549 fprintf(logfile
, "\n");
6554 /* optimize flag computations */
6555 optimize_flags(gen_opc_buf
, gen_opc_ptr
- gen_opc_buf
);
6558 if (loglevel
& CPU_LOG_TB_OP_OPT
) {
6559 fprintf(logfile
, "AFTER FLAGS OPT:\n");
6560 dump_ops(gen_opc_buf
, gen_opparam_buf
);
6561 fprintf(logfile
, "\n");
6565 tb
->size
= pc_ptr
- pc_start
;
6569 int gen_intermediate_code(CPUState
*env
, TranslationBlock
*tb
)
6571 return gen_intermediate_code_internal(env
, tb
, 0);
6574 int gen_intermediate_code_pc(CPUState
*env
, TranslationBlock
*tb
)
6576 return gen_intermediate_code_internal(env
, tb
, 1);