4 * Copyright (c) 2003 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 /* XXX: move that elsewhere */
33 static uint16_t *gen_opc_ptr
;
34 static uint32_t *gen_opparam_ptr
;
36 #define PREFIX_REPZ 0x01
37 #define PREFIX_REPNZ 0x02
38 #define PREFIX_LOCK 0x04
39 #define PREFIX_DATA 0x08
40 #define PREFIX_ADR 0x10
43 #define X86_64_ONLY(x) x
44 #define X86_64_DEF(x...) x
45 #define CODE64(s) ((s)->code64)
46 #define REX_X(s) ((s)->rex_x)
47 #define REX_B(s) ((s)->rex_b)
48 /* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
50 #define BUGGY_64(x) NULL
53 #define X86_64_ONLY(x) NULL
54 #define X86_64_DEF(x...)
61 static int x86_64_hregs
;
64 #ifdef USE_DIRECT_JUMP
67 #define TBPARAM(x) (long)(x)
70 typedef struct DisasContext
{
71 /* current insn context */
72 int override
; /* -1 if no override */
75 target_ulong pc
; /* pc = eip + cs_base */
76 int is_jmp
; /* 1 = means jump (stop translation), 2 means CPU
77 static state change (stop translation) */
78 /* current block context */
79 target_ulong cs_base
; /* base of CS segment */
80 int pe
; /* protected mode */
81 int code32
; /* 32 bit code segment */
83 int lma
; /* long mode active */
84 int code64
; /* 64 bit code segment */
87 int ss32
; /* 32 bit stack segment */
88 int cc_op
; /* current CC operation */
89 int addseg
; /* non zero if either DS/ES/SS have a non zero base */
90 int f_st
; /* currently unused */
91 int vm86
; /* vm86 mode */
94 int tf
; /* TF cpu flag */
95 int singlestep_enabled
; /* "hardware" single step enabled */
96 int jmp_opt
; /* use direct block chaining for direct jumps */
97 int mem_index
; /* select memory access functions */
98 uint64_t flags
; /* all execution flags */
99 struct TranslationBlock
*tb
;
100 int popl_esp_hack
; /* for correct popl with esp base handling */
101 int rip_offset
; /* only used in x86_64, but left for simplicity */
103 int cpuid_ext_features
;
106 static void gen_eob(DisasContext
*s
);
107 static void gen_jmp(DisasContext
*s
, target_ulong eip
);
108 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
);
110 /* i386 arith/logic operations */
130 OP_SHL1
, /* undocumented */
135 #define DEF(s, n, copy_size) INDEX_op_ ## s,
152 /* I386 int registers */
153 OR_EAX
, /* MUST be even numbered */
162 OR_TMP0
= 16, /* temporary operand register */
164 OR_A0
, /* temporary register used when doing address evaluation */
169 #define NB_OP_SIZES 4
171 #define DEF_REGS(prefix, suffix) \
172 prefix ## EAX ## suffix,\
173 prefix ## ECX ## suffix,\
174 prefix ## EDX ## suffix,\
175 prefix ## EBX ## suffix,\
176 prefix ## ESP ## suffix,\
177 prefix ## EBP ## suffix,\
178 prefix ## ESI ## suffix,\
179 prefix ## EDI ## suffix,\
180 prefix ## R8 ## suffix,\
181 prefix ## R9 ## suffix,\
182 prefix ## R10 ## suffix,\
183 prefix ## R11 ## suffix,\
184 prefix ## R12 ## suffix,\
185 prefix ## R13 ## suffix,\
186 prefix ## R14 ## suffix,\
187 prefix ## R15 ## suffix,
189 #define DEF_BREGS(prefixb, prefixh, suffix) \
191 static void prefixb ## ESP ## suffix ## _wrapper(void) \
194 prefixb ## ESP ## suffix (); \
196 prefixh ## EAX ## suffix (); \
199 static void prefixb ## EBP ## suffix ## _wrapper(void) \
202 prefixb ## EBP ## suffix (); \
204 prefixh ## ECX ## suffix (); \
207 static void prefixb ## ESI ## suffix ## _wrapper(void) \
210 prefixb ## ESI ## suffix (); \
212 prefixh ## EDX ## suffix (); \
215 static void prefixb ## EDI ## suffix ## _wrapper(void) \
218 prefixb ## EDI ## suffix (); \
220 prefixh ## EBX ## suffix (); \
223 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T0
)
224 DEF_BREGS(gen_op_movb_
, gen_op_movh_
, _T1
)
225 DEF_BREGS(gen_op_movl_T0_
, gen_op_movh_T0_
, )
226 DEF_BREGS(gen_op_movl_T1_
, gen_op_movh_T1_
, )
228 #else /* !TARGET_X86_64 */
230 #define NB_OP_SIZES 3
232 #define DEF_REGS(prefix, suffix) \
233 prefix ## EAX ## suffix,\
234 prefix ## ECX ## suffix,\
235 prefix ## EDX ## suffix,\
236 prefix ## EBX ## suffix,\
237 prefix ## ESP ## suffix,\
238 prefix ## EBP ## suffix,\
239 prefix ## ESI ## suffix,\
240 prefix ## EDI ## suffix,
242 #endif /* !TARGET_X86_64 */
244 static GenOpFunc
*gen_op_mov_reg_T0
[NB_OP_SIZES
][CPU_NB_REGS
] = {
251 gen_op_movb_ESP_T0_wrapper
,
252 gen_op_movb_EBP_T0_wrapper
,
253 gen_op_movb_ESI_T0_wrapper
,
254 gen_op_movb_EDI_T0_wrapper
,
271 DEF_REGS(gen_op_movw_
, _T0
)
274 DEF_REGS(gen_op_movl_
, _T0
)
278 DEF_REGS(gen_op_movq_
, _T0
)
283 static GenOpFunc
*gen_op_mov_reg_T1
[NB_OP_SIZES
][CPU_NB_REGS
] = {
290 gen_op_movb_ESP_T1_wrapper
,
291 gen_op_movb_EBP_T1_wrapper
,
292 gen_op_movb_ESI_T1_wrapper
,
293 gen_op_movb_EDI_T1_wrapper
,
310 DEF_REGS(gen_op_movw_
, _T1
)
313 DEF_REGS(gen_op_movl_
, _T1
)
317 DEF_REGS(gen_op_movq_
, _T1
)
322 static GenOpFunc
*gen_op_mov_reg_A0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
324 DEF_REGS(gen_op_movw_
, _A0
)
327 DEF_REGS(gen_op_movl_
, _A0
)
331 DEF_REGS(gen_op_movq_
, _A0
)
336 static GenOpFunc
*gen_op_mov_TN_reg
[NB_OP_SIZES
][2][CPU_NB_REGS
] =
345 gen_op_movl_T0_ESP_wrapper
,
346 gen_op_movl_T0_EBP_wrapper
,
347 gen_op_movl_T0_ESI_wrapper
,
348 gen_op_movl_T0_EDI_wrapper
,
370 gen_op_movl_T1_ESP_wrapper
,
371 gen_op_movl_T1_EBP_wrapper
,
372 gen_op_movl_T1_ESI_wrapper
,
373 gen_op_movl_T1_EDI_wrapper
,
392 DEF_REGS(gen_op_movl_T0_
, )
395 DEF_REGS(gen_op_movl_T1_
, )
400 DEF_REGS(gen_op_movl_T0_
, )
403 DEF_REGS(gen_op_movl_T1_
, )
409 DEF_REGS(gen_op_movl_T0_
, )
412 DEF_REGS(gen_op_movl_T1_
, )
418 static GenOpFunc
*gen_op_movl_A0_reg
[CPU_NB_REGS
] = {
419 DEF_REGS(gen_op_movl_A0_
, )
422 static GenOpFunc
*gen_op_addl_A0_reg_sN
[4][CPU_NB_REGS
] = {
424 DEF_REGS(gen_op_addl_A0_
, )
427 DEF_REGS(gen_op_addl_A0_
, _s1
)
430 DEF_REGS(gen_op_addl_A0_
, _s2
)
433 DEF_REGS(gen_op_addl_A0_
, _s3
)
438 static GenOpFunc
*gen_op_movq_A0_reg
[CPU_NB_REGS
] = {
439 DEF_REGS(gen_op_movq_A0_
, )
442 static GenOpFunc
*gen_op_addq_A0_reg_sN
[4][CPU_NB_REGS
] = {
444 DEF_REGS(gen_op_addq_A0_
, )
447 DEF_REGS(gen_op_addq_A0_
, _s1
)
450 DEF_REGS(gen_op_addq_A0_
, _s2
)
453 DEF_REGS(gen_op_addq_A0_
, _s3
)
458 static GenOpFunc
*gen_op_cmov_reg_T1_T0
[NB_OP_SIZES
- 1][CPU_NB_REGS
] = {
460 DEF_REGS(gen_op_cmovw_
, _T1_T0
)
463 DEF_REGS(gen_op_cmovl_
, _T1_T0
)
467 DEF_REGS(gen_op_cmovq_
, _T1_T0
)
472 static GenOpFunc
*gen_op_arith_T0_T1_cc
[8] = {
483 #define DEF_ARITHC(SUFFIX)\
485 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
486 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
489 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
490 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
493 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
494 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
497 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
498 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
501 static GenOpFunc
*gen_op_arithc_T0_T1_cc
[4][2] = {
505 static GenOpFunc
*gen_op_arithc_mem_T0_T1_cc
[3 * 4][2] = {
507 #ifndef CONFIG_USER_ONLY
513 static const int cc_op_arithb
[8] = {
524 #define DEF_CMPXCHG(SUFFIX)\
525 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
526 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
527 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
528 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
530 static GenOpFunc
*gen_op_cmpxchg_T0_T1_EAX_cc
[4] = {
534 static GenOpFunc
*gen_op_cmpxchg_mem_T0_T1_EAX_cc
[3 * 4] = {
536 #ifndef CONFIG_USER_ONLY
542 #define DEF_SHIFT(SUFFIX)\
544 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
545 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
546 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
547 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
548 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
549 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
550 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
551 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
554 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
555 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
556 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
557 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
558 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
559 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
560 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
561 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
564 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
565 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
566 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
567 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
568 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
569 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
570 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
571 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
574 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
575 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
576 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
577 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
578 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
579 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
580 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
581 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
584 static GenOpFunc
*gen_op_shift_T0_T1_cc
[4][8] = {
588 static GenOpFunc
*gen_op_shift_mem_T0_T1_cc
[3 * 4][8] = {
590 #ifndef CONFIG_USER_ONLY
596 #define DEF_SHIFTD(SUFFIX, op)\
602 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
610 X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
614 static GenOpFunc1
*gen_op_shiftd_T0_T1_im_cc
[4][2] = {
618 static GenOpFunc
*gen_op_shiftd_T0_T1_ECX_cc
[4][2] = {
622 static GenOpFunc1
*gen_op_shiftd_mem_T0_T1_im_cc
[3 * 4][2] = {
624 #ifndef CONFIG_USER_ONLY
625 DEF_SHIFTD(_kernel
, im
)
626 DEF_SHIFTD(_user
, im
)
630 static GenOpFunc
*gen_op_shiftd_mem_T0_T1_ECX_cc
[3 * 4][2] = {
631 DEF_SHIFTD(_raw
, ECX
)
632 #ifndef CONFIG_USER_ONLY
633 DEF_SHIFTD(_kernel
, ECX
)
634 DEF_SHIFTD(_user
, ECX
)
638 static GenOpFunc
*gen_op_btx_T0_T1_cc
[3][4] = {
641 gen_op_btsw_T0_T1_cc
,
642 gen_op_btrw_T0_T1_cc
,
643 gen_op_btcw_T0_T1_cc
,
647 gen_op_btsl_T0_T1_cc
,
648 gen_op_btrl_T0_T1_cc
,
649 gen_op_btcl_T0_T1_cc
,
654 gen_op_btsq_T0_T1_cc
,
655 gen_op_btrq_T0_T1_cc
,
656 gen_op_btcq_T0_T1_cc
,
661 static GenOpFunc
*gen_op_add_bit_A0_T1
[3] = {
662 gen_op_add_bitw_A0_T1
,
663 gen_op_add_bitl_A0_T1
,
664 X86_64_ONLY(gen_op_add_bitq_A0_T1
),
667 static GenOpFunc
*gen_op_bsx_T0_cc
[3][2] = {
684 static GenOpFunc
*gen_op_lds_T0_A0
[3 * 4] = {
685 gen_op_ldsb_raw_T0_A0
,
686 gen_op_ldsw_raw_T0_A0
,
687 X86_64_ONLY(gen_op_ldsl_raw_T0_A0
),
689 #ifndef CONFIG_USER_ONLY
690 gen_op_ldsb_kernel_T0_A0
,
691 gen_op_ldsw_kernel_T0_A0
,
692 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0
),
695 gen_op_ldsb_user_T0_A0
,
696 gen_op_ldsw_user_T0_A0
,
697 X86_64_ONLY(gen_op_ldsl_user_T0_A0
),
702 static GenOpFunc
*gen_op_ldu_T0_A0
[3 * 4] = {
703 gen_op_ldub_raw_T0_A0
,
704 gen_op_lduw_raw_T0_A0
,
708 #ifndef CONFIG_USER_ONLY
709 gen_op_ldub_kernel_T0_A0
,
710 gen_op_lduw_kernel_T0_A0
,
714 gen_op_ldub_user_T0_A0
,
715 gen_op_lduw_user_T0_A0
,
721 /* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
722 static GenOpFunc
*gen_op_ld_T0_A0
[3 * 4] = {
723 gen_op_ldub_raw_T0_A0
,
724 gen_op_lduw_raw_T0_A0
,
725 gen_op_ldl_raw_T0_A0
,
726 X86_64_ONLY(gen_op_ldq_raw_T0_A0
),
728 #ifndef CONFIG_USER_ONLY
729 gen_op_ldub_kernel_T0_A0
,
730 gen_op_lduw_kernel_T0_A0
,
731 gen_op_ldl_kernel_T0_A0
,
732 X86_64_ONLY(gen_op_ldq_kernel_T0_A0
),
734 gen_op_ldub_user_T0_A0
,
735 gen_op_lduw_user_T0_A0
,
736 gen_op_ldl_user_T0_A0
,
737 X86_64_ONLY(gen_op_ldq_user_T0_A0
),
741 static GenOpFunc
*gen_op_ld_T1_A0
[3 * 4] = {
742 gen_op_ldub_raw_T1_A0
,
743 gen_op_lduw_raw_T1_A0
,
744 gen_op_ldl_raw_T1_A0
,
745 X86_64_ONLY(gen_op_ldq_raw_T1_A0
),
747 #ifndef CONFIG_USER_ONLY
748 gen_op_ldub_kernel_T1_A0
,
749 gen_op_lduw_kernel_T1_A0
,
750 gen_op_ldl_kernel_T1_A0
,
751 X86_64_ONLY(gen_op_ldq_kernel_T1_A0
),
753 gen_op_ldub_user_T1_A0
,
754 gen_op_lduw_user_T1_A0
,
755 gen_op_ldl_user_T1_A0
,
756 X86_64_ONLY(gen_op_ldq_user_T1_A0
),
760 static GenOpFunc
*gen_op_st_T0_A0
[3 * 4] = {
761 gen_op_stb_raw_T0_A0
,
762 gen_op_stw_raw_T0_A0
,
763 gen_op_stl_raw_T0_A0
,
764 X86_64_ONLY(gen_op_stq_raw_T0_A0
),
766 #ifndef CONFIG_USER_ONLY
767 gen_op_stb_kernel_T0_A0
,
768 gen_op_stw_kernel_T0_A0
,
769 gen_op_stl_kernel_T0_A0
,
770 X86_64_ONLY(gen_op_stq_kernel_T0_A0
),
772 gen_op_stb_user_T0_A0
,
773 gen_op_stw_user_T0_A0
,
774 gen_op_stl_user_T0_A0
,
775 X86_64_ONLY(gen_op_stq_user_T0_A0
),
779 static GenOpFunc
*gen_op_st_T1_A0
[3 * 4] = {
781 gen_op_stw_raw_T1_A0
,
782 gen_op_stl_raw_T1_A0
,
783 X86_64_ONLY(gen_op_stq_raw_T1_A0
),
785 #ifndef CONFIG_USER_ONLY
787 gen_op_stw_kernel_T1_A0
,
788 gen_op_stl_kernel_T1_A0
,
789 X86_64_ONLY(gen_op_stq_kernel_T1_A0
),
792 gen_op_stw_user_T1_A0
,
793 gen_op_stl_user_T1_A0
,
794 X86_64_ONLY(gen_op_stq_user_T1_A0
),
798 static inline void gen_jmp_im(target_ulong pc
)
801 if (pc
== (uint32_t)pc
) {
802 gen_op_movl_eip_im(pc
);
803 } else if (pc
== (int32_t)pc
) {
804 gen_op_movq_eip_im(pc
);
806 gen_op_movq_eip_im64(pc
>> 32, pc
);
809 gen_op_movl_eip_im(pc
);
813 static inline void gen_string_movl_A0_ESI(DisasContext
*s
)
817 override
= s
->override
;
821 gen_op_movq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
822 gen_op_addq_A0_reg_sN
[0][R_ESI
]();
824 gen_op_movq_A0_reg
[R_ESI
]();
830 if (s
->addseg
&& override
< 0)
833 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
834 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
836 gen_op_movl_A0_reg
[R_ESI
]();
839 /* 16 address, always override */
842 gen_op_movl_A0_reg
[R_ESI
]();
843 gen_op_andl_A0_ffff();
844 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
848 static inline void gen_string_movl_A0_EDI(DisasContext
*s
)
852 gen_op_movq_A0_reg
[R_EDI
]();
857 gen_op_movl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
858 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
860 gen_op_movl_A0_reg
[R_EDI
]();
863 gen_op_movl_A0_reg
[R_EDI
]();
864 gen_op_andl_A0_ffff();
865 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_ES
].base
));
869 static GenOpFunc
*gen_op_movl_T0_Dshift
[4] = {
870 gen_op_movl_T0_Dshiftb
,
871 gen_op_movl_T0_Dshiftw
,
872 gen_op_movl_T0_Dshiftl
,
873 X86_64_ONLY(gen_op_movl_T0_Dshiftq
),
876 static GenOpFunc1
*gen_op_jnz_ecx
[3] = {
879 X86_64_ONLY(gen_op_jnz_ecxq
),
882 static GenOpFunc1
*gen_op_jz_ecx
[3] = {
885 X86_64_ONLY(gen_op_jz_ecxq
),
888 static GenOpFunc
*gen_op_dec_ECX
[3] = {
891 X86_64_ONLY(gen_op_decq_ECX
),
894 static GenOpFunc1
*gen_op_string_jnz_sub
[2][4] = {
899 X86_64_ONLY(gen_op_jnz_subq
),
905 X86_64_ONLY(gen_op_jz_subq
),
909 static GenOpFunc
*gen_op_in_DX_T0
[3] = {
915 static GenOpFunc
*gen_op_out_DX_T0
[3] = {
921 static GenOpFunc
*gen_op_in
[3] = {
927 static GenOpFunc
*gen_op_out
[3] = {
933 static GenOpFunc
*gen_check_io_T0
[3] = {
939 static GenOpFunc
*gen_check_io_DX
[3] = {
945 static void gen_check_io(DisasContext
*s
, int ot
, int use_dx
, target_ulong cur_eip
)
947 if (s
->pe
&& (s
->cpl
> s
->iopl
|| s
->vm86
)) {
948 if (s
->cc_op
!= CC_OP_DYNAMIC
)
949 gen_op_set_cc_op(s
->cc_op
);
952 gen_check_io_DX
[ot
]();
954 gen_check_io_T0
[ot
]();
958 static inline void gen_movs(DisasContext
*s
, int ot
)
960 gen_string_movl_A0_ESI(s
);
961 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
962 gen_string_movl_A0_EDI(s
);
963 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
964 gen_op_movl_T0_Dshift
[ot
]();
967 gen_op_addq_ESI_T0();
968 gen_op_addq_EDI_T0();
972 gen_op_addl_ESI_T0();
973 gen_op_addl_EDI_T0();
975 gen_op_addw_ESI_T0();
976 gen_op_addw_EDI_T0();
980 static inline void gen_update_cc_op(DisasContext
*s
)
982 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
983 gen_op_set_cc_op(s
->cc_op
);
984 s
->cc_op
= CC_OP_DYNAMIC
;
988 /* XXX: does not work with gdbstub "ice" single step - not a
990 static int gen_jz_ecx_string(DisasContext
*s
, target_ulong next_eip
)
994 l1
= gen_new_label();
995 l2
= gen_new_label();
996 gen_op_jnz_ecx
[s
->aflag
](l1
);
998 gen_jmp_tb(s
, next_eip
, 1);
1003 static inline void gen_stos(DisasContext
*s
, int ot
)
1005 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1006 gen_string_movl_A0_EDI(s
);
1007 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1008 gen_op_movl_T0_Dshift
[ot
]();
1009 #ifdef TARGET_X86_64
1010 if (s
->aflag
== 2) {
1011 gen_op_addq_EDI_T0();
1015 gen_op_addl_EDI_T0();
1017 gen_op_addw_EDI_T0();
1021 static inline void gen_lods(DisasContext
*s
, int ot
)
1023 gen_string_movl_A0_ESI(s
);
1024 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1025 gen_op_mov_reg_T0
[ot
][R_EAX
]();
1026 gen_op_movl_T0_Dshift
[ot
]();
1027 #ifdef TARGET_X86_64
1028 if (s
->aflag
== 2) {
1029 gen_op_addq_ESI_T0();
1033 gen_op_addl_ESI_T0();
1035 gen_op_addw_ESI_T0();
1039 static inline void gen_scas(DisasContext
*s
, int ot
)
1041 gen_op_mov_TN_reg
[OT_LONG
][0][R_EAX
]();
1042 gen_string_movl_A0_EDI(s
);
1043 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1044 gen_op_cmpl_T0_T1_cc();
1045 gen_op_movl_T0_Dshift
[ot
]();
1046 #ifdef TARGET_X86_64
1047 if (s
->aflag
== 2) {
1048 gen_op_addq_EDI_T0();
1052 gen_op_addl_EDI_T0();
1054 gen_op_addw_EDI_T0();
1058 static inline void gen_cmps(DisasContext
*s
, int ot
)
1060 gen_string_movl_A0_ESI(s
);
1061 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1062 gen_string_movl_A0_EDI(s
);
1063 gen_op_ld_T1_A0
[ot
+ s
->mem_index
]();
1064 gen_op_cmpl_T0_T1_cc();
1065 gen_op_movl_T0_Dshift
[ot
]();
1066 #ifdef TARGET_X86_64
1067 if (s
->aflag
== 2) {
1068 gen_op_addq_ESI_T0();
1069 gen_op_addq_EDI_T0();
1073 gen_op_addl_ESI_T0();
1074 gen_op_addl_EDI_T0();
1076 gen_op_addw_ESI_T0();
1077 gen_op_addw_EDI_T0();
1081 static inline void gen_ins(DisasContext
*s
, int ot
)
1083 gen_string_movl_A0_EDI(s
);
1085 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1086 gen_op_in_DX_T0
[ot
]();
1087 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1088 gen_op_movl_T0_Dshift
[ot
]();
1089 #ifdef TARGET_X86_64
1090 if (s
->aflag
== 2) {
1091 gen_op_addq_EDI_T0();
1095 gen_op_addl_EDI_T0();
1097 gen_op_addw_EDI_T0();
1101 static inline void gen_outs(DisasContext
*s
, int ot
)
1103 gen_string_movl_A0_ESI(s
);
1104 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1105 gen_op_out_DX_T0
[ot
]();
1106 gen_op_movl_T0_Dshift
[ot
]();
1107 #ifdef TARGET_X86_64
1108 if (s
->aflag
== 2) {
1109 gen_op_addq_ESI_T0();
1113 gen_op_addl_ESI_T0();
1115 gen_op_addw_ESI_T0();
1119 /* same method as Valgrind : we generate jumps to current or next
1121 #define GEN_REPZ(op) \
1122 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1123 target_ulong cur_eip, target_ulong next_eip) \
1126 gen_update_cc_op(s); \
1127 l2 = gen_jz_ecx_string(s, next_eip); \
1128 gen_ ## op(s, ot); \
1129 gen_op_dec_ECX[s->aflag](); \
1130 /* a loop would cause two single step exceptions if ECX = 1 \
1131 before rep string_insn */ \
1133 gen_op_jz_ecx[s->aflag](l2); \
1134 gen_jmp(s, cur_eip); \
1137 #define GEN_REPZ2(op) \
1138 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1139 target_ulong cur_eip, \
1140 target_ulong next_eip, \
1144 gen_update_cc_op(s); \
1145 l2 = gen_jz_ecx_string(s, next_eip); \
1146 gen_ ## op(s, ot); \
1147 gen_op_dec_ECX[s->aflag](); \
1148 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1149 gen_op_string_jnz_sub[nz][ot](l2);\
1151 gen_op_jz_ecx[s->aflag](l2); \
1152 gen_jmp(s, cur_eip); \
1174 static GenOpFunc1
*gen_jcc_sub
[4][8] = {
1205 #ifdef TARGET_X86_64
1208 BUGGY_64(gen_op_jb_subq
),
1210 BUGGY_64(gen_op_jbe_subq
),
1213 BUGGY_64(gen_op_jl_subq
),
1214 BUGGY_64(gen_op_jle_subq
),
1218 static GenOpFunc1
*gen_op_loop
[3][4] = {
1229 #ifdef TARGET_X86_64
1238 static GenOpFunc
*gen_setcc_slow
[8] = {
1249 static GenOpFunc
*gen_setcc_sub
[4][8] = {
1252 gen_op_setb_T0_subb
,
1253 gen_op_setz_T0_subb
,
1254 gen_op_setbe_T0_subb
,
1255 gen_op_sets_T0_subb
,
1257 gen_op_setl_T0_subb
,
1258 gen_op_setle_T0_subb
,
1262 gen_op_setb_T0_subw
,
1263 gen_op_setz_T0_subw
,
1264 gen_op_setbe_T0_subw
,
1265 gen_op_sets_T0_subw
,
1267 gen_op_setl_T0_subw
,
1268 gen_op_setle_T0_subw
,
1272 gen_op_setb_T0_subl
,
1273 gen_op_setz_T0_subl
,
1274 gen_op_setbe_T0_subl
,
1275 gen_op_sets_T0_subl
,
1277 gen_op_setl_T0_subl
,
1278 gen_op_setle_T0_subl
,
1280 #ifdef TARGET_X86_64
1283 gen_op_setb_T0_subq
,
1284 gen_op_setz_T0_subq
,
1285 gen_op_setbe_T0_subq
,
1286 gen_op_sets_T0_subq
,
1288 gen_op_setl_T0_subq
,
1289 gen_op_setle_T0_subq
,
1294 static GenOpFunc
*gen_op_fp_arith_ST0_FT0
[8] = {
1295 gen_op_fadd_ST0_FT0
,
1296 gen_op_fmul_ST0_FT0
,
1297 gen_op_fcom_ST0_FT0
,
1298 gen_op_fcom_ST0_FT0
,
1299 gen_op_fsub_ST0_FT0
,
1300 gen_op_fsubr_ST0_FT0
,
1301 gen_op_fdiv_ST0_FT0
,
1302 gen_op_fdivr_ST0_FT0
,
1305 /* NOTE the exception in "r" op ordering */
1306 static GenOpFunc1
*gen_op_fp_arith_STN_ST0
[8] = {
1307 gen_op_fadd_STN_ST0
,
1308 gen_op_fmul_STN_ST0
,
1311 gen_op_fsubr_STN_ST0
,
1312 gen_op_fsub_STN_ST0
,
1313 gen_op_fdivr_STN_ST0
,
1314 gen_op_fdiv_STN_ST0
,
1317 /* if d == OR_TMP0, it means memory operand (address in A0) */
1318 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1320 GenOpFunc
*gen_update_cc
;
1323 gen_op_mov_TN_reg
[ot
][0][d
]();
1325 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1330 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1331 gen_op_set_cc_op(s1
->cc_op
);
1333 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1334 gen_op_mov_reg_T0
[ot
][d
]();
1336 gen_op_arithc_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
- OP_ADCL
]();
1338 s1
->cc_op
= CC_OP_DYNAMIC
;
1341 gen_op_addl_T0_T1();
1342 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1343 gen_update_cc
= gen_op_update2_cc
;
1346 gen_op_subl_T0_T1();
1347 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1348 gen_update_cc
= gen_op_update2_cc
;
1354 gen_op_arith_T0_T1_cc
[op
]();
1355 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1356 gen_update_cc
= gen_op_update1_cc
;
1359 gen_op_cmpl_T0_T1_cc();
1360 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1361 gen_update_cc
= NULL
;
1364 if (op
!= OP_CMPL
) {
1366 gen_op_mov_reg_T0
[ot
][d
]();
1368 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1370 /* the flags update must happen after the memory write (precise
1371 exception support) */
1377 /* if d == OR_TMP0, it means memory operand (address in A0) */
1378 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1381 gen_op_mov_TN_reg
[ot
][0][d
]();
1383 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1384 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1385 gen_op_set_cc_op(s1
->cc_op
);
1388 s1
->cc_op
= CC_OP_INCB
+ ot
;
1391 s1
->cc_op
= CC_OP_DECB
+ ot
;
1394 gen_op_mov_reg_T0
[ot
][d
]();
1396 gen_op_st_T0_A0
[ot
+ s1
->mem_index
]();
1397 gen_op_update_inc_cc();
1400 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1403 gen_op_mov_TN_reg
[ot
][0][d
]();
1405 gen_op_ld_T0_A0
[ot
+ s1
->mem_index
]();
1407 gen_op_mov_TN_reg
[ot
][1][s
]();
1408 /* for zero counts, flags are not updated, so must do it dynamically */
1409 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1410 gen_op_set_cc_op(s1
->cc_op
);
1413 gen_op_shift_T0_T1_cc
[ot
][op
]();
1415 gen_op_shift_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
]();
1417 gen_op_mov_reg_T0
[ot
][d
]();
1418 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1421 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1423 /* currently not optimized */
1424 gen_op_movl_T1_im(c
);
1425 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1428 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1436 int mod
, rm
, code
, override
, must_add_seg
;
1438 override
= s
->override
;
1439 must_add_seg
= s
->addseg
;
1442 mod
= (modrm
>> 6) & 3;
1454 code
= ldub_code(s
->pc
++);
1455 scale
= (code
>> 6) & 3;
1456 index
= ((code
>> 3) & 7) | REX_X(s
);
1463 if ((base
& 7) == 5) {
1465 disp
= (int32_t)ldl_code(s
->pc
);
1467 if (CODE64(s
) && !havesib
) {
1468 disp
+= s
->pc
+ s
->rip_offset
;
1475 disp
= (int8_t)ldub_code(s
->pc
++);
1479 disp
= ldl_code(s
->pc
);
1485 /* for correct popl handling with esp */
1486 if (base
== 4 && s
->popl_esp_hack
)
1487 disp
+= s
->popl_esp_hack
;
1488 #ifdef TARGET_X86_64
1489 if (s
->aflag
== 2) {
1490 gen_op_movq_A0_reg
[base
]();
1492 if ((int32_t)disp
== disp
)
1493 gen_op_addq_A0_im(disp
);
1495 gen_op_addq_A0_im64(disp
>> 32, disp
);
1500 gen_op_movl_A0_reg
[base
]();
1502 gen_op_addl_A0_im(disp
);
1505 #ifdef TARGET_X86_64
1506 if (s
->aflag
== 2) {
1507 if ((int32_t)disp
== disp
)
1508 gen_op_movq_A0_im(disp
);
1510 gen_op_movq_A0_im64(disp
>> 32, disp
);
1514 gen_op_movl_A0_im(disp
);
1517 /* XXX: index == 4 is always invalid */
1518 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1519 #ifdef TARGET_X86_64
1520 if (s
->aflag
== 2) {
1521 gen_op_addq_A0_reg_sN
[scale
][index
]();
1525 gen_op_addl_A0_reg_sN
[scale
][index
]();
1530 if (base
== R_EBP
|| base
== R_ESP
)
1535 #ifdef TARGET_X86_64
1536 if (s
->aflag
== 2) {
1537 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1541 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1548 disp
= lduw_code(s
->pc
);
1550 gen_op_movl_A0_im(disp
);
1551 rm
= 0; /* avoid SS override */
1558 disp
= (int8_t)ldub_code(s
->pc
++);
1562 disp
= lduw_code(s
->pc
);
1568 gen_op_movl_A0_reg
[R_EBX
]();
1569 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1572 gen_op_movl_A0_reg
[R_EBX
]();
1573 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1576 gen_op_movl_A0_reg
[R_EBP
]();
1577 gen_op_addl_A0_reg_sN
[0][R_ESI
]();
1580 gen_op_movl_A0_reg
[R_EBP
]();
1581 gen_op_addl_A0_reg_sN
[0][R_EDI
]();
1584 gen_op_movl_A0_reg
[R_ESI
]();
1587 gen_op_movl_A0_reg
[R_EDI
]();
1590 gen_op_movl_A0_reg
[R_EBP
]();
1594 gen_op_movl_A0_reg
[R_EBX
]();
1598 gen_op_addl_A0_im(disp
);
1599 gen_op_andl_A0_ffff();
1603 if (rm
== 2 || rm
== 3 || rm
== 6)
1608 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1618 static void gen_nop_modrm(DisasContext
*s
, int modrm
)
1620 int mod
, rm
, base
, code
;
1622 mod
= (modrm
>> 6) & 3;
1632 code
= ldub_code(s
->pc
++);
1668 /* used for LEA and MOV AX, mem */
1669 static void gen_add_A0_ds_seg(DisasContext
*s
)
1671 int override
, must_add_seg
;
1672 must_add_seg
= s
->addseg
;
1674 if (s
->override
>= 0) {
1675 override
= s
->override
;
1681 #ifdef TARGET_X86_64
1683 gen_op_addq_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1687 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[override
].base
));
1692 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1694 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1696 int mod
, rm
, opreg
, disp
;
1698 mod
= (modrm
>> 6) & 3;
1699 rm
= (modrm
& 7) | REX_B(s
);
1703 gen_op_mov_TN_reg
[ot
][0][reg
]();
1704 gen_op_mov_reg_T0
[ot
][rm
]();
1706 gen_op_mov_TN_reg
[ot
][0][rm
]();
1708 gen_op_mov_reg_T0
[ot
][reg
]();
1711 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1714 gen_op_mov_TN_reg
[ot
][0][reg
]();
1715 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
1717 gen_op_ld_T0_A0
[ot
+ s
->mem_index
]();
1719 gen_op_mov_reg_T0
[ot
][reg
]();
1724 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1730 ret
= ldub_code(s
->pc
);
1734 ret
= lduw_code(s
->pc
);
1739 ret
= ldl_code(s
->pc
);
1746 static inline int insn_const_size(unsigned int ot
)
1754 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong eip
)
1756 TranslationBlock
*tb
;
1759 pc
= s
->cs_base
+ eip
;
1761 /* NOTE: we handle the case where the TB spans two pages here */
1762 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
1763 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
1764 /* jump to same page: we can use a direct jump */
1766 gen_op_goto_tb0(TBPARAM(tb
));
1768 gen_op_goto_tb1(TBPARAM(tb
));
1770 gen_op_movl_T0_im((long)tb
+ tb_num
);
1773 /* jump to another page: currently not optimized */
1779 static inline void gen_jcc(DisasContext
*s
, int b
,
1780 target_ulong val
, target_ulong next_eip
)
1782 TranslationBlock
*tb
;
1789 jcc_op
= (b
>> 1) & 7;
1793 /* we optimize the cmp/jcc case */
1798 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1801 /* some jumps are easy to compute */
1843 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1846 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1858 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1859 gen_op_set_cc_op(s
->cc_op
);
1860 s
->cc_op
= CC_OP_DYNAMIC
;
1864 gen_setcc_slow
[jcc_op
]();
1865 func
= gen_op_jnz_T0_label
;
1875 l1
= gen_new_label();
1878 gen_goto_tb(s
, 0, next_eip
);
1881 gen_goto_tb(s
, 1, val
);
1886 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1887 gen_op_set_cc_op(s
->cc_op
);
1888 s
->cc_op
= CC_OP_DYNAMIC
;
1890 gen_setcc_slow
[jcc_op
]();
1896 l1
= gen_new_label();
1897 l2
= gen_new_label();
1898 gen_op_jnz_T0_label(l1
);
1899 gen_jmp_im(next_eip
);
1900 gen_op_jmp_label(l2
);
1908 static void gen_setcc(DisasContext
*s
, int b
)
1914 jcc_op
= (b
>> 1) & 7;
1916 /* we optimize the cmp/jcc case */
1921 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1926 /* some jumps are easy to compute */
1953 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1956 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1964 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1965 gen_op_set_cc_op(s
->cc_op
);
1966 func
= gen_setcc_slow
[jcc_op
];
1975 /* move T0 to seg_reg and compute if the CPU state may change. Never
1976 call this function with seg_reg == R_CS */
1977 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, target_ulong cur_eip
)
1979 if (s
->pe
&& !s
->vm86
) {
1980 /* XXX: optimize by finding processor state dynamically */
1981 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1982 gen_op_set_cc_op(s
->cc_op
);
1983 gen_jmp_im(cur_eip
);
1984 gen_op_movl_seg_T0(seg_reg
);
1985 /* abort translation because the addseg value may change or
1986 because ss32 may change. For R_SS, translation must always
1987 stop as a special handling must be done to disable hardware
1988 interrupts for the next instruction */
1989 if (seg_reg
== R_SS
|| (s
->code32
&& seg_reg
< R_FS
))
1992 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1993 if (seg_reg
== R_SS
)
1998 #ifdef TARGET_X86_64
1999 #define SVM_movq_T1_im(x) gen_op_movq_T1_im64((x) >> 32, x)
2001 #define SVM_movq_T1_im(x) gen_op_movl_T1_im(x)
2005 gen_svm_check_io(DisasContext
*s
, target_ulong pc_start
, uint64_t type
)
2007 #if !defined(CONFIG_USER_ONLY)
2008 if(s
->flags
& (1ULL << INTERCEPT_IOIO_PROT
)) {
2009 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2010 gen_op_set_cc_op(s
->cc_op
);
2011 SVM_movq_T1_im(s
->pc
- s
->cs_base
);
2012 gen_jmp_im(pc_start
- s
->cs_base
);
2014 gen_op_svm_check_intercept_io((uint32_t)(type
>> 32), (uint32_t)type
);
2015 s
->cc_op
= CC_OP_DYNAMIC
;
2016 /* FIXME: maybe we could move the io intercept vector to the TB as well
2017 so we know if this is an EOB or not ... let's assume it's not
2024 static inline int svm_is_rep(int prefixes
)
2026 return ((prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) ? 8 : 0);
2030 gen_svm_check_intercept_param(DisasContext
*s
, target_ulong pc_start
,
2031 uint64_t type
, uint64_t param
)
2033 if(!(s
->flags
& (INTERCEPT_SVM_MASK
)))
2034 /* no SVM activated */
2037 /* CRx and DRx reads/writes */
2038 case SVM_EXIT_READ_CR0
... SVM_EXIT_EXCP_BASE
- 1:
2039 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2040 gen_op_set_cc_op(s
->cc_op
);
2041 s
->cc_op
= CC_OP_DYNAMIC
;
2043 gen_jmp_im(pc_start
- s
->cs_base
);
2044 SVM_movq_T1_im(param
);
2046 gen_op_svm_check_intercept_param((uint32_t)(type
>> 32), (uint32_t)type
);
2047 /* this is a special case as we do not know if the interception occurs
2048 so we assume there was none */
2051 if(s
->flags
& (1ULL << INTERCEPT_MSR_PROT
)) {
2052 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2053 gen_op_set_cc_op(s
->cc_op
);
2054 s
->cc_op
= CC_OP_DYNAMIC
;
2056 gen_jmp_im(pc_start
- s
->cs_base
);
2057 SVM_movq_T1_im(param
);
2059 gen_op_svm_check_intercept_param((uint32_t)(type
>> 32), (uint32_t)type
);
2060 /* this is a special case as we do not know if the interception occurs
2061 so we assume there was none */
2066 if(s
->flags
& (1ULL << ((type
- SVM_EXIT_INTR
) + INTERCEPT_INTR
))) {
2067 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2068 gen_op_set_cc_op(s
->cc_op
);
2069 s
->cc_op
= CC_OP_EFLAGS
;
2071 gen_jmp_im(pc_start
- s
->cs_base
);
2072 SVM_movq_T1_im(param
);
2074 gen_op_svm_vmexit(type
>> 32, type
);
2075 /* we can optimize this one so TBs don't get longer
2076 than up to vmexit */
2085 gen_svm_check_intercept(DisasContext
*s
, target_ulong pc_start
, uint64_t type
)
2087 return gen_svm_check_intercept_param(s
, pc_start
, type
, 0);
2090 static inline void gen_stack_update(DisasContext
*s
, int addend
)
2092 #ifdef TARGET_X86_64
2095 gen_op_addq_ESP_8();
2097 gen_op_addq_ESP_im(addend
);
2102 gen_op_addl_ESP_2();
2103 else if (addend
== 4)
2104 gen_op_addl_ESP_4();
2106 gen_op_addl_ESP_im(addend
);
2109 gen_op_addw_ESP_2();
2110 else if (addend
== 4)
2111 gen_op_addw_ESP_4();
2113 gen_op_addw_ESP_im(addend
);
2117 /* generate a push. It depends on ss32, addseg and dflag */
2118 static void gen_push_T0(DisasContext
*s
)
2120 #ifdef TARGET_X86_64
2122 gen_op_movq_A0_reg
[R_ESP
]();
2125 gen_op_st_T0_A0
[OT_QUAD
+ s
->mem_index
]();
2128 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
2130 gen_op_movq_ESP_A0();
2134 gen_op_movl_A0_reg
[R_ESP
]();
2141 gen_op_movl_T1_A0();
2142 gen_op_addl_A0_SS();
2145 gen_op_andl_A0_ffff();
2146 gen_op_movl_T1_A0();
2147 gen_op_addl_A0_SS();
2149 gen_op_st_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2150 if (s
->ss32
&& !s
->addseg
)
2151 gen_op_movl_ESP_A0();
2153 gen_op_mov_reg_T1
[s
->ss32
+ 1][R_ESP
]();
2157 /* generate a push. It depends on ss32, addseg and dflag */
2158 /* slower version for T1, only used for call Ev */
2159 static void gen_push_T1(DisasContext
*s
)
2161 #ifdef TARGET_X86_64
2163 gen_op_movq_A0_reg
[R_ESP
]();
2166 gen_op_st_T1_A0
[OT_QUAD
+ s
->mem_index
]();
2169 gen_op_st_T0_A0
[OT_WORD
+ s
->mem_index
]();
2171 gen_op_movq_ESP_A0();
2175 gen_op_movl_A0_reg
[R_ESP
]();
2182 gen_op_addl_A0_SS();
2185 gen_op_andl_A0_ffff();
2186 gen_op_addl_A0_SS();
2188 gen_op_st_T1_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2190 if (s
->ss32
&& !s
->addseg
)
2191 gen_op_movl_ESP_A0();
2193 gen_stack_update(s
, (-2) << s
->dflag
);
2197 /* two step pop is necessary for precise exceptions */
2198 static void gen_pop_T0(DisasContext
*s
)
2200 #ifdef TARGET_X86_64
2202 gen_op_movq_A0_reg
[R_ESP
]();
2203 gen_op_ld_T0_A0
[(s
->dflag
? OT_QUAD
: OT_WORD
) + s
->mem_index
]();
2207 gen_op_movl_A0_reg
[R_ESP
]();
2210 gen_op_addl_A0_SS();
2212 gen_op_andl_A0_ffff();
2213 gen_op_addl_A0_SS();
2215 gen_op_ld_T0_A0
[s
->dflag
+ 1 + s
->mem_index
]();
2219 static void gen_pop_update(DisasContext
*s
)
2221 #ifdef TARGET_X86_64
2222 if (CODE64(s
) && s
->dflag
) {
2223 gen_stack_update(s
, 8);
2227 gen_stack_update(s
, 2 << s
->dflag
);
2231 static void gen_stack_A0(DisasContext
*s
)
2233 gen_op_movl_A0_ESP();
2235 gen_op_andl_A0_ffff();
2236 gen_op_movl_T1_A0();
2238 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2241 /* NOTE: wrap around in 16 bit not fully handled */
2242 static void gen_pusha(DisasContext
*s
)
2245 gen_op_movl_A0_ESP();
2246 gen_op_addl_A0_im(-16 << s
->dflag
);
2248 gen_op_andl_A0_ffff();
2249 gen_op_movl_T1_A0();
2251 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2252 for(i
= 0;i
< 8; i
++) {
2253 gen_op_mov_TN_reg
[OT_LONG
][0][7 - i
]();
2254 gen_op_st_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2255 gen_op_addl_A0_im(2 << s
->dflag
);
2257 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2260 /* NOTE: wrap around in 16 bit not fully handled */
2261 static void gen_popa(DisasContext
*s
)
2264 gen_op_movl_A0_ESP();
2266 gen_op_andl_A0_ffff();
2267 gen_op_movl_T1_A0();
2268 gen_op_addl_T1_im(16 << s
->dflag
);
2270 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2271 for(i
= 0;i
< 8; i
++) {
2272 /* ESP is not reloaded */
2274 gen_op_ld_T0_A0
[OT_WORD
+ s
->dflag
+ s
->mem_index
]();
2275 gen_op_mov_reg_T0
[OT_WORD
+ s
->dflag
][7 - i
]();
2277 gen_op_addl_A0_im(2 << s
->dflag
);
2279 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2282 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
2287 #ifdef TARGET_X86_64
2289 ot
= s
->dflag
? OT_QUAD
: OT_WORD
;
2292 gen_op_movl_A0_ESP();
2293 gen_op_addq_A0_im(-opsize
);
2294 gen_op_movl_T1_A0();
2297 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2298 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2300 gen_op_enter64_level(level
, (ot
== OT_QUAD
));
2302 gen_op_mov_reg_T1
[ot
][R_EBP
]();
2303 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2304 gen_op_mov_reg_T1
[OT_QUAD
][R_ESP
]();
2308 ot
= s
->dflag
+ OT_WORD
;
2309 opsize
= 2 << s
->dflag
;
2311 gen_op_movl_A0_ESP();
2312 gen_op_addl_A0_im(-opsize
);
2314 gen_op_andl_A0_ffff();
2315 gen_op_movl_T1_A0();
2317 gen_op_addl_A0_seg(offsetof(CPUX86State
,segs
[R_SS
].base
));
2319 gen_op_mov_TN_reg
[OT_LONG
][0][R_EBP
]();
2320 gen_op_st_T0_A0
[ot
+ s
->mem_index
]();
2322 gen_op_enter_level(level
, s
->dflag
);
2324 gen_op_mov_reg_T1
[ot
][R_EBP
]();
2325 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2326 gen_op_mov_reg_T1
[OT_WORD
+ s
->ss32
][R_ESP
]();
2330 static void gen_exception(DisasContext
*s
, int trapno
, target_ulong cur_eip
)
2332 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2333 gen_op_set_cc_op(s
->cc_op
);
2334 gen_jmp_im(cur_eip
);
2335 gen_op_raise_exception(trapno
);
2339 /* an interrupt is different from an exception because of the
2341 static void gen_interrupt(DisasContext
*s
, int intno
,
2342 target_ulong cur_eip
, target_ulong next_eip
)
2344 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2345 gen_op_set_cc_op(s
->cc_op
);
2346 gen_jmp_im(cur_eip
);
2347 gen_op_raise_interrupt(intno
, (int)(next_eip
- cur_eip
));
2351 static void gen_debug(DisasContext
*s
, target_ulong cur_eip
)
2353 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2354 gen_op_set_cc_op(s
->cc_op
);
2355 gen_jmp_im(cur_eip
);
2360 /* generate a generic end of block. Trace exception is also generated
2362 static void gen_eob(DisasContext
*s
)
2364 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2365 gen_op_set_cc_op(s
->cc_op
);
2366 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
2367 gen_op_reset_inhibit_irq();
2369 if (s
->singlestep_enabled
) {
2372 gen_op_single_step();
2380 /* generate a jump to eip. No segment change must happen before as a
2381 direct call to the next block may occur */
2382 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
)
2385 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2386 gen_op_set_cc_op(s
->cc_op
);
2387 s
->cc_op
= CC_OP_DYNAMIC
;
2389 gen_goto_tb(s
, tb_num
, eip
);
2397 static void gen_jmp(DisasContext
*s
, target_ulong eip
)
2399 gen_jmp_tb(s
, eip
, 0);
2402 static void gen_movtl_T0_im(target_ulong val
)
2404 #ifdef TARGET_X86_64
2405 if ((int32_t)val
== val
) {
2406 gen_op_movl_T0_im(val
);
2408 gen_op_movq_T0_im64(val
>> 32, val
);
2411 gen_op_movl_T0_im(val
);
2415 static void gen_movtl_T1_im(target_ulong val
)
2417 #ifdef TARGET_X86_64
2418 if ((int32_t)val
== val
) {
2419 gen_op_movl_T1_im(val
);
2421 gen_op_movq_T1_im64(val
>> 32, val
);
2424 gen_op_movl_T1_im(val
);
2428 static void gen_add_A0_im(DisasContext
*s
, int val
)
2430 #ifdef TARGET_X86_64
2432 gen_op_addq_A0_im(val
);
2435 gen_op_addl_A0_im(val
);
2438 static GenOpFunc1
*gen_ldq_env_A0
[3] = {
2439 gen_op_ldq_raw_env_A0
,
2440 #ifndef CONFIG_USER_ONLY
2441 gen_op_ldq_kernel_env_A0
,
2442 gen_op_ldq_user_env_A0
,
2446 static GenOpFunc1
*gen_stq_env_A0
[3] = {
2447 gen_op_stq_raw_env_A0
,
2448 #ifndef CONFIG_USER_ONLY
2449 gen_op_stq_kernel_env_A0
,
2450 gen_op_stq_user_env_A0
,
2454 static GenOpFunc1
*gen_ldo_env_A0
[3] = {
2455 gen_op_ldo_raw_env_A0
,
2456 #ifndef CONFIG_USER_ONLY
2457 gen_op_ldo_kernel_env_A0
,
2458 gen_op_ldo_user_env_A0
,
2462 static GenOpFunc1
*gen_sto_env_A0
[3] = {
2463 gen_op_sto_raw_env_A0
,
2464 #ifndef CONFIG_USER_ONLY
2465 gen_op_sto_kernel_env_A0
,
2466 gen_op_sto_user_env_A0
,
2470 #define SSE_SPECIAL ((GenOpFunc2 *)1)
2472 #define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2473 #define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2474 gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2476 static GenOpFunc2
*sse_op_table1
[256][4] = {
2477 /* pure SSE operations */
2478 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2479 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2480 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd, movsldup, movddup */
2481 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2482 [0x14] = { gen_op_punpckldq_xmm
, gen_op_punpcklqdq_xmm
},
2483 [0x15] = { gen_op_punpckhdq_xmm
, gen_op_punpckhqdq_xmm
},
2484 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2485 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2487 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2488 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2489 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2490 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2491 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2492 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2493 [0x2e] = { gen_op_ucomiss
, gen_op_ucomisd
},
2494 [0x2f] = { gen_op_comiss
, gen_op_comisd
},
2495 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2496 [0x51] = SSE_FOP(sqrt
),
2497 [0x52] = { gen_op_rsqrtps
, NULL
, gen_op_rsqrtss
, NULL
},
2498 [0x53] = { gen_op_rcpps
, NULL
, gen_op_rcpss
, NULL
},
2499 [0x54] = { gen_op_pand_xmm
, gen_op_pand_xmm
}, /* andps, andpd */
2500 [0x55] = { gen_op_pandn_xmm
, gen_op_pandn_xmm
}, /* andnps, andnpd */
2501 [0x56] = { gen_op_por_xmm
, gen_op_por_xmm
}, /* orps, orpd */
2502 [0x57] = { gen_op_pxor_xmm
, gen_op_pxor_xmm
}, /* xorps, xorpd */
2503 [0x58] = SSE_FOP(add
),
2504 [0x59] = SSE_FOP(mul
),
2505 [0x5a] = { gen_op_cvtps2pd
, gen_op_cvtpd2ps
,
2506 gen_op_cvtss2sd
, gen_op_cvtsd2ss
},
2507 [0x5b] = { gen_op_cvtdq2ps
, gen_op_cvtps2dq
, gen_op_cvttps2dq
},
2508 [0x5c] = SSE_FOP(sub
),
2509 [0x5d] = SSE_FOP(min
),
2510 [0x5e] = SSE_FOP(div
),
2511 [0x5f] = SSE_FOP(max
),
2513 [0xc2] = SSE_FOP(cmpeq
),
2514 [0xc6] = { (GenOpFunc2
*)gen_op_shufps
, (GenOpFunc2
*)gen_op_shufpd
},
2516 /* MMX ops and their SSE extensions */
2517 [0x60] = MMX_OP2(punpcklbw
),
2518 [0x61] = MMX_OP2(punpcklwd
),
2519 [0x62] = MMX_OP2(punpckldq
),
2520 [0x63] = MMX_OP2(packsswb
),
2521 [0x64] = MMX_OP2(pcmpgtb
),
2522 [0x65] = MMX_OP2(pcmpgtw
),
2523 [0x66] = MMX_OP2(pcmpgtl
),
2524 [0x67] = MMX_OP2(packuswb
),
2525 [0x68] = MMX_OP2(punpckhbw
),
2526 [0x69] = MMX_OP2(punpckhwd
),
2527 [0x6a] = MMX_OP2(punpckhdq
),
2528 [0x6b] = MMX_OP2(packssdw
),
2529 [0x6c] = { NULL
, gen_op_punpcklqdq_xmm
},
2530 [0x6d] = { NULL
, gen_op_punpckhqdq_xmm
},
2531 [0x6e] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movd mm, ea */
2532 [0x6f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, , movqdu */
2533 [0x70] = { (GenOpFunc2
*)gen_op_pshufw_mmx
,
2534 (GenOpFunc2
*)gen_op_pshufd_xmm
,
2535 (GenOpFunc2
*)gen_op_pshufhw_xmm
,
2536 (GenOpFunc2
*)gen_op_pshuflw_xmm
},
2537 [0x71] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftw */
2538 [0x72] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftd */
2539 [0x73] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftq */
2540 [0x74] = MMX_OP2(pcmpeqb
),
2541 [0x75] = MMX_OP2(pcmpeqw
),
2542 [0x76] = MMX_OP2(pcmpeql
),
2543 [0x77] = { SSE_SPECIAL
}, /* emms */
2544 [0x7c] = { NULL
, gen_op_haddpd
, NULL
, gen_op_haddps
},
2545 [0x7d] = { NULL
, gen_op_hsubpd
, NULL
, gen_op_hsubps
},
2546 [0x7e] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movd, movd, , movq */
2547 [0x7f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, movdqu */
2548 [0xc4] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pinsrw */
2549 [0xc5] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pextrw */
2550 [0xd0] = { NULL
, gen_op_addsubpd
, NULL
, gen_op_addsubps
},
2551 [0xd1] = MMX_OP2(psrlw
),
2552 [0xd2] = MMX_OP2(psrld
),
2553 [0xd3] = MMX_OP2(psrlq
),
2554 [0xd4] = MMX_OP2(paddq
),
2555 [0xd5] = MMX_OP2(pmullw
),
2556 [0xd6] = { NULL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
},
2557 [0xd7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pmovmskb */
2558 [0xd8] = MMX_OP2(psubusb
),
2559 [0xd9] = MMX_OP2(psubusw
),
2560 [0xda] = MMX_OP2(pminub
),
2561 [0xdb] = MMX_OP2(pand
),
2562 [0xdc] = MMX_OP2(paddusb
),
2563 [0xdd] = MMX_OP2(paddusw
),
2564 [0xde] = MMX_OP2(pmaxub
),
2565 [0xdf] = MMX_OP2(pandn
),
2566 [0xe0] = MMX_OP2(pavgb
),
2567 [0xe1] = MMX_OP2(psraw
),
2568 [0xe2] = MMX_OP2(psrad
),
2569 [0xe3] = MMX_OP2(pavgw
),
2570 [0xe4] = MMX_OP2(pmulhuw
),
2571 [0xe5] = MMX_OP2(pmulhw
),
2572 [0xe6] = { NULL
, gen_op_cvttpd2dq
, gen_op_cvtdq2pd
, gen_op_cvtpd2dq
},
2573 [0xe7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntq, movntq */
2574 [0xe8] = MMX_OP2(psubsb
),
2575 [0xe9] = MMX_OP2(psubsw
),
2576 [0xea] = MMX_OP2(pminsw
),
2577 [0xeb] = MMX_OP2(por
),
2578 [0xec] = MMX_OP2(paddsb
),
2579 [0xed] = MMX_OP2(paddsw
),
2580 [0xee] = MMX_OP2(pmaxsw
),
2581 [0xef] = MMX_OP2(pxor
),
2582 [0xf0] = { NULL
, NULL
, NULL
, SSE_SPECIAL
}, /* lddqu */
2583 [0xf1] = MMX_OP2(psllw
),
2584 [0xf2] = MMX_OP2(pslld
),
2585 [0xf3] = MMX_OP2(psllq
),
2586 [0xf4] = MMX_OP2(pmuludq
),
2587 [0xf5] = MMX_OP2(pmaddwd
),
2588 [0xf6] = MMX_OP2(psadbw
),
2589 [0xf7] = MMX_OP2(maskmov
),
2590 [0xf8] = MMX_OP2(psubb
),
2591 [0xf9] = MMX_OP2(psubw
),
2592 [0xfa] = MMX_OP2(psubl
),
2593 [0xfb] = MMX_OP2(psubq
),
2594 [0xfc] = MMX_OP2(paddb
),
2595 [0xfd] = MMX_OP2(paddw
),
2596 [0xfe] = MMX_OP2(paddl
),
2599 static GenOpFunc2
*sse_op_table2
[3 * 8][2] = {
2600 [0 + 2] = MMX_OP2(psrlw
),
2601 [0 + 4] = MMX_OP2(psraw
),
2602 [0 + 6] = MMX_OP2(psllw
),
2603 [8 + 2] = MMX_OP2(psrld
),
2604 [8 + 4] = MMX_OP2(psrad
),
2605 [8 + 6] = MMX_OP2(pslld
),
2606 [16 + 2] = MMX_OP2(psrlq
),
2607 [16 + 3] = { NULL
, gen_op_psrldq_xmm
},
2608 [16 + 6] = MMX_OP2(psllq
),
2609 [16 + 7] = { NULL
, gen_op_pslldq_xmm
},
2612 static GenOpFunc1
*sse_op_table3
[4 * 3] = {
2615 X86_64_ONLY(gen_op_cvtsq2ss
),
2616 X86_64_ONLY(gen_op_cvtsq2sd
),
2620 X86_64_ONLY(gen_op_cvttss2sq
),
2621 X86_64_ONLY(gen_op_cvttsd2sq
),
2625 X86_64_ONLY(gen_op_cvtss2sq
),
2626 X86_64_ONLY(gen_op_cvtsd2sq
),
2629 static GenOpFunc2
*sse_op_table4
[8][4] = {
2640 static void gen_sse(DisasContext
*s
, int b
, target_ulong pc_start
, int rex_r
)
2642 int b1
, op1_offset
, op2_offset
, is_xmm
, val
, ot
;
2643 int modrm
, mod
, rm
, reg
, reg_addr
, offset_addr
;
2644 GenOpFunc2
*sse_op2
;
2645 GenOpFunc3
*sse_op3
;
2648 if (s
->prefix
& PREFIX_DATA
)
2650 else if (s
->prefix
& PREFIX_REPZ
)
2652 else if (s
->prefix
& PREFIX_REPNZ
)
2656 sse_op2
= sse_op_table1
[b
][b1
];
2659 if (b
<= 0x5f || b
== 0xc6 || b
== 0xc2) {
2669 /* simple MMX/SSE operation */
2670 if (s
->flags
& HF_TS_MASK
) {
2671 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
2674 if (s
->flags
& HF_EM_MASK
) {
2676 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
2679 if (is_xmm
&& !(s
->flags
& HF_OSFXSR_MASK
))
2686 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2687 the static cpu state) */
2692 modrm
= ldub_code(s
->pc
++);
2693 reg
= ((modrm
>> 3) & 7);
2696 mod
= (modrm
>> 6) & 3;
2697 if (sse_op2
== SSE_SPECIAL
) {
2700 case 0x0e7: /* movntq */
2703 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2704 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2706 case 0x1e7: /* movntdq */
2707 case 0x02b: /* movntps */
2708 case 0x12b: /* movntps */
2709 case 0x3f0: /* lddqu */
2712 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2713 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2715 case 0x6e: /* movd mm, ea */
2716 #ifdef TARGET_X86_64
2717 if (s
->dflag
== 2) {
2718 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
2719 gen_op_movq_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2723 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2724 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2727 case 0x16e: /* movd xmm, ea */
2728 #ifdef TARGET_X86_64
2729 if (s
->dflag
== 2) {
2730 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
2731 gen_op_movq_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2735 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2736 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2739 case 0x6f: /* movq mm, ea */
2741 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2742 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2745 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
].mmx
),
2746 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2749 case 0x010: /* movups */
2750 case 0x110: /* movupd */
2751 case 0x028: /* movaps */
2752 case 0x128: /* movapd */
2753 case 0x16f: /* movdqa xmm, ea */
2754 case 0x26f: /* movdqu xmm, ea */
2756 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2757 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2759 rm
= (modrm
& 7) | REX_B(s
);
2760 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[reg
]),
2761 offsetof(CPUX86State
,xmm_regs
[rm
]));
2764 case 0x210: /* movss xmm, ea */
2766 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2767 gen_op_ld_T0_A0
[OT_LONG
+ s
->mem_index
]();
2768 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2770 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2771 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2772 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2774 rm
= (modrm
& 7) | REX_B(s
);
2775 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2776 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2779 case 0x310: /* movsd xmm, ea */
2781 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2782 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2784 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2785 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2787 rm
= (modrm
& 7) | REX_B(s
);
2788 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2789 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2792 case 0x012: /* movlps */
2793 case 0x112: /* movlpd */
2795 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2796 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2799 rm
= (modrm
& 7) | REX_B(s
);
2800 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2801 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2804 case 0x212: /* movsldup */
2806 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2807 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2809 rm
= (modrm
& 7) | REX_B(s
);
2810 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2811 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2812 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2813 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(2)));
2815 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2816 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2817 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2818 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2820 case 0x312: /* movddup */
2822 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2823 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2825 rm
= (modrm
& 7) | REX_B(s
);
2826 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2827 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2829 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2830 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2832 case 0x016: /* movhps */
2833 case 0x116: /* movhpd */
2835 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2836 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2839 rm
= (modrm
& 7) | REX_B(s
);
2840 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2841 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2844 case 0x216: /* movshdup */
2846 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2847 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2849 rm
= (modrm
& 7) | REX_B(s
);
2850 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2851 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(1)));
2852 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2853 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(3)));
2855 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2856 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2857 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2858 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2860 case 0x7e: /* movd ea, mm */
2861 #ifdef TARGET_X86_64
2862 if (s
->dflag
== 2) {
2863 gen_op_movq_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2864 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 1);
2868 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2869 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2872 case 0x17e: /* movd ea, xmm */
2873 #ifdef TARGET_X86_64
2874 if (s
->dflag
== 2) {
2875 gen_op_movq_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2876 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 1);
2880 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2881 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2884 case 0x27e: /* movq xmm, ea */
2886 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2887 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2889 rm
= (modrm
& 7) | REX_B(s
);
2890 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2891 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2893 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2895 case 0x7f: /* movq ea, mm */
2897 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2898 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2901 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2902 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2905 case 0x011: /* movups */
2906 case 0x111: /* movupd */
2907 case 0x029: /* movaps */
2908 case 0x129: /* movapd */
2909 case 0x17f: /* movdqa ea, xmm */
2910 case 0x27f: /* movdqu ea, xmm */
2912 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2913 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2915 rm
= (modrm
& 7) | REX_B(s
);
2916 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
2917 offsetof(CPUX86State
,xmm_regs
[reg
]));
2920 case 0x211: /* movss ea, xmm */
2922 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2923 gen_op_movl_T0_env(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2924 gen_op_st_T0_A0
[OT_LONG
+ s
->mem_index
]();
2926 rm
= (modrm
& 7) | REX_B(s
);
2927 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
2928 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2931 case 0x311: /* movsd ea, xmm */
2933 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2934 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2936 rm
= (modrm
& 7) | REX_B(s
);
2937 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2938 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2941 case 0x013: /* movlps */
2942 case 0x113: /* movlpd */
2944 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2945 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2950 case 0x017: /* movhps */
2951 case 0x117: /* movhpd */
2953 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2954 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
        case 0x71: /* shift mm, im */
        case 0x171: /* shift xmm, im */
            val = ldub_code(s->pc++);
            gen_op_movl_T0_im(val);
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
            op1_offset = offsetof(CPUX86State,xmm_t0);
            gen_op_movl_T0_im(val);
            gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
            gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
            op1_offset = offsetof(CPUX86State,mmx_t0);
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            sse_op2(op2_offset, op1_offset);
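            /* Reading aid, derived from the index expression above:
               sse_op_table2 is indexed by the shift-immediate opcode
               group and by the ModRM reg field.  Opcode 0x71 yields
               ((0x71 - 1) & 3) * 8 = 0, 0x72 yields 8 and 0x73 yields
               16, so each group selects a row of eight entries and
               bits 5..3 of the ModRM byte pick the operation within
               that row. */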
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,mmx_t0);
            gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            gen_op_cvtpi2ps(op1_offset, op2_offset);
            gen_op_cvtpi2pd(op1_offset, op2_offset);
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,xmm_t0);
            gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            gen_op_cvttps2pi(op1_offset, op2_offset);
            gen_op_cvttpd2pi(op1_offset, op2_offset);
            gen_op_cvtps2pi(op1_offset, op2_offset);
            gen_op_cvtpd2pi(op1_offset, op2_offset);
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
            gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
            gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
            op2_offset = offsetof(CPUX86State,xmm_t0);
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
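            /* Worked example for the sse_op_table3 index above, as a
               reading aid: for cvttss2si (b = 0x22c) we have
               (b >> 8) - 2 = 0 and (b & 1) * 4 = 0, so a 32 bit
               destination (s->dflag != 2) lands at index 4 and the
               REX.W form (s->dflag == 2) at index 6, the 64 bit
               variant of the same conversion. */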
        case 0xc4: /* pinsrw */
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
        case 0xc5: /* pextrw */
            val = ldub_code(s->pc++);
            rm = (modrm & 7) | REX_B(s);
            gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
        case 0x1d6: /* movq ea, xmm */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
        case 0x2d6: /* movq2dq */
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
        case 0x3d6: /* movdq2q */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
        case 0xd7: /* pmovmskb */
            rm = (modrm & 7) | REX_B(s);
            gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
        /* generic MMX or SSE operation */
            /* maskmov : we must prepare A0 */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,xmm_t0);
            if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                            b == 0xc2)) {
                /* specific case for SSE single instructions */
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            rm = (modrm & 7) | REX_B(s);
            op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            op2_offset = offsetof(CPUX86State,mmx_t0);
            gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            sse_op2(op1_offset, op2_offset);
            if (b == 0x2e || b == 0x2f) {
                s->cc_op = CC_OP_EFLAGS;
            }
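            /* comis/ucomis (b == 0x2e, 0x2f) write ZF/PF/CF directly,
               which is why cc_op is switched to CC_OP_EFLAGS above:
               the flags now live in the emulated EFLAGS state and must
               not be recomputed from a previous arithmetic op. */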
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;

#ifdef TARGET_X86_64
    s->rip_offset = 0; /* for relative ip address */
    b = ldub_code(s->pc);
    /* check prefixes */
#ifdef TARGET_X86_64
    prefixes |= PREFIX_REPZ;
    prefixes |= PREFIX_REPNZ;
    prefixes |= PREFIX_LOCK;
    prefixes |= PREFIX_DATA;
    prefixes |= PREFIX_ADR;
    rex_w = (b >> 3) & 1;
    rex_r = (b & 0x4) << 1;
    s->rex_x = (b & 0x2) << 2;
    REX_B(s) = (b & 0x1) << 3;
    x86_64_hregs = 1; /* select uniform byte register addressing */
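    /* REX layout reminder for the decode above: a REX prefix is
       0100WRXB, so e.g. 0x4d = 0100 1101b sets W (64 bit operand
       size, rex_w), R (extends the ModRM reg field, rex_r) and B
       (extends the ModRM rm field, REX_B) while leaving X (the SIB
       index extension) clear. */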
    /* 0x66 is ignored if rex.w is set */
    if (prefixes & PREFIX_DATA)
        dflag ^= 1;
    if (!(prefixes & PREFIX_ADR))
        aflag = 2;
    prefixes |= PREFIX_REPZ;
    prefixes |= PREFIX_REPNZ;
    prefixes |= PREFIX_LOCK;
    prefixes |= PREFIX_DATA;
    prefixes |= PREFIX_ADR;
    if (prefixes & PREFIX_DATA)
        dflag ^= 1;
    if (prefixes & PREFIX_ADR)
        aflag ^= 1;
    s->prefix = prefixes;

    /* lock generation */
    if (prefixes & PREFIX_LOCK)
        gen_op_lock();

    /* now check op code */
        /**************************/
        /* extended op code */
        b = ldub_code(s->pc++) | 0x100;
        /**************************/
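        /* Two byte opcodes (0x0f xx) are folded into the same switch
           by OR'ing 0x100 into b, so e.g. 0x0f 0xaf (imul Gv, Ev) is
           decoded below as case 0x1af. */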
        ot = dflag + OT_WORD;
        case 0: /* OP Ev, Gv */
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            } else if (op == OP_XORL && rm == reg) {
                /* xor reg, reg optimisation */
                s->cc_op = CC_OP_LOGICB + ot;
                gen_op_mov_reg_T0[ot][reg]();
                gen_op_update1_cc();
            gen_op_mov_TN_reg[ot][1][reg]();
            gen_op(s, op, ot, opreg);
        case 1: /* OP Gv, Ev */
            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            reg = ((modrm >> 3) & 7) | rex_r;
            rm = (modrm & 7) | REX_B(s);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0[ot + s->mem_index]();
            } else if (op == OP_XORL && rm == reg) {
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op(s, op, ot, reg);
        case 2: /* OP A, Iv */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, OR_EAX);
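        /* ModRM decode example for the pattern used above: modrm =
           0xd1 is 11 010 001 in binary, so mod = 3 (register
           operand), reg = 2 and rm = 1; with rex_r and REX_B(s)
           zero this selects EDX and ECX, while REX.R or REX.B move
           the same fields up to R10 or R9. */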
    case 0x80: /* GRP1 */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        s->rip_offset = insn_const_size(ot);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        val = insn_get(s, ot);
        val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T1_im(val);
        gen_op(s, op, ot, opreg);

        /**************************/
        /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
    case 0xf6: /* GRP3 */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        s->rip_offset = insn_const_size(ot);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_mov_TN_reg[ot][0][rm]();
        val = insn_get(s, ot);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        gen_op_st_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_op_update_neg_cc();
        s->cc_op = CC_OP_SUBB + ot;
        gen_op_mulb_AL_T0();
        s->cc_op = CC_OP_MULB;
        gen_op_mulw_AX_T0();
        s->cc_op = CC_OP_MULW;
        gen_op_mull_EAX_T0();
        s->cc_op = CC_OP_MULL;
#ifdef TARGET_X86_64
        gen_op_mulq_EAX_T0();
        s->cc_op = CC_OP_MULQ;
        gen_op_imulb_AL_T0();
        s->cc_op = CC_OP_MULB;
        gen_op_imulw_AX_T0();
        s->cc_op = CC_OP_MULW;
        gen_op_imull_EAX_T0();
        s->cc_op = CC_OP_MULL;
#ifdef TARGET_X86_64
        gen_op_imulq_EAX_T0();
        s->cc_op = CC_OP_MULQ;
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_divb_AL_T0();
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_divw_AX_T0();
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_divl_EAX_T0();
#ifdef TARGET_X86_64
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_divq_EAX_T0();
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_idivb_AL_T0();
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_idivw_AX_T0();
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_idivl_EAX_T0();
#ifdef TARGET_X86_64
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_idivq_EAX_T0();
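        /* div/idiv can raise #DE at run time, so each variant above
           first emits gen_jmp_im(pc_start - s->cs_base): this updates
           the saved EIP to point at the faulting instruction before
           the potentially trapping operation executes. */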
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op;
        }
        if (op == 2 || op == 4) {
            /* operand size for jumps is 64 bit */
            ot = OT_QUAD;
        } else if (op == 3 || op == 5) {
            /* for call calls, the operand is 16 or 32 bit, even
               in long mode */
            ot = dflag ? OT_LONG : OT_WORD;
        } else if (op == 6) {
            /* default push size is 64 bit */
            ot = dflag ? OT_QUAD : OT_WORD;
        }
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        if (op >= 2 && op != 3 && op != 5)
            gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_mov_TN_reg[ot][0][rm]();
        case 0: /* inc Ev */
            gen_inc(s, ot, opreg, 1);
        case 1: /* dec Ev */
            gen_inc(s, ot, opreg, -1);
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
            gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
        case 3: /* lcall Ev */
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
            } else {
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
            }
        case 4: /* jmp Ev */
            gen_op_andl_T0_ffff();
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
            } else {
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
                gen_op_movl_T0_T1();
            }
        case 6: /* push Ev */
    case 0x84: /* test Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg[ot][1][reg]();
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
    case 0xa8: /* test eAX, Iv */
        ot = dflag + OT_WORD;
        val = insn_get(s, ot);
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        gen_op_movslq_RAX_EAX();
        gen_op_movswl_EAX_AX();
        gen_op_movsbw_AX_AL();
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        gen_op_movsqo_RDX_RAX();
        gen_op_movslq_EDX_EAX();
        gen_op_movswl_DX_AX();
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        s->rip_offset = insn_const_size(ot);
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        val = insn_get(s, ot);
        gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        gen_op_mov_TN_reg[ot][1][reg]();
#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
            gen_op_imulw_T0_T1();
        gen_op_mov_reg_T0[ot][reg]();
        s->cc_op = CC_OP_MULB + ot;
    case 0x1c1: /* xadd Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[ot][0][reg]();
        gen_op_mov_TN_reg[ot][1][rm]();
        gen_op_addl_T0_T1();
        gen_op_mov_reg_T1[ot][reg]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_TN_reg[ot][0][reg]();
        gen_op_ld_T1_A0[ot + s->mem_index]();
        gen_op_addl_T0_T1();
        gen_op_st_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T1[ot][reg]();
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
    case 0x1b1: /* cmpxchg Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg[ot][1][reg]();
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[ot][0][rm]();
        gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        s->cc_op = CC_OP_SUBB + ot;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_jmp_im(pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->cc_op = CC_OP_EFLAGS;
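        /* Condition codes are evaluated lazily: s->cc_op records, at
           translation time, which operation last produced the flags
           (e.g. CC_OP_SUBB + ot after cmpxchg above), and the real
           EFLAGS bits are only computed when something actually
           consumes them.  gen_op_set_cc_op() flushes this static
           knowledge into the CPU state whenever a following op needs
           the flags at run time. */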
        /**************************/
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
    case 0x58 ... 0x5f: /* pop */
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        /* NOTE: order is important for pop %sp */
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
    case 0x60: /* pusha */
    case 0x61: /* popa */
    case 0x68: /* push Iv */
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        val = insn_get(s, ot);
        val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
    case 0x8f: /* pop Ev */
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        /* NOTE: order is important for pop %sp */
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_reg_T0[ot][rm]();
        /* NOTE: order is important too for MMU exceptions */
        s->popl_esp_hack = 1 << ot;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        s->popl_esp_hack = 0;
    case 0xc8: /* enter */
        val = lduw_code(s->pc);
        level = ldub_code(s->pc++);
        gen_enter(s, val, level);
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
        gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
        } else if (s->ss32) {
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
        ot = dflag ? OT_QUAD : OT_WORD;
        ot = dflag + OT_WORD;
        gen_op_mov_reg_T0[ot][R_EBP]();
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        gen_op_movl_T0_seg(b >> 3);
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        /* if reg == SS, inhibit interrupts/trace. */
        /* If several instructions disable interrupts, only the
           _first_ does it */
        if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
            gen_op_set_inhibit_irq();
        gen_jmp_im(s->pc - s->cs_base);
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_jmp_im(s->pc - s->cs_base);
        /**************************/
    case 0x89: /* mov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
    case 0xc7: /* mov Ev, Iv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        s->rip_offset = insn_const_size(ot);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        gen_op_st_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
    case 0x8b: /* mov Ev, Gv */
        ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0[ot][reg]();
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        /* if reg == SS, inhibit interrupts/trace */
        /* If several instructions disable interrupts, only the
           _first_ does it */
        if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
            gen_op_set_inhibit_irq();
        gen_jmp_im(s->pc - s->cs_base);
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_op_movl_T0_seg(reg);
        ot = OT_WORD + dflag;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        /* d_ot is the size of destination */
        d_ot = dflag + OT_WORD;
        /* ot is the size of source */
        ot = (b & 1) + OT_BYTE;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[ot][0][rm]();
        switch(ot | (b & 8)) {
            gen_op_movzbl_T0_T0();
            gen_op_movsbl_T0_T0();
            gen_op_movzwl_T0_T0();
            gen_op_movswl_T0_T0();
        gen_op_mov_reg_T0[d_ot][reg]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_lds_T0_A0[ot + s->mem_index]();
        gen_op_ldu_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[d_ot][reg]();
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
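        /* lea reuses the same gen_lea_modrm() used for memory
           operands but, per the comment above, with segment bases
           suppressed: the instruction yields the raw effective
           address in A0, which is then copied to the destination
           register. */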
    case 0xa0: /* mov EAX, Ov */
    case 0xa2: /* mov Ov, EAX */
        target_ulong offset_addr;
        ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            offset_addr = ldq_code(s->pc);
            if (offset_addr == (int32_t)offset_addr)
                gen_op_movq_A0_im(offset_addr);
            else
                gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
        offset_addr = insn_get(s, OT_LONG);
        offset_addr = insn_get(s, OT_WORD);
        gen_op_movl_A0_im(offset_addr);
        gen_add_A0_ds_seg(s);
        gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[ot][R_EAX]();
        gen_op_mov_TN_reg[ot][0][R_EAX]();
        gen_op_st_T0_A0[ot + s->mem_index]();
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg[R_EBX]();
            gen_op_addq_A0_AL();
        } else
#endif
        {
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_AL();
            gen_op_andl_A0_ffff();
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        tmp = ldq_code(s->pc);
        reg = (b & 7) | REX_B(s);
        gen_movtl_T0_im(tmp);
        gen_op_mov_reg_T0[OT_QUAD][reg]();
        ot = dflag ? OT_LONG : OT_WORD;
        val = insn_get(s, ot);
        reg = (b & 7) | REX_B(s);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0[ot][reg]();
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
    case 0x87: /* xchg Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[ot][0][reg]();
        gen_op_mov_TN_reg[ot][1][rm]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_op_mov_reg_T1[ot][reg]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_TN_reg[ot][0][reg]();
        /* for xchg, lock is implicit */
        if (!(prefixes & PREFIX_LOCK))
            gen_op_lock();
        gen_op_ld_T1_A0[ot + s->mem_index]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (!(prefixes & PREFIX_LOCK))
            gen_op_unlock();
        gen_op_mov_reg_T1[ot][reg]();
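        /* xchg with a memory operand is architecturally locked, so
           the bus lock is taken here even without a LOCK prefix; the
           tests against PREFIX_LOCK only avoid taking and releasing
           it twice when the prefix handling already did. */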
    case 0xc4: /* les Gv */
    case 0xc5: /* lds Gv */
    case 0x1b2: /* lss Gv */
    case 0x1b4: /* lfs Gv */
    case 0x1b5: /* lgs Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0[ot + s->mem_index]();
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1[ot][reg]();
        gen_jmp_im(s->pc - s->cs_base);
        /************************/
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        opreg = (modrm & 7) | REX_B(s);
        gen_shift(s, op, ot, opreg, OR_ECX);
        shift = ldub_code(s->pc++);
        gen_shifti(s, op, ot, opreg, shift);
    case 0x1a4: /* shld imm */
    case 0x1a5: /* shld cl */
    case 0x1ac: /* shrd imm */
    case 0x1ad: /* shrd cl */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_mov_TN_reg[ot][0][rm]();
        gen_op_mov_TN_reg[ot][1][reg]();
        val = ldub_code(s->pc++);
        gen_op_shiftd_T0_T1_im_cc[ot][op](val);
        gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
        if (op == 0 && ot != OT_WORD)
            s->cc_op = CC_OP_SHLB + ot;
        else
            s->cc_op = CC_OP_SARB + ot;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
        gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
        s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
        gen_op_mov_reg_T0[ot][rm]();
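        /* shld/shrd by CL ends with s->cc_op = CC_OP_DYNAMIC because
           the shift count is only known at run time: a zero count
           leaves the flags untouched, so the translator cannot claim
           statically how they were produced (see the "cannot predict
           flags after" note above). */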
        /************************/
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        case 0x00 ... 0x07: /* fxxxs */
        case 0x10 ... 0x17: /* fixxxl */
        case 0x20 ... 0x27: /* fxxxl */
        case 0x30 ... 0x37: /* fixxx */
            gen_op_flds_FT0_A0();
            gen_op_fildl_FT0_A0();
            gen_op_fldl_FT0_A0();
            gen_op_fild_FT0_A0();
            gen_op_fp_arith_ST0_FT0[op1]();
            /* fcomp needs pop */
        case 0x08: /* flds */
        case 0x0a: /* fsts */
        case 0x0b: /* fstps */
        case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
        case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
        case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
            gen_op_flds_ST0_A0();
            gen_op_fildl_ST0_A0();
            gen_op_fldl_ST0_A0();
            gen_op_fild_ST0_A0();
            gen_op_fisttl_ST0_A0();
            gen_op_fisttll_ST0_A0();
            gen_op_fistt_ST0_A0();
            gen_op_fsts_ST0_A0();
            gen_op_fistl_ST0_A0();
            gen_op_fstl_ST0_A0();
            gen_op_fist_ST0_A0();
        case 0x0c: /* fldenv mem */
            gen_op_fldenv_A0(s->dflag);
        case 0x0d: /* fldcw mem */
        case 0x0e: /* fnstenv mem */
            gen_op_fnstenv_A0(s->dflag);
        case 0x0f: /* fnstcw mem */
        case 0x1d: /* fldt mem */
            gen_op_fldt_ST0_A0();
        case 0x1f: /* fstpt mem */
            gen_op_fstt_ST0_A0();
        case 0x2c: /* frstor mem */
            gen_op_frstor_A0(s->dflag);
        case 0x2e: /* fnsave mem */
            gen_op_fnsave_A0(s->dflag);
        case 0x2f: /* fnstsw mem */
        case 0x3c: /* fbld */
            gen_op_fbld_ST0_A0();
        case 0x3e: /* fbstp */
            gen_op_fbst_ST0_A0();
        case 0x3d: /* fildll */
            gen_op_fildll_ST0_A0();
        case 0x3f: /* fistpll */
            gen_op_fistll_ST0_A0();
        /* register float ops */
        case 0x08: /* fld sti */
            gen_op_fmov_ST0_STN((opreg + 1) & 7);
        case 0x09: /* fxchg sti */
        case 0x29: /* fxchg4 sti, undocumented op */
        case 0x39: /* fxchg7 sti, undocumented op */
            gen_op_fxchg_ST0_STN(opreg);
        case 0x0a: /* grp d9/2 */
            /* check exceptions (FreeBSD FPU probe) */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
        case 0x0c: /* grp d9/4 */
            gen_op_fcom_ST0_FT0();
        case 0x0d: /* grp d9/5 */
            gen_op_fldl2t_ST0();
            gen_op_fldl2e_ST0();
            gen_op_fldlg2_ST0();
            gen_op_fldln2_ST0();
        case 0x0e: /* grp d9/6 */
            case 3: /* fpatan */
            case 4: /* fxtract */
            case 5: /* fprem1 */
            case 6: /* fdecstp */
            case 7: /* fincstp */
        case 0x0f: /* grp d9/7 */
            case 1: /* fyl2xp1 */
            case 3: /* fsincos */
            case 5: /* fscale */
            case 4: /* frndint */
        case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
        case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
        case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
            gen_op_fp_arith_STN_ST0[op1](opreg);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fp_arith_ST0_FT0[op1]();
        case 0x02: /* fcom */
        case 0x22: /* fcom2, undocumented op */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcom_ST0_FT0();
        case 0x03: /* fcomp */
        case 0x23: /* fcomp3, undocumented op */
        case 0x32: /* fcomp5, undocumented op */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcom_ST0_FT0();
        case 0x15: /* da/5 */
            case 1: /* fucompp */
                gen_op_fmov_FT0_STN(1);
                gen_op_fucom_ST0_FT0();
            case 0: /* feni (287 only, just do nop here) */
            case 1: /* fdisi (287 only, just do nop here) */
            case 3: /* fninit */
            case 4: /* fsetpm (287 only, just do nop here) */
        case 0x1d: /* fucomi */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x1e: /* fcomi */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x28: /* ffree sti */
            gen_op_ffree_STN(opreg);
        case 0x2a: /* fst sti */
            gen_op_fmov_STN_ST0(opreg);
        case 0x2b: /* fstp sti */
        case 0x0b: /* fstp1 sti, undocumented op */
        case 0x3a: /* fstp8 sti, undocumented op */
        case 0x3b: /* fstp9 sti, undocumented op */
            gen_op_fmov_STN_ST0(opreg);
        case 0x2c: /* fucom st(i) */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucom_ST0_FT0();
        case 0x2d: /* fucomp st(i) */
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucom_ST0_FT0();
        case 0x33: /* de/3 */
            case 1: /* fcompp */
                gen_op_fmov_FT0_STN(1);
                gen_op_fcom_ST0_FT0();
        case 0x38: /* ffreep sti, undocumented op */
            gen_op_ffree_STN(opreg);
        case 0x3c: /* df/4 */
            gen_op_fnstsw_EAX();
        case 0x3d: /* fucomip */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fucomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x3e: /* fcomip */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_fmov_FT0_STN(opreg);
            gen_op_fcomi_ST0_FT0();
            s->cc_op = CC_OP_EFLAGS;
        case 0x10 ... 0x13: /* fcmovxx */
            const static uint8_t fcmov_cc[8] = {
                (JCC_B << 1),
                (JCC_Z << 1),
                (JCC_BE << 1),
                (JCC_P << 1),
            };
            op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
            gen_op_fcmov_ST0_STN_T0(opreg);
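            /* fcmovxx: fcmov_cc maps the four base conditions
               (below, equal, below-or-equal, unordered) to jcc-style
               condition codes, and ((op >> 3) & 1) selects the
               negated variant, so op1 uses the same condition
               numbering as the integer jcc decoder. */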
        /************************/
    case 0xa4: /* movsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xaa: /* stosS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xac: /* lodsS */
        ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xae: /* scasS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
            s->cc_op = CC_OP_SUBB + ot;
    case 0xa6: /* cmpsS */
        ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
            s->cc_op = CC_OP_SUBB + ot;
    case 0x6c: /* insS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
            break;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0x6e: /* outsS */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        if (gen_svm_check_io(s, pc_start,
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
                             4 | (1 << (7+s->aflag))))
            break;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        /************************/
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
                             (1 << (4+ot))))
            break;
        gen_op_mov_reg_T1[ot][R_EAX]();
        ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
                             (1 << (4+ot))))
            break;
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
                             (1 << (4+ot))))
            break;
        gen_op_mov_reg_T1[ot][R_EAX]();
        ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        if (gen_svm_check_io(s, pc_start,
                             svm_is_rep(prefixes) | (1 << (4+ot))))
            break;
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        /************************/
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        gen_op_andl_T0_ffff();
    case 0xc3: /* ret */
        gen_op_andl_T0_ffff();
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_lret_protected(s->dflag, val);
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
    case 0xcb: /* lret */
    case 0xcf: /* iret */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
            break;
        gen_op_iret_real(s->dflag);
        s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_op_iret_real(s->dflag);
            s->cc_op = CC_OP_EFLAGS;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
        s->cc_op = CC_OP_EFLAGS;
    case 0xe8: /* call im */
        tval = (int32_t)insn_get(s, OT_LONG);
        tval = (int16_t)insn_get(s, OT_WORD);
        next_eip = s->pc - s->cs_base;
        gen_movtl_T0_im(next_eip);
    case 0x9a: /* lcall im */
        unsigned int selector, offset;
        ot = dflag ? OT_LONG : OT_WORD;
        offset = insn_get(s, ot);
        selector = insn_get(s, OT_WORD);
        gen_op_movl_T0_im(selector);
        gen_op_movl_T1_imu(offset);
    case 0xe9: /* jmp im */
        tval = (int32_t)insn_get(s, OT_LONG);
        tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
    case 0xea: /* ljmp im */
        unsigned int selector, offset;
        ot = dflag ? OT_LONG : OT_WORD;
        offset = insn_get(s, ot);
        selector = insn_get(s, OT_WORD);
        gen_op_movl_T0_im(selector);
        gen_op_movl_T1_imu(offset);
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
    case 0x180 ... 0x18f: /* jcc Jv */
        tval = (int32_t)insn_get(s, OT_LONG);
        tval = (int16_t)insn_get(s, OT_WORD);
        next_eip = s->pc - s->cs_base;
        gen_jcc(s, b, tval, next_eip);
    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0[ot + s->mem_index]();
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[ot][1][rm]();
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
        /************************/
    case 0x9c: /* pushf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_movl_T0_eflags();
    case 0x9d: /* popf */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_op_movl_eflags_T0_cpl0();
            gen_op_movw_eflags_T0_cpl0();
            if (s->cpl <= s->iopl) {
                gen_op_movl_eflags_T0_io();
                gen_op_movw_eflags_T0_io();
            gen_op_movl_eflags_T0();
            gen_op_movw_eflags_T0();
        s->cc_op = CC_OP_EFLAGS;
        /* abort translation because TF flag may change */
        gen_jmp_im(s->pc - s->cs_base);
    case 0x9e: /* sahf */
        gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movb_eflags_T0();
        s->cc_op = CC_OP_EFLAGS;
    case 0x9f: /* lahf */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movl_T0_eflags();
        gen_op_mov_reg_T0[OT_BYTE][R_AH]();
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xfc: /* cld */
    case 0xfd: /* std */
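        /* popf above is split into cpl0/io/plain variants chosen at
           translation time: CPL and IOPL are part of the static
           translation flags, so the translator can pick the write
           mask (which EFLAGS bits popf may change) without a run
           time privilege test. */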
        /************************/
        /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_mov_TN_reg[ot][0][rm]();
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        gen_op_st_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_op_update_bt_cc();
    case 0x1a3: /* bt Gv, Ev */
    case 0x1ab: /* bts */
    case 0x1b3: /* btr */
    case 0x1bb: /* btc */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[OT_LONG][1][reg]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* specific case: we need to add a displacement */
        gen_op_add_bit_A0_T1[ot - OT_WORD]();
        gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_mov_TN_reg[ot][0][rm]();
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        gen_op_st_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_op_update_bt_cc();
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        /* NOTE: in order to handle the 0 case, we must load the
           result. It could be optimized with a generated jump */
        gen_op_mov_TN_reg[ot][1][reg]();
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
        gen_op_mov_reg_T1[ot][reg]();
        s->cc_op = CC_OP_LOGICB + ot;
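        /* For bt/bts/btr/btc with a memory operand the bit index may
           reach outside the addressed word, so gen_op_add_bit_A0_T1
           folds the high part of the bit offset into A0 as a byte
           displacement before the load (the "specific case" noted
           above). */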
        /************************/
    case 0x27: /* daa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x2f: /* das */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x37: /* aaa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x3f: /* aas */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xd4: /* aam */
        val = ldub_code(s->pc++);
        gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        s->cc_op = CC_OP_LOGICB;
    case 0xd5: /* aad */
        val = ldub_code(s->pc++);
        s->cc_op = CC_OP_LOGICB;
        /************************/
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
    case 0xcc: /* int3 */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
    case 0xce: /* into */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
            break;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_into(s->pc - pc_start);
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
            break;
        gen_debug(s, pc_start - s->cs_base);
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
    case 0xfa: /* cli */
        if (s->cpl <= s->iopl) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
    case 0xfb: /* sti */
        if (s->cpl <= s->iopl) {
            /* interruptions are enabled only the first insn after sti */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            /* give a chance to handle pending irqs */
            gen_jmp_im(s->pc - s->cs_base);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
    case 0x62: /* bound */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg[ot][0][reg]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        gen_op_mov_TN_reg[OT_QUAD][0][reg]();
        gen_op_mov_reg_T0[OT_QUAD][reg]();
        gen_op_mov_TN_reg[OT_LONG][0][reg]();
        gen_op_mov_reg_T0[OT_LONG][reg]();
    case 0xd6: /* salc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        tval = (int8_t)insn_get(s, OT_BYTE);
        next_eip = s->pc - s->cs_base;
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jz_ecx[s->aflag](l1);
        gen_op_dec_ECX[s->aflag]();
        gen_op_loop[s->aflag][b](l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
        retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
    case 0x131: /* rdtsc */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
            break;
        gen_jmp_im(pc_start - s->cs_base);
    case 0x134: /* sysenter */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
    case 0x135: /* sysexit */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_syscall(s->pc - pc_start);
    case 0x107: /* sysret */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_sysret(s->dflag);
        /* condition codes are modified only in long mode */
        s->cc_op = CC_OP_EFLAGS;
    case 0x1a2: /* cpuid */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
            break;
    case 0xf4: /* hlt */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
            break;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(s->pc - s->cs_base);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        if (!s->pe || s->vm86)
            goto illegal_op;
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
            break;
        gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        if (!s->pe || s->vm86)
            goto illegal_op;
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
            break;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_jmp_im(pc_start - s->cs_base);
        if (!s->pe || s->vm86)
            goto illegal_op;
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
            break;
        gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        if (!s->pe || s->vm86)
            goto illegal_op;
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
            break;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_jmp_im(pc_start - s->cs_base);
        if (!s->pe || s->vm86)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
            break;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
        gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        gen_add_A0_im(s, 2);
        gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
        gen_op_andl_T0_im(0xffffff);
        gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
        case 0: /* monitor */
            if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                s->cpl != 0)
                goto illegal_op;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
                break;
            gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EBX]();
                gen_op_addq_A0_AL();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EBX]();
                gen_op_addl_A0_AL();
                gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                s->cpl != 0)
                goto illegal_op;
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
                break;
            gen_jmp_im(s->pc - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
            break;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
        gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        gen_add_A0_im(s, 2);
        gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
        gen_op_andl_T0_im(0xffffff);
        gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
            break;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(s->pc - s->cs_base);
        s->cc_op = CC_OP_EFLAGS;
        case 1: /* VMMCALL */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
                break;
            /* FIXME: cause #UD if hflags & SVM */
        case 2: /* VMLOAD */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
                break;
        case 3: /* VMSAVE */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
                break;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
                break;
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
                break;
        case 6: /* SKINIT */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
                break;
        case 7: /* INVLPGA */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
                break;
        } else if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start,
                                        op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
                break;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
            gen_add_A0_im(s, 2);
            gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
            gen_op_andl_T0_im(0xffffff);
            gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
            gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
            gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
            gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
            break;
        gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
            break;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_jmp_im(s->pc - s->cs_base);
        case 7: /* invlpg */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
#ifdef TARGET_X86_64
            if (CODE64(s) && rm == 0) {
                gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
                break;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_jmp_im(s->pc - s->cs_base);
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
            break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        /* d_ot is the size of destination */
        d_ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[OT_LONG][0][rm]();
        if (d_ot == OT_QUAD)
            gen_op_movslq_T0_T0();
        gen_op_mov_reg_T0[d_ot][reg]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        if (d_ot == OT_QUAD) {
            gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
            gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
        gen_op_mov_reg_T0[d_ot][reg]();
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0[ot + s->mem_index]();
        gen_op_mov_TN_reg[ot][0][rm]();
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
        gen_op_st_T0_A0[ot + s->mem_index]();
        gen_op_mov_reg_T0[ot][rm]();
        gen_op_arpl_update();
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg[ot][1][reg]();
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1[ot][reg]();
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        case 0: /* prefetchnta */
        case 1: /* prefetchnt0 */
        case 2: /* prefetchnt0 */
        case 3: /* prefetchnt0 */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        modrm = ldub_code(s->pc++);
        if ((modrm & 0xc0) != 0xc0)
            goto illegal_op;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
        gen_op_mov_TN_reg[ot][0][rm]();
        gen_op_movl_crN_T0(reg);
        gen_jmp_im(s->pc - s->cs_base);
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
        gen_op_movtl_T0_cr8();
        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
        gen_op_mov_reg_T0[ot][rm]();
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                /* write to debug register */
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg[ot][0][rm]();
                gen_op_movl_drN_T0(reg);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                /* read from debug register */
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0[ot][rm]();
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            gen_op_clts();
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/SSE/SSE2/PNI support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxsave_A0((s->dflag == 2));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxrstor_A0((s->dflag == 2));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* prefetch */
        modrm = ldub_code(s->pc++);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
            break;
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        gen_op_rsm();
        gen_eob(s);
        break;
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
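
/* The tables below drive the flag-liveness optimization performed by
   optimize_flags(): opc_read_flags gives, for each micro-op, the EFLAGS
   bits it consumes, opc_write_flags the bits it produces, and
   opc_simpler an equivalent op to substitute when none of the produced
   bits are live. */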
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
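
/* Note: the empty, _raw, _kernel and _user suffixes correspond to the
   memory-access variants of the generated ops (selected at translation
   time through DisasContext.mem_index); every variant reads the carry
   flag identically, hence the replicated table entries. */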
/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
void optimize_flags_init(void)
{
    int i;
    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }
}
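
/* Ops without an explicit opc_simpler entry are mapped to themselves,
   so the unconditional rewrite in optimize_flags() is a no-op for
   micro-ops that have no simpler form. */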
/* CPU flags computation optimization: we move backward through the
   generated code to see which flags are needed. The operation is
   modified if suitable */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags are live. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}
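
/* Illustrative example (not from the original source): in the op
   sequence "update1_cc; jz_subl", the backward scan visits jz_subl
   first and marks CC_Z live, so update1_cc (which writes CC_OSZAPC)
   must be kept; had no later op read any of its flags, it would have
   been rewritten to INDEX_op_nop via opc_simpler. */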
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
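
/* Usage note: gen_intermediate_code() is the normal translation entry
   point; gen_intermediate_code_pc() re-translates with search_pc set so
   that gen_opc_pc[] and gen_opc_cc_op[] record the guest PC and CC
   state for each generated op (used, e.g., to recover precise guest
   state after a fault in generated code). */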