 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)

//#define MACRO_TEST   1
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv_i32 cpu_cc_op;
static TCGv cpu_regs[CPU_NB_REGS];
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp4;
static TCGv_ptr cpu_ptr0, cpu_ptr1;
static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
static TCGv_i64 cpu_tmp1_i64;

static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];

#include "gen-icount.h"

static int x86_64_hregs;
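/* Note: the globals above are the translator's working set: cpu_env points at
   the CPUX86State structure, cpu_regs[] map the architectural GPRs, cpu_A0 is
   the scratch used for effective-address computation, and cpu_cc_op/cc_src/
   cc_dst hold the lazily-evaluated condition-code state.  The cpu_tmp*
   variables are short-lived scratch values reused by the generators below. */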
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
    OP_SHL1, /* undocumented */

/* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_A0, /* temporary register used when doing address evaluation */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
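/* The gen_op_* helpers above emit TCG ops against the two generic operand
   temporaries cpu_T[0]/cpu_T[1] and the address temporary cpu_A0; most of
   the instruction decoder works exclusively through these temporaries and
   only commits results to cpu_regs[] via the mov_reg helpers further down. */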
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(HOST_WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
/* In instruction encodings for byte register accesses the
 * register number usually indicates "low 8 bits of register N";
 * however there are some special cases where N 4..7 indicates
 * [AH, CH, DH, BH], ie "bits 15..8 of register N-4". Return
 * true for this special case, false otherwise.
 */
static inline bool byte_reg_is_xH(int reg)
{
    if (reg < 4) {
        return false;
    }
#ifdef TARGET_X86_64
    if (reg >= 8 || x86_64_hregs) {
        return false;
    }
#endif
    return true;
}
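/* Example (x86 encoding background, not code from this file): without a REX
   prefix, byte-register numbers 4..7 select AH/CH/DH/BH, so writing register
   4 must deposit into bits 15..8 of EAX.  With any REX prefix present
   (x86_64_hregs set), the same numbers select SPL/BPL/SIL/DIL instead, which
   is why the special case is disabled in that mode. */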
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (!byte_reg_is_xH(reg)) {
            tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 8);
        } else {
            tcg_gen_deposit_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], t0, 8, 8);
        }
        break;
    case OT_WORD:
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 16);
        break;
    default: /* XXX this shouldn't be reached; abort? */
    case OT_LONG:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], t0);
        break;
#ifdef TARGET_X86_64
    case OT_QUAD:
        tcg_gen_mov_tl(cpu_regs[reg], t0);
        break;
#endif
    }
}
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_A0, 0, 16);
        break;
    default: /* XXX this shouldn't be reached; abort? */
    case 1:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
        break;
#endif
    }
}
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    if (ot == OT_BYTE && byte_reg_is_xH(reg)) {
        tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
        tcg_gen_ext8u_tl(t0, t0);
    } else {
        tcg_gen_mov_tl(t0, cpu_regs[reg]);
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, eip));
}

static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
        break;
    case 1:
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a nop. */
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
        break;
#endif
    }
}

static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
        break;
    case 1:
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a nop. */
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
        break;
#endif
    }
}

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    /* For x86_64, this sets the higher half of register to zero.
       For i386, this is equivalent to a nop. */
    tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env,
                     offsetof(CPUX86State, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUX86State, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case OT_BYTE: tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index); break;
    case OT_WORD: tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index); break;
    default:
    case OT_LONG: tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index); break;
    }
}

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case OT_BYTE: tcg_gen_qemu_ld8u(t0, a0, mem_index); break;
    case OT_WORD: tcg_gen_qemu_ld16u(t0, a0, mem_index); break;
    case OT_LONG: tcg_gen_qemu_ld32u(t0, a0, mem_index); break;
    default:
    case OT_QUAD:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_ld64(t0, a0, mem_index);
#endif
        break;
    }
}

/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case OT_BYTE: tcg_gen_qemu_st8(t0, a0, mem_index); break;
    case OT_WORD: tcg_gen_qemu_st16(t0, a0, mem_index); break;
    case OT_LONG: tcg_gen_qemu_st32(t0, a0, mem_index); break;
    default:
    case OT_QUAD:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_st64(t0, a0, mem_index);
#endif
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
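/* In the load/store helpers above, 'idx' is normally passed as
   "ot + s->mem_index": the low two bits select the operand size while
   s->mem_index carries the MMU index pre-shifted, so "(idx >> 2) - 1"
   recovers the index handed to the tcg_gen_qemu_ld/st ops.  (This is a
   reading of the callers in this file, not a documented interface.) */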
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, eip));
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE: tcg_gen_ext8u_tl(reg, reg); break;
    case OT_WORD: tcg_gen_ext16u_tl(reg, reg); break;
    case OT_LONG: tcg_gen_ext32u_tl(reg, reg); break;
    default: break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE: tcg_gen_ext8s_tl(reg, reg); break;
    case OT_WORD: tcg_gen_ext16s_tl(reg, reg); break;
    case OT_LONG: tcg_gen_ext32s_tl(reg, reg); break;
    default: break;
    }
}
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_inb(v, n); break;
    case 1: gen_helper_inw(v, n); break;
    case 2: gen_helper_inl(v, n); break;
    }
}

static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_outb(v, n); break;
    case 1: gen_helper_outw(v, n); break;
    case 2: gen_helper_outl(v, n); break;
    }
}
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    target_ulong next_eip;

    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        switch (ot) {
        case 0: gen_helper_check_iob(cpu_tmp2_i32); break;
        case 1: gen_helper_check_iow(cpu_tmp2_i32); break;
        case 2: gen_helper_check_iol(cpu_tmp2_i32); break;
        }
    }
    if (s->flags & HF_SVMI_MASK) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags),
                                tcg_const_i32(next_eip - cur_eip));
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
804 /* compute eflags.C to reg */
805 static void gen_compute_eflags_c(TCGv reg
)
807 gen_helper_cc_compute_c(cpu_tmp2_i32
, cpu_cc_op
);
808 tcg_gen_extu_i32_tl(reg
, cpu_tmp2_i32
);
811 /* compute all eflags to cc_src */
812 static void gen_compute_eflags(TCGv reg
)
814 gen_helper_cc_compute_all(cpu_tmp2_i32
, cpu_cc_op
);
815 tcg_gen_extu_i32_tl(reg
, cpu_tmp2_i32
);
818 static inline void gen_setcc_slow_T0(DisasContext
*s
, int jcc_op
)
820 if (s
->cc_op
!= CC_OP_DYNAMIC
)
821 gen_op_set_cc_op(s
->cc_op
);
824 gen_compute_eflags(cpu_T
[0]);
825 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 11);
826 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 1);
829 gen_compute_eflags_c(cpu_T
[0]);
832 gen_compute_eflags(cpu_T
[0]);
833 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 6);
834 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 1);
837 gen_compute_eflags(cpu_tmp0
);
838 tcg_gen_shri_tl(cpu_T
[0], cpu_tmp0
, 6);
839 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
840 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 1);
843 gen_compute_eflags(cpu_T
[0]);
844 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 7);
845 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 1);
848 gen_compute_eflags(cpu_T
[0]);
849 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 2);
850 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 1);
853 gen_compute_eflags(cpu_tmp0
);
854 tcg_gen_shri_tl(cpu_T
[0], cpu_tmp0
, 11); /* CC_O */
855 tcg_gen_shri_tl(cpu_tmp0
, cpu_tmp0
, 7); /* CC_S */
856 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
857 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 1);
861 gen_compute_eflags(cpu_tmp0
);
862 tcg_gen_shri_tl(cpu_T
[0], cpu_tmp0
, 11); /* CC_O */
863 tcg_gen_shri_tl(cpu_tmp4
, cpu_tmp0
, 7); /* CC_S */
864 tcg_gen_shri_tl(cpu_tmp0
, cpu_tmp0
, 6); /* CC_Z */
865 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_tmp4
);
866 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
867 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 1);
872 /* return true if setcc_slow is not needed (WARNING: must be kept in
873 sync with gen_jcc1) */
874 static int is_fast_jcc_case(DisasContext
*s
, int b
)
877 jcc_op
= (b
>> 1) & 7;
879 /* we optimize the cmp/jcc case */
884 if (jcc_op
== JCC_O
|| jcc_op
== JCC_P
)
888 /* some jumps are easy to compute */
913 if (jcc_op
!= JCC_Z
&& jcc_op
!= JCC_S
)
923 /* generate a conditional jump to label 'l1' according to jump opcode
924 value 'b'. In the fast case, T0 is guaranted not to be used. */
925 static inline void gen_jcc1(DisasContext
*s
, int cc_op
, int b
, int l1
)
927 int inv
, jcc_op
, size
, cond
;
931 jcc_op
= (b
>> 1) & 7;
934 /* we optimize the cmp/jcc case */
940 size
= cc_op
- CC_OP_SUBB
;
946 tcg_gen_andi_tl(cpu_tmp0
, cpu_cc_dst
, 0xff);
950 tcg_gen_andi_tl(cpu_tmp0
, cpu_cc_dst
, 0xffff);
955 tcg_gen_andi_tl(cpu_tmp0
, cpu_cc_dst
, 0xffffffff);
963 tcg_gen_brcondi_tl(inv
? TCG_COND_NE
: TCG_COND_EQ
, t0
, 0, l1
);
969 tcg_gen_andi_tl(cpu_tmp0
, cpu_cc_dst
, 0x80);
970 tcg_gen_brcondi_tl(inv
? TCG_COND_EQ
: TCG_COND_NE
, cpu_tmp0
,
974 tcg_gen_andi_tl(cpu_tmp0
, cpu_cc_dst
, 0x8000);
975 tcg_gen_brcondi_tl(inv
? TCG_COND_EQ
: TCG_COND_NE
, cpu_tmp0
,
980 tcg_gen_andi_tl(cpu_tmp0
, cpu_cc_dst
, 0x80000000);
981 tcg_gen_brcondi_tl(inv
? TCG_COND_EQ
: TCG_COND_NE
, cpu_tmp0
,
986 tcg_gen_brcondi_tl(inv
? TCG_COND_GE
: TCG_COND_LT
, cpu_cc_dst
,
993 cond
= inv
? TCG_COND_GEU
: TCG_COND_LTU
;
996 cond
= inv
? TCG_COND_GTU
: TCG_COND_LEU
;
998 tcg_gen_add_tl(cpu_tmp4
, cpu_cc_dst
, cpu_cc_src
);
1002 tcg_gen_andi_tl(cpu_tmp4
, cpu_tmp4
, 0xff);
1003 tcg_gen_andi_tl(t0
, cpu_cc_src
, 0xff);
1007 tcg_gen_andi_tl(cpu_tmp4
, cpu_tmp4
, 0xffff);
1008 tcg_gen_andi_tl(t0
, cpu_cc_src
, 0xffff);
1010 #ifdef TARGET_X86_64
1013 tcg_gen_andi_tl(cpu_tmp4
, cpu_tmp4
, 0xffffffff);
1014 tcg_gen_andi_tl(t0
, cpu_cc_src
, 0xffffffff);
1021 tcg_gen_brcond_tl(cond
, cpu_tmp4
, t0
, l1
);
1025 cond
= inv
? TCG_COND_GE
: TCG_COND_LT
;
1028 cond
= inv
? TCG_COND_GT
: TCG_COND_LE
;
1030 tcg_gen_add_tl(cpu_tmp4
, cpu_cc_dst
, cpu_cc_src
);
1034 tcg_gen_ext8s_tl(cpu_tmp4
, cpu_tmp4
);
1035 tcg_gen_ext8s_tl(t0
, cpu_cc_src
);
1039 tcg_gen_ext16s_tl(cpu_tmp4
, cpu_tmp4
);
1040 tcg_gen_ext16s_tl(t0
, cpu_cc_src
);
1042 #ifdef TARGET_X86_64
1045 tcg_gen_ext32s_tl(cpu_tmp4
, cpu_tmp4
);
1046 tcg_gen_ext32s_tl(t0
, cpu_cc_src
);
1053 tcg_gen_brcond_tl(cond
, cpu_tmp4
, t0
, l1
);
1061 /* some jumps are easy to compute */
1103 size
= (cc_op
- CC_OP_ADDB
) & 3;
1106 size
= (cc_op
- CC_OP_ADDB
) & 3;
1114 gen_setcc_slow_T0(s
, jcc_op
);
1115 tcg_gen_brcondi_tl(inv
? TCG_COND_EQ
: TCG_COND_NE
,
1121 /* XXX: does not work with gdbstub "ice" single step - not a
1123 static int gen_jz_ecx_string(DisasContext
*s
, target_ulong next_eip
)
1127 l1
= gen_new_label();
1128 l2
= gen_new_label();
1129 gen_op_jnz_ecx(s
->aflag
, l1
);
1131 gen_jmp_tb(s
, next_eip
, 1);
1136 static inline void gen_stos(DisasContext
*s
, int ot
)
1138 gen_op_mov_TN_reg(OT_LONG
, 0, R_EAX
);
1139 gen_string_movl_A0_EDI(s
);
1140 gen_op_st_T0_A0(ot
+ s
->mem_index
);
1141 gen_op_movl_T0_Dshift(ot
);
1142 gen_op_add_reg_T0(s
->aflag
, R_EDI
);
1145 static inline void gen_lods(DisasContext
*s
, int ot
)
1147 gen_string_movl_A0_ESI(s
);
1148 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
1149 gen_op_mov_reg_T0(ot
, R_EAX
);
1150 gen_op_movl_T0_Dshift(ot
);
1151 gen_op_add_reg_T0(s
->aflag
, R_ESI
);
1154 static inline void gen_scas(DisasContext
*s
, int ot
)
1156 gen_op_mov_TN_reg(OT_LONG
, 0, R_EAX
);
1157 gen_string_movl_A0_EDI(s
);
1158 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
1159 gen_op_cmpl_T0_T1_cc();
1160 gen_op_movl_T0_Dshift(ot
);
1161 gen_op_add_reg_T0(s
->aflag
, R_EDI
);
1164 static inline void gen_cmps(DisasContext
*s
, int ot
)
1166 gen_string_movl_A0_ESI(s
);
1167 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
1168 gen_string_movl_A0_EDI(s
);
1169 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
1170 gen_op_cmpl_T0_T1_cc();
1171 gen_op_movl_T0_Dshift(ot
);
1172 gen_op_add_reg_T0(s
->aflag
, R_ESI
);
1173 gen_op_add_reg_T0(s
->aflag
, R_EDI
);
1176 static inline void gen_ins(DisasContext
*s
, int ot
)
1180 gen_string_movl_A0_EDI(s
);
1181 /* Note: we must do this dummy write first to be restartable in
1182 case of page fault. */
1184 gen_op_st_T0_A0(ot
+ s
->mem_index
);
1185 gen_op_mov_TN_reg(OT_WORD
, 1, R_EDX
);
1186 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[1]);
1187 tcg_gen_andi_i32(cpu_tmp2_i32
, cpu_tmp2_i32
, 0xffff);
1188 gen_helper_in_func(ot
, cpu_T
[0], cpu_tmp2_i32
);
1189 gen_op_st_T0_A0(ot
+ s
->mem_index
);
1190 gen_op_movl_T0_Dshift(ot
);
1191 gen_op_add_reg_T0(s
->aflag
, R_EDI
);
1196 static inline void gen_outs(DisasContext
*s
, int ot
)
1200 gen_string_movl_A0_ESI(s
);
1201 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
1203 gen_op_mov_TN_reg(OT_WORD
, 1, R_EDX
);
1204 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[1]);
1205 tcg_gen_andi_i32(cpu_tmp2_i32
, cpu_tmp2_i32
, 0xffff);
1206 tcg_gen_trunc_tl_i32(cpu_tmp3_i32
, cpu_T
[0]);
1207 gen_helper_out_func(ot
, cpu_tmp2_i32
, cpu_tmp3_i32
);
1209 gen_op_movl_T0_Dshift(ot
);
1210 gen_op_add_reg_T0(s
->aflag
, R_ESI
);
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
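/* Rough expansion sketch (hypothetical instantiation, for illustration):
   GEN_REPZ(movs) defines gen_repz_movs(), which translates one iteration of
   REP MOVS as: skip to next_eip if ECX is already zero, perform the movs
   body, decrement ECX, then jump back to cur_eip so the loop re-enters the
   translator; GEN_REPZ2 additionally tests ZF for REPZ/REPNZ CMPS/SCAS. */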
1259 static void gen_helper_fp_arith_ST0_FT0(int op
)
1262 case 0: gen_helper_fadd_ST0_FT0(); break;
1263 case 1: gen_helper_fmul_ST0_FT0(); break;
1264 case 2: gen_helper_fcom_ST0_FT0(); break;
1265 case 3: gen_helper_fcom_ST0_FT0(); break;
1266 case 4: gen_helper_fsub_ST0_FT0(); break;
1267 case 5: gen_helper_fsubr_ST0_FT0(); break;
1268 case 6: gen_helper_fdiv_ST0_FT0(); break;
1269 case 7: gen_helper_fdivr_ST0_FT0(); break;
1273 /* NOTE the exception in "r" op ordering */
1274 static void gen_helper_fp_arith_STN_ST0(int op
, int opreg
)
1276 TCGv_i32 tmp
= tcg_const_i32(opreg
);
1278 case 0: gen_helper_fadd_STN_ST0(tmp
); break;
1279 case 1: gen_helper_fmul_STN_ST0(tmp
); break;
1280 case 4: gen_helper_fsubr_STN_ST0(tmp
); break;
1281 case 5: gen_helper_fsub_STN_ST0(tmp
); break;
1282 case 6: gen_helper_fdivr_STN_ST0(tmp
); break;
1283 case 7: gen_helper_fdiv_STN_ST0(tmp
); break;
1287 /* if d == OR_TMP0, it means memory operand (address in A0) */
1288 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1291 gen_op_mov_TN_reg(ot
, 0, d
);
1293 gen_op_ld_T0_A0(ot
+ s1
->mem_index
);
1297 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1298 gen_op_set_cc_op(s1
->cc_op
);
1299 gen_compute_eflags_c(cpu_tmp4
);
1300 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1301 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_tmp4
);
1303 gen_op_mov_reg_T0(ot
, d
);
1305 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1306 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[1]);
1307 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
1308 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_tmp4
);
1309 tcg_gen_shli_i32(cpu_tmp2_i32
, cpu_tmp2_i32
, 2);
1310 tcg_gen_addi_i32(cpu_cc_op
, cpu_tmp2_i32
, CC_OP_ADDB
+ ot
);
1311 s1
->cc_op
= CC_OP_DYNAMIC
;
1314 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1315 gen_op_set_cc_op(s1
->cc_op
);
1316 gen_compute_eflags_c(cpu_tmp4
);
1317 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1318 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_tmp4
);
1320 gen_op_mov_reg_T0(ot
, d
);
1322 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1323 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[1]);
1324 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
1325 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_tmp4
);
1326 tcg_gen_shli_i32(cpu_tmp2_i32
, cpu_tmp2_i32
, 2);
1327 tcg_gen_addi_i32(cpu_cc_op
, cpu_tmp2_i32
, CC_OP_SUBB
+ ot
);
1328 s1
->cc_op
= CC_OP_DYNAMIC
;
1331 gen_op_addl_T0_T1();
1333 gen_op_mov_reg_T0(ot
, d
);
1335 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1336 gen_op_update2_cc();
1337 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1340 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1342 gen_op_mov_reg_T0(ot
, d
);
1344 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1345 gen_op_update2_cc();
1346 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1350 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1352 gen_op_mov_reg_T0(ot
, d
);
1354 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1355 gen_op_update1_cc();
1356 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1359 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1361 gen_op_mov_reg_T0(ot
, d
);
1363 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1364 gen_op_update1_cc();
1365 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1368 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1370 gen_op_mov_reg_T0(ot
, d
);
1372 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1373 gen_op_update1_cc();
1374 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1377 gen_op_cmpl_T0_T1_cc();
1378 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1383 /* if d == OR_TMP0, it means memory operand (address in A0) */
1384 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1387 gen_op_mov_TN_reg(ot
, 0, d
);
1389 gen_op_ld_T0_A0(ot
+ s1
->mem_index
);
1390 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1391 gen_op_set_cc_op(s1
->cc_op
);
1393 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], 1);
1394 s1
->cc_op
= CC_OP_INCB
+ ot
;
1396 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], -1);
1397 s1
->cc_op
= CC_OP_DECB
+ ot
;
1400 gen_op_mov_reg_T0(ot
, d
);
1402 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1403 gen_compute_eflags_c(cpu_cc_src
);
1404 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
1407 static void gen_shift_rm_T1(DisasContext
*s
, int ot
, int op1
,
1408 int is_right
, int is_arith
)
1414 if (ot
== OT_QUAD
) {
1421 if (op1
== OR_TMP0
) {
1422 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
1424 gen_op_mov_TN_reg(ot
, 0, op1
);
1427 t0
= tcg_temp_local_new();
1428 t1
= tcg_temp_local_new();
1429 t2
= tcg_temp_local_new();
1431 tcg_gen_andi_tl(t2
, cpu_T
[1], mask
);
1435 gen_exts(ot
, cpu_T
[0]);
1436 tcg_gen_mov_tl(t0
, cpu_T
[0]);
1437 tcg_gen_sar_tl(cpu_T
[0], cpu_T
[0], t2
);
1439 gen_extu(ot
, cpu_T
[0]);
1440 tcg_gen_mov_tl(t0
, cpu_T
[0]);
1441 tcg_gen_shr_tl(cpu_T
[0], cpu_T
[0], t2
);
1444 tcg_gen_mov_tl(t0
, cpu_T
[0]);
1445 tcg_gen_shl_tl(cpu_T
[0], cpu_T
[0], t2
);
1449 if (op1
== OR_TMP0
) {
1450 gen_op_st_T0_A0(ot
+ s
->mem_index
);
1452 gen_op_mov_reg_T0(ot
, op1
);
1455 /* update eflags if non zero shift */
1456 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1457 gen_op_set_cc_op(s
->cc_op
);
1460 tcg_gen_mov_tl(t1
, cpu_T
[0]);
1462 shift_label
= gen_new_label();
1463 tcg_gen_brcondi_tl(TCG_COND_EQ
, t2
, 0, shift_label
);
1465 tcg_gen_addi_tl(t2
, t2
, -1);
1466 tcg_gen_mov_tl(cpu_cc_dst
, t1
);
1470 tcg_gen_sar_tl(cpu_cc_src
, t0
, t2
);
1472 tcg_gen_shr_tl(cpu_cc_src
, t0
, t2
);
1475 tcg_gen_shl_tl(cpu_cc_src
, t0
, t2
);
1479 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SARB
+ ot
);
1481 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SHLB
+ ot
);
1484 gen_set_label(shift_label
);
1485 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1492 static void gen_shift_rm_im(DisasContext
*s
, int ot
, int op1
, int op2
,
1493 int is_right
, int is_arith
)
1504 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
1506 gen_op_mov_TN_reg(ot
, 0, op1
);
1512 gen_exts(ot
, cpu_T
[0]);
1513 tcg_gen_sari_tl(cpu_tmp4
, cpu_T
[0], op2
- 1);
1514 tcg_gen_sari_tl(cpu_T
[0], cpu_T
[0], op2
);
1516 gen_extu(ot
, cpu_T
[0]);
1517 tcg_gen_shri_tl(cpu_tmp4
, cpu_T
[0], op2
- 1);
1518 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], op2
);
1521 tcg_gen_shli_tl(cpu_tmp4
, cpu_T
[0], op2
- 1);
1522 tcg_gen_shli_tl(cpu_T
[0], cpu_T
[0], op2
);
1528 gen_op_st_T0_A0(ot
+ s
->mem_index
);
1530 gen_op_mov_reg_T0(ot
, op1
);
1532 /* update eflags if non zero shift */
1534 tcg_gen_mov_tl(cpu_cc_src
, cpu_tmp4
);
1535 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
1537 s
->cc_op
= CC_OP_SARB
+ ot
;
1539 s
->cc_op
= CC_OP_SHLB
+ ot
;
1543 static inline void tcg_gen_lshift(TCGv ret
, TCGv arg1
, target_long arg2
)
1546 tcg_gen_shli_tl(ret
, arg1
, arg2
);
1548 tcg_gen_shri_tl(ret
, arg1
, -arg2
);
1551 static void gen_rot_rm_T1(DisasContext
*s
, int ot
, int op1
,
1555 int label1
, label2
, data_bits
;
1556 TCGv t0
, t1
, t2
, a0
;
1558 /* XXX: inefficient, but we must use local temps */
1559 t0
= tcg_temp_local_new();
1560 t1
= tcg_temp_local_new();
1561 t2
= tcg_temp_local_new();
1562 a0
= tcg_temp_local_new();
1570 if (op1
== OR_TMP0
) {
1571 tcg_gen_mov_tl(a0
, cpu_A0
);
1572 gen_op_ld_v(ot
+ s
->mem_index
, t0
, a0
);
1574 gen_op_mov_v_reg(ot
, t0
, op1
);
1577 tcg_gen_mov_tl(t1
, cpu_T
[1]);
1579 tcg_gen_andi_tl(t1
, t1
, mask
);
1581 /* Must test zero case to avoid using undefined behaviour in TCG
1583 label1
= gen_new_label();
1584 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, label1
);
1587 tcg_gen_andi_tl(cpu_tmp0
, t1
, (1 << (3 + ot
)) - 1);
1589 tcg_gen_mov_tl(cpu_tmp0
, t1
);
1592 tcg_gen_mov_tl(t2
, t0
);
1594 data_bits
= 8 << ot
;
1595 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1596 fix TCG definition) */
1598 tcg_gen_shr_tl(cpu_tmp4
, t0
, cpu_tmp0
);
1599 tcg_gen_subfi_tl(cpu_tmp0
, data_bits
, cpu_tmp0
);
1600 tcg_gen_shl_tl(t0
, t0
, cpu_tmp0
);
1602 tcg_gen_shl_tl(cpu_tmp4
, t0
, cpu_tmp0
);
1603 tcg_gen_subfi_tl(cpu_tmp0
, data_bits
, cpu_tmp0
);
1604 tcg_gen_shr_tl(t0
, t0
, cpu_tmp0
);
1606 tcg_gen_or_tl(t0
, t0
, cpu_tmp4
);
1608 gen_set_label(label1
);
1610 if (op1
== OR_TMP0
) {
1611 gen_op_st_v(ot
+ s
->mem_index
, t0
, a0
);
1613 gen_op_mov_reg_v(ot
, op1
, t0
);
1617 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1618 gen_op_set_cc_op(s
->cc_op
);
1620 label2
= gen_new_label();
1621 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, label2
);
1623 gen_compute_eflags(cpu_cc_src
);
1624 tcg_gen_andi_tl(cpu_cc_src
, cpu_cc_src
, ~(CC_O
| CC_C
));
1625 tcg_gen_xor_tl(cpu_tmp0
, t2
, t0
);
1626 tcg_gen_lshift(cpu_tmp0
, cpu_tmp0
, 11 - (data_bits
- 1));
1627 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, CC_O
);
1628 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
1630 tcg_gen_shri_tl(t0
, t0
, data_bits
- 1);
1632 tcg_gen_andi_tl(t0
, t0
, CC_C
);
1633 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, t0
);
1635 tcg_gen_discard_tl(cpu_cc_dst
);
1636 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_EFLAGS
);
1638 gen_set_label(label2
);
1639 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1647 static void gen_rot_rm_im(DisasContext
*s
, int ot
, int op1
, int op2
,
1654 /* XXX: inefficient, but we must use local temps */
1655 t0
= tcg_temp_local_new();
1656 t1
= tcg_temp_local_new();
1657 a0
= tcg_temp_local_new();
1665 if (op1
== OR_TMP0
) {
1666 tcg_gen_mov_tl(a0
, cpu_A0
);
1667 gen_op_ld_v(ot
+ s
->mem_index
, t0
, a0
);
1669 gen_op_mov_v_reg(ot
, t0
, op1
);
1673 tcg_gen_mov_tl(t1
, t0
);
1676 data_bits
= 8 << ot
;
1678 int shift
= op2
& ((1 << (3 + ot
)) - 1);
1680 tcg_gen_shri_tl(cpu_tmp4
, t0
, shift
);
1681 tcg_gen_shli_tl(t0
, t0
, data_bits
- shift
);
1684 tcg_gen_shli_tl(cpu_tmp4
, t0
, shift
);
1685 tcg_gen_shri_tl(t0
, t0
, data_bits
- shift
);
1687 tcg_gen_or_tl(t0
, t0
, cpu_tmp4
);
1691 if (op1
== OR_TMP0
) {
1692 gen_op_st_v(ot
+ s
->mem_index
, t0
, a0
);
1694 gen_op_mov_reg_v(ot
, op1
, t0
);
1699 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1700 gen_op_set_cc_op(s
->cc_op
);
1702 gen_compute_eflags(cpu_cc_src
);
1703 tcg_gen_andi_tl(cpu_cc_src
, cpu_cc_src
, ~(CC_O
| CC_C
));
1704 tcg_gen_xor_tl(cpu_tmp0
, t1
, t0
);
1705 tcg_gen_lshift(cpu_tmp0
, cpu_tmp0
, 11 - (data_bits
- 1));
1706 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, CC_O
);
1707 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
1709 tcg_gen_shri_tl(t0
, t0
, data_bits
- 1);
1711 tcg_gen_andi_tl(t0
, t0
, CC_C
);
1712 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, t0
);
1714 tcg_gen_discard_tl(cpu_cc_dst
);
1715 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_EFLAGS
);
1716 s
->cc_op
= CC_OP_EFLAGS
;
1724 /* XXX: add faster immediate = 1 case */
1725 static void gen_rotc_rm_T1(DisasContext
*s
, int ot
, int op1
,
1730 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1731 gen_op_set_cc_op(s
->cc_op
);
1735 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
1737 gen_op_mov_TN_reg(ot
, 0, op1
);
1741 case 0: gen_helper_rcrb(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1742 case 1: gen_helper_rcrw(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1743 case 2: gen_helper_rcrl(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1744 #ifdef TARGET_X86_64
1745 case 3: gen_helper_rcrq(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1750 case 0: gen_helper_rclb(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1751 case 1: gen_helper_rclw(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1752 case 2: gen_helper_rcll(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1753 #ifdef TARGET_X86_64
1754 case 3: gen_helper_rclq(cpu_T
[0], cpu_T
[0], cpu_T
[1]); break;
1760 gen_op_st_T0_A0(ot
+ s
->mem_index
);
1762 gen_op_mov_reg_T0(ot
, op1
);
1765 label1
= gen_new_label();
1766 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cc_tmp
, -1, label1
);
1768 tcg_gen_mov_tl(cpu_cc_src
, cpu_cc_tmp
);
1769 tcg_gen_discard_tl(cpu_cc_dst
);
1770 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_EFLAGS
);
1772 gen_set_label(label1
);
1773 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1776 /* XXX: add faster immediate case */
1777 static void gen_shiftd_rm_T1_T3(DisasContext
*s
, int ot
, int op1
,
1780 int label1
, label2
, data_bits
;
1782 TCGv t0
, t1
, t2
, a0
;
1784 t0
= tcg_temp_local_new();
1785 t1
= tcg_temp_local_new();
1786 t2
= tcg_temp_local_new();
1787 a0
= tcg_temp_local_new();
1795 if (op1
== OR_TMP0
) {
1796 tcg_gen_mov_tl(a0
, cpu_A0
);
1797 gen_op_ld_v(ot
+ s
->mem_index
, t0
, a0
);
1799 gen_op_mov_v_reg(ot
, t0
, op1
);
1802 tcg_gen_andi_tl(cpu_T3
, cpu_T3
, mask
);
1804 tcg_gen_mov_tl(t1
, cpu_T
[1]);
1805 tcg_gen_mov_tl(t2
, cpu_T3
);
1807 /* Must test zero case to avoid using undefined behaviour in TCG
1809 label1
= gen_new_label();
1810 tcg_gen_brcondi_tl(TCG_COND_EQ
, t2
, 0, label1
);
1812 tcg_gen_addi_tl(cpu_tmp5
, t2
, -1);
1813 if (ot
== OT_WORD
) {
1814 /* Note: we implement the Intel behaviour for shift count > 16 */
1816 tcg_gen_andi_tl(t0
, t0
, 0xffff);
1817 tcg_gen_shli_tl(cpu_tmp0
, t1
, 16);
1818 tcg_gen_or_tl(t0
, t0
, cpu_tmp0
);
1819 tcg_gen_ext32u_tl(t0
, t0
);
1821 tcg_gen_shr_tl(cpu_tmp4
, t0
, cpu_tmp5
);
1823 /* only needed if count > 16, but a test would complicate */
1824 tcg_gen_subfi_tl(cpu_tmp5
, 32, t2
);
1825 tcg_gen_shl_tl(cpu_tmp0
, t0
, cpu_tmp5
);
1827 tcg_gen_shr_tl(t0
, t0
, t2
);
1829 tcg_gen_or_tl(t0
, t0
, cpu_tmp0
);
1831 /* XXX: not optimal */
1832 tcg_gen_andi_tl(t0
, t0
, 0xffff);
1833 tcg_gen_shli_tl(t1
, t1
, 16);
1834 tcg_gen_or_tl(t1
, t1
, t0
);
1835 tcg_gen_ext32u_tl(t1
, t1
);
1837 tcg_gen_shl_tl(cpu_tmp4
, t0
, cpu_tmp5
);
1838 tcg_gen_subfi_tl(cpu_tmp0
, 32, cpu_tmp5
);
1839 tcg_gen_shr_tl(cpu_tmp5
, t1
, cpu_tmp0
);
1840 tcg_gen_or_tl(cpu_tmp4
, cpu_tmp4
, cpu_tmp5
);
1842 tcg_gen_shl_tl(t0
, t0
, t2
);
1843 tcg_gen_subfi_tl(cpu_tmp5
, 32, t2
);
1844 tcg_gen_shr_tl(t1
, t1
, cpu_tmp5
);
1845 tcg_gen_or_tl(t0
, t0
, t1
);
1848 data_bits
= 8 << ot
;
1851 tcg_gen_ext32u_tl(t0
, t0
);
1853 tcg_gen_shr_tl(cpu_tmp4
, t0
, cpu_tmp5
);
1855 tcg_gen_shr_tl(t0
, t0
, t2
);
1856 tcg_gen_subfi_tl(cpu_tmp5
, data_bits
, t2
);
1857 tcg_gen_shl_tl(t1
, t1
, cpu_tmp5
);
1858 tcg_gen_or_tl(t0
, t0
, t1
);
1862 tcg_gen_ext32u_tl(t1
, t1
);
1864 tcg_gen_shl_tl(cpu_tmp4
, t0
, cpu_tmp5
);
1866 tcg_gen_shl_tl(t0
, t0
, t2
);
1867 tcg_gen_subfi_tl(cpu_tmp5
, data_bits
, t2
);
1868 tcg_gen_shr_tl(t1
, t1
, cpu_tmp5
);
1869 tcg_gen_or_tl(t0
, t0
, t1
);
1872 tcg_gen_mov_tl(t1
, cpu_tmp4
);
1874 gen_set_label(label1
);
1876 if (op1
== OR_TMP0
) {
1877 gen_op_st_v(ot
+ s
->mem_index
, t0
, a0
);
1879 gen_op_mov_reg_v(ot
, op1
, t0
);
1883 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1884 gen_op_set_cc_op(s
->cc_op
);
1886 label2
= gen_new_label();
1887 tcg_gen_brcondi_tl(TCG_COND_EQ
, t2
, 0, label2
);
1889 tcg_gen_mov_tl(cpu_cc_src
, t1
);
1890 tcg_gen_mov_tl(cpu_cc_dst
, t0
);
1892 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SARB
+ ot
);
1894 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SHLB
+ ot
);
1896 gen_set_label(label2
);
1897 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1905 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1908 gen_op_mov_TN_reg(ot
, 1, s
);
1911 gen_rot_rm_T1(s1
, ot
, d
, 0);
1914 gen_rot_rm_T1(s1
, ot
, d
, 1);
1918 gen_shift_rm_T1(s1
, ot
, d
, 0, 0);
1921 gen_shift_rm_T1(s1
, ot
, d
, 1, 0);
1924 gen_shift_rm_T1(s1
, ot
, d
, 1, 1);
1927 gen_rotc_rm_T1(s1
, ot
, d
, 0);
1930 gen_rotc_rm_T1(s1
, ot
, d
, 1);
1935 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1939 gen_rot_rm_im(s1
, ot
, d
, c
, 0);
1942 gen_rot_rm_im(s1
, ot
, d
, c
, 1);
1946 gen_shift_rm_im(s1
, ot
, d
, c
, 0, 0);
1949 gen_shift_rm_im(s1
, ot
, d
, c
, 1, 0);
1952 gen_shift_rm_im(s1
, ot
, d
, c
, 1, 1);
1955 /* currently not optimized */
1956 gen_op_movl_T1_im(c
);
1957 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1962 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1970 int mod
, rm
, code
, override
, must_add_seg
;
1972 override
= s
->override
;
1973 must_add_seg
= s
->addseg
;
1976 mod
= (modrm
>> 6) & 3;
1988 code
= ldub_code(s
->pc
++);
1989 scale
= (code
>> 6) & 3;
1990 index
= ((code
>> 3) & 7) | REX_X(s
);
1997 if ((base
& 7) == 5) {
1999 disp
= (int32_t)ldl_code(s
->pc
);
2001 if (CODE64(s
) && !havesib
) {
2002 disp
+= s
->pc
+ s
->rip_offset
;
2009 disp
= (int8_t)ldub_code(s
->pc
++);
2013 disp
= (int32_t)ldl_code(s
->pc
);
2019 /* for correct popl handling with esp */
2020 if (base
== 4 && s
->popl_esp_hack
)
2021 disp
+= s
->popl_esp_hack
;
2022 #ifdef TARGET_X86_64
2023 if (s
->aflag
== 2) {
2024 gen_op_movq_A0_reg(base
);
2026 gen_op_addq_A0_im(disp
);
2031 gen_op_movl_A0_reg(base
);
2033 gen_op_addl_A0_im(disp
);
2036 #ifdef TARGET_X86_64
2037 if (s
->aflag
== 2) {
2038 gen_op_movq_A0_im(disp
);
2042 gen_op_movl_A0_im(disp
);
2045 /* index == 4 means no index */
2046 if (havesib
&& (index
!= 4)) {
2047 #ifdef TARGET_X86_64
2048 if (s
->aflag
== 2) {
2049 gen_op_addq_A0_reg_sN(scale
, index
);
2053 gen_op_addl_A0_reg_sN(scale
, index
);
2058 if (base
== R_EBP
|| base
== R_ESP
)
2063 #ifdef TARGET_X86_64
2064 if (s
->aflag
== 2) {
2065 gen_op_addq_A0_seg(override
);
2069 gen_op_addl_A0_seg(override
);
2076 disp
= lduw_code(s
->pc
);
2078 gen_op_movl_A0_im(disp
);
2079 rm
= 0; /* avoid SS override */
2086 disp
= (int8_t)ldub_code(s
->pc
++);
2090 disp
= lduw_code(s
->pc
);
2096 gen_op_movl_A0_reg(R_EBX
);
2097 gen_op_addl_A0_reg_sN(0, R_ESI
);
2100 gen_op_movl_A0_reg(R_EBX
);
2101 gen_op_addl_A0_reg_sN(0, R_EDI
);
2104 gen_op_movl_A0_reg(R_EBP
);
2105 gen_op_addl_A0_reg_sN(0, R_ESI
);
2108 gen_op_movl_A0_reg(R_EBP
);
2109 gen_op_addl_A0_reg_sN(0, R_EDI
);
2112 gen_op_movl_A0_reg(R_ESI
);
2115 gen_op_movl_A0_reg(R_EDI
);
2118 gen_op_movl_A0_reg(R_EBP
);
2122 gen_op_movl_A0_reg(R_EBX
);
2126 gen_op_addl_A0_im(disp
);
2127 gen_op_andl_A0_ffff();
2131 if (rm
== 2 || rm
== 3 || rm
== 6)
2136 gen_op_addl_A0_seg(override
);
2146 static void gen_nop_modrm(DisasContext
*s
, int modrm
)
2148 int mod
, rm
, base
, code
;
2150 mod
= (modrm
>> 6) & 3;
2160 code
= ldub_code(s
->pc
++);
2196 /* used for LEA and MOV AX, mem */
2197 static void gen_add_A0_ds_seg(DisasContext
*s
)
2199 int override
, must_add_seg
;
2200 must_add_seg
= s
->addseg
;
2202 if (s
->override
>= 0) {
2203 override
= s
->override
;
2207 #ifdef TARGET_X86_64
2209 gen_op_addq_A0_seg(override
);
2213 gen_op_addl_A0_seg(override
);
2218 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2220 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
2222 int mod
, rm
, opreg
, disp
;
2224 mod
= (modrm
>> 6) & 3;
2225 rm
= (modrm
& 7) | REX_B(s
);
2229 gen_op_mov_TN_reg(ot
, 0, reg
);
2230 gen_op_mov_reg_T0(ot
, rm
);
2232 gen_op_mov_TN_reg(ot
, 0, rm
);
2234 gen_op_mov_reg_T0(ot
, reg
);
2237 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
2240 gen_op_mov_TN_reg(ot
, 0, reg
);
2241 gen_op_st_T0_A0(ot
+ s
->mem_index
);
2243 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
2245 gen_op_mov_reg_T0(ot
, reg
);
2250 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
2256 ret
= ldub_code(s
->pc
);
2260 ret
= lduw_code(s
->pc
);
2265 ret
= ldl_code(s
->pc
);
2272 static inline int insn_const_size(unsigned int ot
)
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
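/* gen_goto_tb chooses between the two ways of leaving a translation block:
   when the target eip stays on the same guest page as the current TB (so
   the code mapping cannot change underneath it), it emits a patchable
   direct jump (tcg_gen_goto_tb + exit_tb with the TB pointer and slot
   number) so the blocks can later be chained; otherwise it falls back to
   storing eip and ending the block with gen_eob(). */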
2301 static inline void gen_jcc(DisasContext
*s
, int b
,
2302 target_ulong val
, target_ulong next_eip
)
2307 gen_update_cc_op(s
);
2309 l1
= gen_new_label();
2310 gen_jcc1(s
, cc_op
, b
, l1
);
2312 gen_goto_tb(s
, 0, next_eip
);
2315 gen_goto_tb(s
, 1, val
);
2316 s
->is_jmp
= DISAS_TB_JUMP
;
2319 l1
= gen_new_label();
2320 l2
= gen_new_label();
2321 gen_jcc1(s
, cc_op
, b
, l1
);
2323 gen_jmp_im(next_eip
);
2333 static void gen_setcc(DisasContext
*s
, int b
)
2335 int inv
, jcc_op
, l1
;
2338 if (is_fast_jcc_case(s
, b
)) {
2339 /* nominal case: we use a jump */
2340 /* XXX: make it faster by adding new instructions in TCG */
2341 t0
= tcg_temp_local_new();
2342 tcg_gen_movi_tl(t0
, 0);
2343 l1
= gen_new_label();
2344 gen_jcc1(s
, s
->cc_op
, b
^ 1, l1
);
2345 tcg_gen_movi_tl(t0
, 1);
2347 tcg_gen_mov_tl(cpu_T
[0], t0
);
2350 /* slow case: it is more efficient not to generate a jump,
2351 although it is questionnable whether this optimization is
2354 jcc_op
= (b
>> 1) & 7;
2355 gen_setcc_slow_T0(s
, jcc_op
);
2357 tcg_gen_xori_tl(cpu_T
[0], cpu_T
[0], 1);
2362 static inline void gen_op_movl_T0_seg(int seg_reg
)
2364 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
,
2365 offsetof(CPUX86State
,segs
[seg_reg
].selector
));
2368 static inline void gen_op_movl_seg_T0_vm(int seg_reg
)
2370 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], 0xffff);
2371 tcg_gen_st32_tl(cpu_T
[0], cpu_env
,
2372 offsetof(CPUX86State
,segs
[seg_reg
].selector
));
2373 tcg_gen_shli_tl(cpu_T
[0], cpu_T
[0], 4);
2374 tcg_gen_st_tl(cpu_T
[0], cpu_env
,
2375 offsetof(CPUX86State
,segs
[seg_reg
].base
));
2378 /* move T0 to seg_reg and compute if the CPU state may change. Never
2379 call this function with seg_reg == R_CS */
2380 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, target_ulong cur_eip
)
2382 if (s
->pe
&& !s
->vm86
) {
2383 /* XXX: optimize by finding processor state dynamically */
2384 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2385 gen_op_set_cc_op(s
->cc_op
);
2386 gen_jmp_im(cur_eip
);
2387 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
2388 gen_helper_load_seg(tcg_const_i32(seg_reg
), cpu_tmp2_i32
);
2389 /* abort translation because the addseg value may change or
2390 because ss32 may change. For R_SS, translation must always
2391 stop as a special handling must be done to disable hardware
2392 interrupts for the next instruction */
2393 if (seg_reg
== R_SS
|| (s
->code32
&& seg_reg
< R_FS
))
2394 s
->is_jmp
= DISAS_TB_JUMP
;
2396 gen_op_movl_seg_T0_vm(seg_reg
);
2397 if (seg_reg
== R_SS
)
2398 s
->is_jmp
= DISAS_TB_JUMP
;
2402 static inline int svm_is_rep(int prefixes
)
2404 return ((prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) ? 8 : 0);
2408 gen_svm_check_intercept_param(DisasContext
*s
, target_ulong pc_start
,
2409 uint32_t type
, uint64_t param
)
2411 /* no SVM activated; fast case */
2412 if (likely(!(s
->flags
& HF_SVMI_MASK
)))
2414 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2415 gen_op_set_cc_op(s
->cc_op
);
2416 gen_jmp_im(pc_start
- s
->cs_base
);
2417 gen_helper_svm_check_intercept_param(tcg_const_i32(type
),
2418 tcg_const_i64(param
));
2422 gen_svm_check_intercept(DisasContext
*s
, target_ulong pc_start
, uint64_t type
)
2424 gen_svm_check_intercept_param(s
, pc_start
, type
, 0);
2427 static inline void gen_stack_update(DisasContext
*s
, int addend
)
2429 #ifdef TARGET_X86_64
2431 gen_op_add_reg_im(2, R_ESP
, addend
);
2435 gen_op_add_reg_im(1, R_ESP
, addend
);
2437 gen_op_add_reg_im(0, R_ESP
, addend
);
2441 /* generate a push. It depends on ss32, addseg and dflag */
2442 static void gen_push_T0(DisasContext
*s
)
2444 #ifdef TARGET_X86_64
2446 gen_op_movq_A0_reg(R_ESP
);
2448 gen_op_addq_A0_im(-8);
2449 gen_op_st_T0_A0(OT_QUAD
+ s
->mem_index
);
2451 gen_op_addq_A0_im(-2);
2452 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
2454 gen_op_mov_reg_A0(2, R_ESP
);
2458 gen_op_movl_A0_reg(R_ESP
);
2460 gen_op_addl_A0_im(-2);
2462 gen_op_addl_A0_im(-4);
2465 tcg_gen_mov_tl(cpu_T
[1], cpu_A0
);
2466 gen_op_addl_A0_seg(R_SS
);
2469 gen_op_andl_A0_ffff();
2470 tcg_gen_mov_tl(cpu_T
[1], cpu_A0
);
2471 gen_op_addl_A0_seg(R_SS
);
2473 gen_op_st_T0_A0(s
->dflag
+ 1 + s
->mem_index
);
2474 if (s
->ss32
&& !s
->addseg
)
2475 gen_op_mov_reg_A0(1, R_ESP
);
2477 gen_op_mov_reg_T1(s
->ss32
+ 1, R_ESP
);
2481 /* generate a push. It depends on ss32, addseg and dflag */
2482 /* slower version for T1, only used for call Ev */
2483 static void gen_push_T1(DisasContext
*s
)
2485 #ifdef TARGET_X86_64
2487 gen_op_movq_A0_reg(R_ESP
);
2489 gen_op_addq_A0_im(-8);
2490 gen_op_st_T1_A0(OT_QUAD
+ s
->mem_index
);
2492 gen_op_addq_A0_im(-2);
2493 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
2495 gen_op_mov_reg_A0(2, R_ESP
);
2499 gen_op_movl_A0_reg(R_ESP
);
2501 gen_op_addl_A0_im(-2);
2503 gen_op_addl_A0_im(-4);
2506 gen_op_addl_A0_seg(R_SS
);
2509 gen_op_andl_A0_ffff();
2510 gen_op_addl_A0_seg(R_SS
);
2512 gen_op_st_T1_A0(s
->dflag
+ 1 + s
->mem_index
);
2514 if (s
->ss32
&& !s
->addseg
)
2515 gen_op_mov_reg_A0(1, R_ESP
);
2517 gen_stack_update(s
, (-2) << s
->dflag
);
2521 /* two step pop is necessary for precise exceptions */
2522 static void gen_pop_T0(DisasContext
*s
)
2524 #ifdef TARGET_X86_64
2526 gen_op_movq_A0_reg(R_ESP
);
2527 gen_op_ld_T0_A0((s
->dflag
? OT_QUAD
: OT_WORD
) + s
->mem_index
);
2531 gen_op_movl_A0_reg(R_ESP
);
2534 gen_op_addl_A0_seg(R_SS
);
2536 gen_op_andl_A0_ffff();
2537 gen_op_addl_A0_seg(R_SS
);
2539 gen_op_ld_T0_A0(s
->dflag
+ 1 + s
->mem_index
);
2543 static void gen_pop_update(DisasContext
*s
)
2545 #ifdef TARGET_X86_64
2546 if (CODE64(s
) && s
->dflag
) {
2547 gen_stack_update(s
, 8);
2551 gen_stack_update(s
, 2 << s
->dflag
);
2555 static void gen_stack_A0(DisasContext
*s
)
2557 gen_op_movl_A0_reg(R_ESP
);
2559 gen_op_andl_A0_ffff();
2560 tcg_gen_mov_tl(cpu_T
[1], cpu_A0
);
2562 gen_op_addl_A0_seg(R_SS
);
2565 /* NOTE: wrap around in 16 bit not fully handled */
2566 static void gen_pusha(DisasContext
*s
)
2569 gen_op_movl_A0_reg(R_ESP
);
2570 gen_op_addl_A0_im(-16 << s
->dflag
);
2572 gen_op_andl_A0_ffff();
2573 tcg_gen_mov_tl(cpu_T
[1], cpu_A0
);
2575 gen_op_addl_A0_seg(R_SS
);
2576 for(i
= 0;i
< 8; i
++) {
2577 gen_op_mov_TN_reg(OT_LONG
, 0, 7 - i
);
2578 gen_op_st_T0_A0(OT_WORD
+ s
->dflag
+ s
->mem_index
);
2579 gen_op_addl_A0_im(2 << s
->dflag
);
2581 gen_op_mov_reg_T1(OT_WORD
+ s
->ss32
, R_ESP
);
2584 /* NOTE: wrap around in 16 bit not fully handled */
2585 static void gen_popa(DisasContext
*s
)
2588 gen_op_movl_A0_reg(R_ESP
);
2590 gen_op_andl_A0_ffff();
2591 tcg_gen_mov_tl(cpu_T
[1], cpu_A0
);
2592 tcg_gen_addi_tl(cpu_T
[1], cpu_T
[1], 16 << s
->dflag
);
2594 gen_op_addl_A0_seg(R_SS
);
2595 for(i
= 0;i
< 8; i
++) {
2596 /* ESP is not reloaded */
2598 gen_op_ld_T0_A0(OT_WORD
+ s
->dflag
+ s
->mem_index
);
2599 gen_op_mov_reg_T0(OT_WORD
+ s
->dflag
, 7 - i
);
2601 gen_op_addl_A0_im(2 << s
->dflag
);
2603 gen_op_mov_reg_T1(OT_WORD
+ s
->ss32
, R_ESP
);
2606 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
2611 #ifdef TARGET_X86_64
2613 ot
= s
->dflag
? OT_QUAD
: OT_WORD
;
2616 gen_op_movl_A0_reg(R_ESP
);
2617 gen_op_addq_A0_im(-opsize
);
2618 tcg_gen_mov_tl(cpu_T
[1], cpu_A0
);
2621 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
2622 gen_op_st_T0_A0(ot
+ s
->mem_index
);
2624 /* XXX: must save state */
2625 gen_helper_enter64_level(tcg_const_i32(level
),
2626 tcg_const_i32((ot
== OT_QUAD
)),
2629 gen_op_mov_reg_T1(ot
, R_EBP
);
2630 tcg_gen_addi_tl(cpu_T
[1], cpu_T
[1], -esp_addend
+ (-opsize
* level
));
2631 gen_op_mov_reg_T1(OT_QUAD
, R_ESP
);
2635 ot
= s
->dflag
+ OT_WORD
;
2636 opsize
= 2 << s
->dflag
;
2638 gen_op_movl_A0_reg(R_ESP
);
2639 gen_op_addl_A0_im(-opsize
);
2641 gen_op_andl_A0_ffff();
2642 tcg_gen_mov_tl(cpu_T
[1], cpu_A0
);
2644 gen_op_addl_A0_seg(R_SS
);
2646 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
2647 gen_op_st_T0_A0(ot
+ s
->mem_index
);
2649 /* XXX: must save state */
2650 gen_helper_enter_level(tcg_const_i32(level
),
2651 tcg_const_i32(s
->dflag
),
2654 gen_op_mov_reg_T1(ot
, R_EBP
);
2655 tcg_gen_addi_tl(cpu_T
[1], cpu_T
[1], -esp_addend
+ (-opsize
* level
));
2656 gen_op_mov_reg_T1(OT_WORD
+ s
->ss32
, R_ESP
);
2660 static void gen_exception(DisasContext
*s
, int trapno
, target_ulong cur_eip
)
2662 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2663 gen_op_set_cc_op(s
->cc_op
);
2664 gen_jmp_im(cur_eip
);
2665 gen_helper_raise_exception(cpu_env
, tcg_const_i32(trapno
));
2666 s
->is_jmp
= DISAS_TB_JUMP
;
2669 /* an interrupt is different from an exception because of the
2671 static void gen_interrupt(DisasContext
*s
, int intno
,
2672 target_ulong cur_eip
, target_ulong next_eip
)
2674 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2675 gen_op_set_cc_op(s
->cc_op
);
2676 gen_jmp_im(cur_eip
);
2677 gen_helper_raise_interrupt(cpu_env
, tcg_const_i32(intno
),
2678 tcg_const_i32(next_eip
- cur_eip
));
2679 s
->is_jmp
= DISAS_TB_JUMP
;
2682 static void gen_debug(DisasContext
*s
, target_ulong cur_eip
)
2684 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2685 gen_op_set_cc_op(s
->cc_op
);
2686 gen_jmp_im(cur_eip
);
2688 s
->is_jmp
= DISAS_TB_JUMP
;
2691 /* generate a generic end of block. Trace exception is also generated
2693 static void gen_eob(DisasContext
*s
)
2695 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2696 gen_op_set_cc_op(s
->cc_op
);
2697 if (s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
) {
2698 gen_helper_reset_inhibit_irq();
2700 if (s
->tb
->flags
& HF_RF_MASK
) {
2701 gen_helper_reset_rf();
2703 if (s
->singlestep_enabled
) {
2706 gen_helper_single_step();
2710 s
->is_jmp
= DISAS_TB_JUMP
;
2713 /* generate a jump to eip. No segment change must happen before as a
2714 direct call to the next block may occur */
2715 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
)
2718 gen_update_cc_op(s
);
2719 gen_goto_tb(s
, tb_num
, eip
);
2720 s
->is_jmp
= DISAS_TB_JUMP
;
2727 static void gen_jmp(DisasContext
*s
, target_ulong eip
)
2729 gen_jmp_tb(s
, eip
, 0);
2732 static inline void gen_ldq_env_A0(int idx
, int offset
)
2734 int mem_index
= (idx
>> 2) - 1;
2735 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
, mem_index
);
2736 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
, offset
);
2739 static inline void gen_stq_env_A0(int idx
, int offset
)
2741 int mem_index
= (idx
>> 2) - 1;
2742 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
, offset
);
2743 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
, mem_index
);
2746 static inline void gen_ldo_env_A0(int idx
, int offset
)
2748 int mem_index
= (idx
>> 2) - 1;
2749 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
, mem_index
);
2750 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
, offset
+ offsetof(XMMReg
, XMM_Q(0)));
2751 tcg_gen_addi_tl(cpu_tmp0
, cpu_A0
, 8);
2752 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_tmp0
, mem_index
);
2753 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
, offset
+ offsetof(XMMReg
, XMM_Q(1)));
2756 static inline void gen_sto_env_A0(int idx
, int offset
)
2758 int mem_index
= (idx
>> 2) - 1;
2759 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
, offset
+ offsetof(XMMReg
, XMM_Q(0)));
2760 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
, mem_index
);
2761 tcg_gen_addi_tl(cpu_tmp0
, cpu_A0
, 8);
2762 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
, offset
+ offsetof(XMMReg
, XMM_Q(1)));
2763 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_tmp0
, mem_index
);
2766 static inline void gen_op_movo(int d_offset
, int s_offset
)
2768 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
, s_offset
);
2769 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
, d_offset
);
2770 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
, s_offset
+ 8);
2771 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
, d_offset
+ 8);
2774 static inline void gen_op_movq(int d_offset
, int s_offset
)
2776 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
, s_offset
);
2777 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
, d_offset
);
2780 static inline void gen_op_movl(int d_offset
, int s_offset
)
2782 tcg_gen_ld_i32(cpu_tmp2_i32
, cpu_env
, s_offset
);
2783 tcg_gen_st_i32(cpu_tmp2_i32
, cpu_env
, d_offset
);
2786 static inline void gen_op_movq_env_0(int d_offset
)
2788 tcg_gen_movi_i64(cpu_tmp1_i64
, 0);
2789 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
, d_offset
);
typedef void (*SSEFunc_i_p)(TCGv_i32 val, TCGv_ptr reg);
typedef void (*SSEFunc_l_p)(TCGv_i64 val, TCGv_ptr reg);
typedef void (*SSEFunc_0_pi)(TCGv_ptr reg, TCGv_i32 val);
typedef void (*SSEFunc_0_pl)(TCGv_ptr reg, TCGv_i64 val);
typedef void (*SSEFunc_0_pp)(TCGv_ptr reg_a, TCGv_ptr reg_b);
typedef void (*SSEFunc_0_ppi)(TCGv_ptr reg_a, TCGv_ptr reg_b, TCGv_i32 val);
typedef void (*SSEFunc_0_ppt)(TCGv_ptr reg_a, TCGv_ptr reg_b, TCGv val);

#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
#define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
                     gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
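/* The sse_op_table1 table below is indexed by the second opcode byte of the
   0x0F escape; the four columns correspond to the mandatory-prefix forms
   (none, 0x66, 0xF3, 0xF2), which is why entries such as 0x10 list
   movups/movupd/movss/movsd.  SSE_SPECIAL marks opcodes decoded by hand and
   SSE_DUMMY marks ops whose helper takes no operands (femms/emms). */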
static const SSEFunc_0_pp sse_op_table1[256][4] = {
2808 /* 3DNow! extensions */
2809 [0x0e] = { SSE_DUMMY
}, /* femms */
2810 [0x0f] = { SSE_DUMMY
}, /* pf... */
2811 /* pure SSE operations */
2812 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2813 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2814 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd, movsldup, movddup */
2815 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2816 [0x14] = { gen_helper_punpckldq_xmm
, gen_helper_punpcklqdq_xmm
},
2817 [0x15] = { gen_helper_punpckhdq_xmm
, gen_helper_punpckhqdq_xmm
},
2818 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2819 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2821 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2822 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2823 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2824 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd, movntss, movntsd */
2825 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2826 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2827 [0x2e] = { gen_helper_ucomiss
, gen_helper_ucomisd
},
2828 [0x2f] = { gen_helper_comiss
, gen_helper_comisd
},
2829 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2830 [0x51] = SSE_FOP(sqrt
),
2831 [0x52] = { gen_helper_rsqrtps
, NULL
, gen_helper_rsqrtss
, NULL
},
2832 [0x53] = { gen_helper_rcpps
, NULL
, gen_helper_rcpss
, NULL
},
2833 [0x54] = { gen_helper_pand_xmm
, gen_helper_pand_xmm
}, /* andps, andpd */
2834 [0x55] = { gen_helper_pandn_xmm
, gen_helper_pandn_xmm
}, /* andnps, andnpd */
2835 [0x56] = { gen_helper_por_xmm
, gen_helper_por_xmm
}, /* orps, orpd */
2836 [0x57] = { gen_helper_pxor_xmm
, gen_helper_pxor_xmm
}, /* xorps, xorpd */
2837 [0x58] = SSE_FOP(add
),
2838 [0x59] = SSE_FOP(mul
),
2839 [0x5a] = { gen_helper_cvtps2pd
, gen_helper_cvtpd2ps
,
2840 gen_helper_cvtss2sd
, gen_helper_cvtsd2ss
},
2841 [0x5b] = { gen_helper_cvtdq2ps
, gen_helper_cvtps2dq
, gen_helper_cvttps2dq
},
2842 [0x5c] = SSE_FOP(sub
),
2843 [0x5d] = SSE_FOP(min
),
2844 [0x5e] = SSE_FOP(div
),
2845 [0x5f] = SSE_FOP(max
),
2847 [0xc2] = SSE_FOP(cmpeq
),
2848 [0xc6] = { (SSEFunc_0_pp
)gen_helper_shufps
,
2849 (SSEFunc_0_pp
)gen_helper_shufpd
}, /* XXX: casts */
2851 [0x38] = { SSE_SPECIAL
, SSE_SPECIAL
, NULL
, SSE_SPECIAL
}, /* SSSE3/SSE4 */
2852 [0x3a] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* SSSE3/SSE4 */
2854 /* MMX ops and their SSE extensions */
2855 [0x60] = MMX_OP2(punpcklbw
),
2856 [0x61] = MMX_OP2(punpcklwd
),
2857 [0x62] = MMX_OP2(punpckldq
),
2858 [0x63] = MMX_OP2(packsswb
),
2859 [0x64] = MMX_OP2(pcmpgtb
),
2860 [0x65] = MMX_OP2(pcmpgtw
),
2861 [0x66] = MMX_OP2(pcmpgtl
),
2862 [0x67] = MMX_OP2(packuswb
),
2863 [0x68] = MMX_OP2(punpckhbw
),
2864 [0x69] = MMX_OP2(punpckhwd
),
2865 [0x6a] = MMX_OP2(punpckhdq
),
2866 [0x6b] = MMX_OP2(packssdw
),
2867 [0x6c] = { NULL
, gen_helper_punpcklqdq_xmm
},
2868 [0x6d] = { NULL
, gen_helper_punpckhqdq_xmm
},
2869 [0x6e] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (SSEFunc_0_pp)gen_helper_pshufw_mmx,
               (SSEFunc_0_pp)gen_helper_pshufd_xmm,
               (SSEFunc_0_pp)gen_helper_pshufhw_xmm,
               (SSEFunc_0_pp)gen_helper_pshuflw_xmm }, /* XXX: casts */
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x78] = { NULL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* extrq_i, insertq_i */
    [0x79] = { NULL, gen_helper_extrq_r, NULL, gen_helper_insertq_r },
    [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
    [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = { (SSEFunc_0_pp)gen_helper_maskmov_mmx,
               (SSEFunc_0_pp)gen_helper_maskmov_xmm }, /* XXX: casts */
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
static const SSEFunc_0_pp sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_helper_pslldq_xmm },
};
static const SSEFunc_0_pi sse_op_table3ai[] = {
    gen_helper_cvtsi2ss,
    gen_helper_cvtsi2sd
};

#ifdef TARGET_X86_64
static const SSEFunc_0_pl sse_op_table3aq[] = {
    gen_helper_cvtsq2ss,
    gen_helper_cvtsq2sd
};
#endif

static const SSEFunc_i_p sse_op_table3bi[] = {
    gen_helper_cvttss2si,
    gen_helper_cvtss2si,
    gen_helper_cvttsd2si,
    gen_helper_cvtsd2si
};

#ifdef TARGET_X86_64
static const SSEFunc_l_p sse_op_table3bq[] = {
    gen_helper_cvttss2sq,
    gen_helper_cvtss2sq,
    gen_helper_cvttsd2sq,
    gen_helper_cvtsd2sq
};
#endif
static const SSEFunc_0_pp sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
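/* sse_op_table5 maps the 3DNow! opcode suffix byte to its helper */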
static const SSEFunc_0_pp sse_op_table5[256] = {
    [0x0c] = gen_helper_pi2fw,
    [0x0d] = gen_helper_pi2fd,
    [0x1c] = gen_helper_pf2iw,
    [0x1d] = gen_helper_pf2id,
    [0x8a] = gen_helper_pfnacc,
    [0x8e] = gen_helper_pfpnacc,
    [0x90] = gen_helper_pfcmpge,
    [0x94] = gen_helper_pfmin,
    [0x96] = gen_helper_pfrcp,
    [0x97] = gen_helper_pfrsqrt,
    [0x9a] = gen_helper_pfsub,
    [0x9e] = gen_helper_pfadd,
    [0xa0] = gen_helper_pfcmpgt,
    [0xa4] = gen_helper_pfmax,
    [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = gen_helper_movq, /* pfrsqit1 */
    [0xaa] = gen_helper_pfsubr,
    [0xae] = gen_helper_pfacc,
    [0xb0] = gen_helper_pfcmpeq,
    [0xb4] = gen_helper_pfmul,
    [0xb6] = gen_helper_movq, /* pfrcpit2 */
    [0xb7] = gen_helper_pmulhrw_mmx,
    [0xbb] = gen_helper_pswapd,
    [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
};
struct SSEOpHelper_pp {
    SSEFunc_0_pp op[2];
    uint32_t ext_mask;
};

struct SSEOpHelper_ppi {
    SSEFunc_0_ppi op[2];
    uint32_t ext_mask;
};

#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
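/* sse_op_table6 dispatches the 0x0f 0x38 (SSSE3/SSE4) opcodes; ext_mask is
   the CPUID extension bit that must be set for the opcode to be legal */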
static const struct SSEOpHelper_pp sse_op_table6[256] = {
    [0x00] = SSSE3_OP(pshufb),
    [0x01] = SSSE3_OP(phaddw),
    [0x02] = SSSE3_OP(phaddd),
    [0x03] = SSSE3_OP(phaddsw),
    [0x04] = SSSE3_OP(pmaddubsw),
    [0x05] = SSSE3_OP(phsubw),
    [0x06] = SSSE3_OP(phsubd),
    [0x07] = SSSE3_OP(phsubsw),
    [0x08] = SSSE3_OP(psignb),
    [0x09] = SSSE3_OP(psignw),
    [0x0a] = SSSE3_OP(psignd),
    [0x0b] = SSSE3_OP(pmulhrsw),
    [0x10] = SSE41_OP(pblendvb),
    [0x14] = SSE41_OP(blendvps),
    [0x15] = SSE41_OP(blendvpd),
    [0x17] = SSE41_OP(ptest),
    [0x1c] = SSSE3_OP(pabsb),
    [0x1d] = SSSE3_OP(pabsw),
    [0x1e] = SSSE3_OP(pabsd),
    [0x20] = SSE41_OP(pmovsxbw),
    [0x21] = SSE41_OP(pmovsxbd),
    [0x22] = SSE41_OP(pmovsxbq),
    [0x23] = SSE41_OP(pmovsxwd),
    [0x24] = SSE41_OP(pmovsxwq),
    [0x25] = SSE41_OP(pmovsxdq),
    [0x28] = SSE41_OP(pmuldq),
    [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntqda */
    [0x2b] = SSE41_OP(packusdw),
    [0x30] = SSE41_OP(pmovzxbw),
    [0x31] = SSE41_OP(pmovzxbd),
    [0x32] = SSE41_OP(pmovzxbq),
    [0x33] = SSE41_OP(pmovzxwd),
    [0x34] = SSE41_OP(pmovzxwq),
    [0x35] = SSE41_OP(pmovzxdq),
    [0x37] = SSE42_OP(pcmpgtq),
    [0x38] = SSE41_OP(pminsb),
    [0x39] = SSE41_OP(pminsd),
    [0x3a] = SSE41_OP(pminuw),
    [0x3b] = SSE41_OP(pminud),
    [0x3c] = SSE41_OP(pmaxsb),
    [0x3d] = SSE41_OP(pmaxsd),
    [0x3e] = SSE41_OP(pmaxuw),
    [0x3f] = SSE41_OP(pmaxud),
    [0x40] = SSE41_OP(pmulld),
    [0x41] = SSE41_OP(phminposuw),
};
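/* sse_op_table7 dispatches the 0x0f 0x3a opcodes, which carry an extra
   immediate byte (hence the SSEFunc_0_ppi signature) */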
static const struct SSEOpHelper_ppi sse_op_table7[256] = {
    [0x08] = SSE41_OP(roundps),
    [0x09] = SSE41_OP(roundpd),
    [0x0a] = SSE41_OP(roundss),
    [0x0b] = SSE41_OP(roundsd),
    [0x0c] = SSE41_OP(blendps),
    [0x0d] = SSE41_OP(blendpd),
    [0x0e] = SSE41_OP(pblendw),
    [0x0f] = SSSE3_OP(palignr),
    [0x14] = SSE41_SPECIAL, /* pextrb */
    [0x15] = SSE41_SPECIAL, /* pextrw */
    [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
    [0x17] = SSE41_SPECIAL, /* extractps */
    [0x20] = SSE41_SPECIAL, /* pinsrb */
    [0x21] = SSE41_SPECIAL, /* insertps */
    [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
    [0x40] = SSE41_OP(dpps),
    [0x41] = SSE41_OP(dppd),
    [0x42] = SSE41_OP(mpsadbw),
    [0x60] = SSE42_OP(pcmpestrm),
    [0x61] = SSE42_OP(pcmpestri),
    [0x62] = SSE42_OP(pcmpistrm),
    [0x63] = SSE42_OP(pcmpistri),
};
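/* translate one MMX/SSE instruction; 'b' is the opcode byte and b1 the
   mandatory prefix in effect (0 none, 1 0x66, 2 0xf3, 3 0xf2), selecting
   the column of the dispatch tables above */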
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    SSEFunc_0_pp sse_fn_pp;
    SSEFunc_0_ppi sse_fn_ppi;
    SSEFunc_0_ppt sse_fn_ppt;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_fn_pp = sse_op_table1[b][b1];
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
        goto illegal_op;
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    gen_helper_enter_mmx();

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    mod = (modrm >> 6) & 3;
3175 if (sse_fn_pp
== SSE_SPECIAL
) {
3178 case 0x0e7: /* movntq */
3181 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3182 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3184 case 0x1e7: /* movntdq */
3185 case 0x02b: /* movntps */
3186 case 0x12b: /* movntps */
3189 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3190 gen_sto_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
3192 case 0x3f0: /* lddqu */
3195 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3196 gen_ldo_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
3198 case 0x22b: /* movntss */
3199 case 0x32b: /* movntsd */
3202 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3204 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,
3207 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,
3208 xmm_regs
[reg
].XMM_L(0)));
3209 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
3212 case 0x6e: /* movd mm, ea */
3213 #ifdef TARGET_X86_64
3214 if (s
->dflag
== 2) {
3215 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
3216 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3220 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
3221 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
3222 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3223 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
3224 gen_helper_movl_mm_T0_mmx(cpu_ptr0
, cpu_tmp2_i32
);
3227 case 0x16e: /* movd xmm, ea */
3228 #ifdef TARGET_X86_64
3229 if (s
->dflag
== 2) {
3230 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
3231 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
3232 offsetof(CPUX86State
,xmm_regs
[reg
]));
3233 gen_helper_movq_mm_T0_xmm(cpu_ptr0
, cpu_T
[0]);
3237 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
3238 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
3239 offsetof(CPUX86State
,xmm_regs
[reg
]));
3240 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
3241 gen_helper_movl_mm_T0_xmm(cpu_ptr0
, cpu_tmp2_i32
);
3244 case 0x6f: /* movq mm, ea */
3246 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3247 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3250 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
,
3251 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3252 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
,
3253 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3256 case 0x010: /* movups */
3257 case 0x110: /* movupd */
3258 case 0x028: /* movaps */
3259 case 0x128: /* movapd */
3260 case 0x16f: /* movdqa xmm, ea */
3261 case 0x26f: /* movdqu xmm, ea */
3263 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3264 gen_ldo_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
3266 rm
= (modrm
& 7) | REX_B(s
);
3267 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[reg
]),
3268 offsetof(CPUX86State
,xmm_regs
[rm
]));
3271 case 0x210: /* movss xmm, ea */
3273 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3274 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3275 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
3277 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
3278 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
3279 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
3281 rm
= (modrm
& 7) | REX_B(s
);
3282 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
3283 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
3286 case 0x310: /* movsd xmm, ea */
3288 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3289 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3291 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
3292 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
3294 rm
= (modrm
& 7) | REX_B(s
);
3295 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3296 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3299 case 0x012: /* movlps */
3300 case 0x112: /* movlpd */
3302 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3303 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3306 rm
= (modrm
& 7) | REX_B(s
);
3307 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3308 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
3311 case 0x212: /* movsldup */
3313 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3314 gen_ldo_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
3316 rm
= (modrm
& 7) | REX_B(s
);
3317 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
3318 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
3319 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
3320 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(2)));
3322 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
3323 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
3324 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
3325 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
3327 case 0x312: /* movddup */
3329 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3330 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3332 rm
= (modrm
& 7) | REX_B(s
);
3333 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3334 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3336 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
3337 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3339 case 0x016: /* movhps */
3340 case 0x116: /* movhpd */
3342 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3343 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3346 rm
= (modrm
& 7) | REX_B(s
);
3347 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
3348 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3351 case 0x216: /* movshdup */
3353 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3354 gen_ldo_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
3356 rm
= (modrm
& 7) | REX_B(s
);
3357 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
3358 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(1)));
3359 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
3360 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(3)));
3362 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
3363 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
3364 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
3365 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
3370 int bit_index
, field_length
;
3372 if (b1
== 1 && reg
!= 0)
3374 field_length
= ldub_code(s
->pc
++) & 0x3F;
3375 bit_index
= ldub_code(s
->pc
++) & 0x3F;
3376 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
3377 offsetof(CPUX86State
,xmm_regs
[reg
]));
3379 gen_helper_extrq_i(cpu_ptr0
, tcg_const_i32(bit_index
),
3380 tcg_const_i32(field_length
));
3382 gen_helper_insertq_i(cpu_ptr0
, tcg_const_i32(bit_index
),
3383 tcg_const_i32(field_length
));
3386 case 0x7e: /* movd ea, mm */
3387 #ifdef TARGET_X86_64
3388 if (s
->dflag
== 2) {
3389 tcg_gen_ld_i64(cpu_T
[0], cpu_env
,
3390 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3391 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 1);
3395 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
,
3396 offsetof(CPUX86State
,fpregs
[reg
].mmx
.MMX_L(0)));
3397 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
3400 case 0x17e: /* movd ea, xmm */
3401 #ifdef TARGET_X86_64
3402 if (s
->dflag
== 2) {
3403 tcg_gen_ld_i64(cpu_T
[0], cpu_env
,
3404 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3405 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 1);
3409 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
,
3410 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
3411 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
3414 case 0x27e: /* movq xmm, ea */
3416 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3417 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3419 rm
= (modrm
& 7) | REX_B(s
);
3420 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3421 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3423 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3425 case 0x7f: /* movq ea, mm */
3427 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3428 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3431 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
3432 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
3435 case 0x011: /* movups */
3436 case 0x111: /* movupd */
3437 case 0x029: /* movaps */
3438 case 0x129: /* movapd */
3439 case 0x17f: /* movdqa ea, xmm */
3440 case 0x27f: /* movdqu ea, xmm */
3442 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3443 gen_sto_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
]));
3445 rm
= (modrm
& 7) | REX_B(s
);
3446 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
3447 offsetof(CPUX86State
,xmm_regs
[reg
]));
3450 case 0x211: /* movss ea, xmm */
3452 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3453 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
3454 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
3456 rm
= (modrm
& 7) | REX_B(s
);
3457 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
3458 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
3461 case 0x311: /* movsd ea, xmm */
3463 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3464 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3466 rm
= (modrm
& 7) | REX_B(s
);
3467 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
3468 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3471 case 0x013: /* movlps */
3472 case 0x113: /* movlpd */
3474 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3475 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3480 case 0x017: /* movhps */
3481 case 0x117: /* movhpd */
3483 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3484 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3489 case 0x71: /* shift mm, im */
3492 case 0x171: /* shift xmm, im */
3498 val
= ldub_code(s
->pc
++);
3500 gen_op_movl_T0_im(val
);
3501 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3503 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
3504 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
3506 gen_op_movl_T0_im(val
);
3507 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
3509 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
3510 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
3512 sse_fn_pp
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
3517 rm
= (modrm
& 7) | REX_B(s
);
3518 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3521 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3523 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op2_offset
);
3524 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op1_offset
);
3525 sse_fn_pp(cpu_ptr0
, cpu_ptr1
);
3527 case 0x050: /* movmskps */
3528 rm
= (modrm
& 7) | REX_B(s
);
3529 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
3530 offsetof(CPUX86State
,xmm_regs
[rm
]));
3531 gen_helper_movmskps(cpu_tmp2_i32
, cpu_ptr0
);
3532 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
3533 gen_op_mov_reg_T0(OT_LONG
, reg
);
3535 case 0x150: /* movmskpd */
3536 rm
= (modrm
& 7) | REX_B(s
);
3537 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
,
3538 offsetof(CPUX86State
,xmm_regs
[rm
]));
3539 gen_helper_movmskpd(cpu_tmp2_i32
, cpu_ptr0
);
3540 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
3541 gen_op_mov_reg_T0(OT_LONG
, reg
);
3543 case 0x02a: /* cvtpi2ps */
3544 case 0x12a: /* cvtpi2pd */
3545 gen_helper_enter_mmx();
3547 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3548 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3549 gen_ldq_env_A0(s
->mem_index
, op2_offset
);
3552 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3554 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3555 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
3556 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
3559 gen_helper_cvtpi2ps(cpu_ptr0
, cpu_ptr1
);
3563 gen_helper_cvtpi2pd(cpu_ptr0
, cpu_ptr1
);
3567 case 0x22a: /* cvtsi2ss */
3568 case 0x32a: /* cvtsi2sd */
3569 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3570 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3571 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3572 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
3573 if (ot
== OT_LONG
) {
3574 SSEFunc_0_pi sse_fn_pi
= sse_op_table3ai
[(b
>> 8) & 1];
3575 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
3576 sse_fn_pi(cpu_ptr0
, cpu_tmp2_i32
);
3578 #ifdef TARGET_X86_64
3579 SSEFunc_0_pl sse_fn_pl
= sse_op_table3aq
[(b
>> 8) & 1];
3580 sse_fn_pl(cpu_ptr0
, cpu_T
[0]);
3586 case 0x02c: /* cvttps2pi */
3587 case 0x12c: /* cvttpd2pi */
3588 case 0x02d: /* cvtps2pi */
3589 case 0x12d: /* cvtpd2pi */
3590 gen_helper_enter_mmx();
3592 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3593 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3594 gen_ldo_env_A0(s
->mem_index
, op2_offset
);
3596 rm
= (modrm
& 7) | REX_B(s
);
3597 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3599 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
3600 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
3601 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
3604 gen_helper_cvttps2pi(cpu_ptr0
, cpu_ptr1
);
3607 gen_helper_cvttpd2pi(cpu_ptr0
, cpu_ptr1
);
3610 gen_helper_cvtps2pi(cpu_ptr0
, cpu_ptr1
);
3613 gen_helper_cvtpd2pi(cpu_ptr0
, cpu_ptr1
);
3617 case 0x22c: /* cvttss2si */
3618 case 0x32c: /* cvttsd2si */
3619 case 0x22d: /* cvtss2si */
3620 case 0x32d: /* cvtsd2si */
3621 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3623 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3625 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_t0
.XMM_Q(0)));
3627 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3628 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3630 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3632 rm
= (modrm
& 7) | REX_B(s
);
3633 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3635 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op2_offset
);
3636 if (ot
== OT_LONG
) {
3637 SSEFunc_i_p sse_fn_i_p
=
3638 sse_op_table3bi
[((b
>> 7) & 2) | (b
& 1)];
3639 sse_fn_i_p(cpu_tmp2_i32
, cpu_ptr0
);
3640 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
3642 #ifdef TARGET_X86_64
3643 SSEFunc_l_p sse_fn_l_p
=
3644 sse_op_table3bq
[((b
>> 7) & 2) | (b
& 1)];
3645 sse_fn_l_p(cpu_T
[0], cpu_ptr0
);
3650 gen_op_mov_reg_T0(ot
, reg
);
3652 case 0xc4: /* pinsrw */
3655 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3656 val
= ldub_code(s
->pc
++);
3659 tcg_gen_st16_tl(cpu_T
[0], cpu_env
,
3660 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_W(val
)));
3663 tcg_gen_st16_tl(cpu_T
[0], cpu_env
,
3664 offsetof(CPUX86State
,fpregs
[reg
].mmx
.MMX_W(val
)));
3667 case 0xc5: /* pextrw */
3671 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3672 val
= ldub_code(s
->pc
++);
3675 rm
= (modrm
& 7) | REX_B(s
);
3676 tcg_gen_ld16u_tl(cpu_T
[0], cpu_env
,
3677 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_W(val
)));
3681 tcg_gen_ld16u_tl(cpu_T
[0], cpu_env
,
3682 offsetof(CPUX86State
,fpregs
[rm
].mmx
.MMX_W(val
)));
3684 reg
= ((modrm
>> 3) & 7) | rex_r
;
3685 gen_op_mov_reg_T0(ot
, reg
);
3687 case 0x1d6: /* movq ea, xmm */
3689 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3690 gen_stq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3692 rm
= (modrm
& 7) | REX_B(s
);
3693 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
3694 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3695 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
3698 case 0x2d6: /* movq2dq */
3699 gen_helper_enter_mmx();
3701 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3702 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3703 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3705 case 0x3d6: /* movdq2q */
3706 gen_helper_enter_mmx();
3707 rm
= (modrm
& 7) | REX_B(s
);
3708 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
),
3709 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3711 case 0xd7: /* pmovmskb */
3716 rm
= (modrm
& 7) | REX_B(s
);
3717 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, offsetof(CPUX86State
,xmm_regs
[rm
]));
3718 gen_helper_pmovmskb_xmm(cpu_tmp2_i32
, cpu_ptr0
);
3721 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3722 gen_helper_pmovmskb_mmx(cpu_tmp2_i32
, cpu_ptr0
);
3724 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
3725 reg
= ((modrm
>> 3) & 7) | rex_r
;
3726 gen_op_mov_reg_T0(OT_LONG
, reg
);
3729 if (s
->prefix
& PREFIX_REPNZ
)
3733 modrm
= ldub_code(s
->pc
++);
3735 reg
= ((modrm
>> 3) & 7) | rex_r
;
3736 mod
= (modrm
>> 6) & 3;
3741 sse_fn_pp
= sse_op_table6
[b
].op
[b1
];
3745 if (!(s
->cpuid_ext_features
& sse_op_table6
[b
].ext_mask
))
3749 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3751 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
| REX_B(s
)]);
3753 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3754 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3756 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3757 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3758 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3759 gen_ldq_env_A0(s
->mem_index
, op2_offset
+
3760 offsetof(XMMReg
, XMM_Q(0)));
3762 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3763 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
3764 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_A0
,
3765 (s
->mem_index
>> 2) - 1);
3766 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_tmp0
);
3767 tcg_gen_st_i32(cpu_tmp2_i32
, cpu_env
, op2_offset
+
3768 offsetof(XMMReg
, XMM_L(0)));
3770 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3771 tcg_gen_qemu_ld16u(cpu_tmp0
, cpu_A0
,
3772 (s
->mem_index
>> 2) - 1);
3773 tcg_gen_st16_tl(cpu_tmp0
, cpu_env
, op2_offset
+
3774 offsetof(XMMReg
, XMM_W(0)));
3776 case 0x2a: /* movntqda */
3777 gen_ldo_env_A0(s
->mem_index
, op1_offset
);
3780 gen_ldo_env_A0(s
->mem_index
, op2_offset
);
3784 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
3786 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3788 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3789 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3790 gen_ldq_env_A0(s
->mem_index
, op2_offset
);
3793 if (sse_fn_pp
== SSE_SPECIAL
) {
3797 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
3798 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
3799 sse_fn_pp(cpu_ptr0
, cpu_ptr1
);
3802 s
->cc_op
= CC_OP_EFLAGS
;
3804 case 0x338: /* crc32 */
3807 modrm
= ldub_code(s
->pc
++);
3808 reg
= ((modrm
>> 3) & 7) | rex_r
;
3810 if (b
!= 0xf0 && b
!= 0xf1)
3812 if (!(s
->cpuid_ext_features
& CPUID_EXT_SSE42
))
3817 else if (b
== 0xf1 && s
->dflag
!= 2)
3818 if (s
->prefix
& PREFIX_DATA
)
3825 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
3826 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
3827 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3828 gen_helper_crc32(cpu_T
[0], cpu_tmp2_i32
,
3829 cpu_T
[0], tcg_const_i32(8 << ot
));
3831 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3832 gen_op_mov_reg_T0(ot
, reg
);
3837 modrm
= ldub_code(s
->pc
++);
3839 reg
= ((modrm
>> 3) & 7) | rex_r
;
3840 mod
= (modrm
>> 6) & 3;
3845 sse_fn_ppi
= sse_op_table7
[b
].op
[b1
];
3849 if (!(s
->cpuid_ext_features
& sse_op_table7
[b
].ext_mask
))
3852 if (sse_fn_ppi
== SSE_SPECIAL
) {
3853 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3854 rm
= (modrm
& 7) | REX_B(s
);
3856 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3857 reg
= ((modrm
>> 3) & 7) | rex_r
;
3858 val
= ldub_code(s
->pc
++);
3860 case 0x14: /* pextrb */
3861 tcg_gen_ld8u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,
3862 xmm_regs
[reg
].XMM_B(val
& 15)));
3864 gen_op_mov_reg_T0(ot
, rm
);
3866 tcg_gen_qemu_st8(cpu_T
[0], cpu_A0
,
3867 (s
->mem_index
>> 2) - 1);
3869 case 0x15: /* pextrw */
3870 tcg_gen_ld16u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,
3871 xmm_regs
[reg
].XMM_W(val
& 7)));
3873 gen_op_mov_reg_T0(ot
, rm
);
3875 tcg_gen_qemu_st16(cpu_T
[0], cpu_A0
,
3876 (s
->mem_index
>> 2) - 1);
3879 if (ot
== OT_LONG
) { /* pextrd */
3880 tcg_gen_ld_i32(cpu_tmp2_i32
, cpu_env
,
3881 offsetof(CPUX86State
,
3882 xmm_regs
[reg
].XMM_L(val
& 3)));
3883 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
3885 gen_op_mov_reg_v(ot
, rm
, cpu_T
[0]);
3887 tcg_gen_qemu_st32(cpu_T
[0], cpu_A0
,
3888 (s
->mem_index
>> 2) - 1);
3889 } else { /* pextrq */
3890 #ifdef TARGET_X86_64
3891 tcg_gen_ld_i64(cpu_tmp1_i64
, cpu_env
,
3892 offsetof(CPUX86State
,
3893 xmm_regs
[reg
].XMM_Q(val
& 1)));
3895 gen_op_mov_reg_v(ot
, rm
, cpu_tmp1_i64
);
3897 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
3898 (s
->mem_index
>> 2) - 1);
3904 case 0x17: /* extractps */
3905 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,
3906 xmm_regs
[reg
].XMM_L(val
& 3)));
3908 gen_op_mov_reg_T0(ot
, rm
);
3910 tcg_gen_qemu_st32(cpu_T
[0], cpu_A0
,
3911 (s
->mem_index
>> 2) - 1);
3913 case 0x20: /* pinsrb */
3915 gen_op_mov_TN_reg(OT_LONG
, 0, rm
);
3917 tcg_gen_qemu_ld8u(cpu_tmp0
, cpu_A0
,
3918 (s
->mem_index
>> 2) - 1);
3919 tcg_gen_st8_tl(cpu_tmp0
, cpu_env
, offsetof(CPUX86State
,
3920 xmm_regs
[reg
].XMM_B(val
& 15)));
3922 case 0x21: /* insertps */
3924 tcg_gen_ld_i32(cpu_tmp2_i32
, cpu_env
,
3925 offsetof(CPUX86State
,xmm_regs
[rm
]
3926 .XMM_L((val
>> 6) & 3)));
3928 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_A0
,
3929 (s
->mem_index
>> 2) - 1);
3930 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_tmp0
);
3932 tcg_gen_st_i32(cpu_tmp2_i32
, cpu_env
,
3933 offsetof(CPUX86State
,xmm_regs
[reg
]
3934 .XMM_L((val
>> 4) & 3)));
3936 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3937 cpu_env
, offsetof(CPUX86State
,
3938 xmm_regs
[reg
].XMM_L(0)));
3940 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3941 cpu_env
, offsetof(CPUX86State
,
3942 xmm_regs
[reg
].XMM_L(1)));
3944 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3945 cpu_env
, offsetof(CPUX86State
,
3946 xmm_regs
[reg
].XMM_L(2)));
3948 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3949 cpu_env
, offsetof(CPUX86State
,
3950 xmm_regs
[reg
].XMM_L(3)));
3953 if (ot
== OT_LONG
) { /* pinsrd */
3955 gen_op_mov_v_reg(ot
, cpu_tmp0
, rm
);
3957 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_A0
,
3958 (s
->mem_index
>> 2) - 1);
3959 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_tmp0
);
3960 tcg_gen_st_i32(cpu_tmp2_i32
, cpu_env
,
3961 offsetof(CPUX86State
,
3962 xmm_regs
[reg
].XMM_L(val
& 3)));
3963 } else { /* pinsrq */
3964 #ifdef TARGET_X86_64
3966 gen_op_mov_v_reg(ot
, cpu_tmp1_i64
, rm
);
3968 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
3969 (s
->mem_index
>> 2) - 1);
3970 tcg_gen_st_i64(cpu_tmp1_i64
, cpu_env
,
3971 offsetof(CPUX86State
,
3972 xmm_regs
[reg
].XMM_Q(val
& 1)));
3983 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3985 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
| REX_B(s
)]);
3987 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3988 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3989 gen_ldo_env_A0(s
->mem_index
, op2_offset
);
3992 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
3994 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3996 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3997 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3998 gen_ldq_env_A0(s
->mem_index
, op2_offset
);
4001 val
= ldub_code(s
->pc
++);
4003 if ((b
& 0xfc) == 0x60) { /* pcmpXstrX */
4004 s
->cc_op
= CC_OP_EFLAGS
;
4007 /* The helper must use entire 64-bit gp registers */
4011 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
4012 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
4013 sse_fn_ppi(cpu_ptr0
, cpu_ptr1
, tcg_const_i32(val
));
4019 /* generic MMX or SSE operation */
4021 case 0x70: /* pshufx insn */
4022 case 0xc6: /* pshufx insn */
4023 case 0xc2: /* compare insns */
4030 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
4032 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4033 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
4034 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f && b
!= 0x5b) ||
4036 /* specific case for SSE single instructions */
4039 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
4040 tcg_gen_st32_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
4043 gen_ldq_env_A0(s
->mem_index
, offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
4046 gen_ldo_env_A0(s
->mem_index
, op2_offset
);
4049 rm
= (modrm
& 7) | REX_B(s
);
4050 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
4053 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
4055 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4056 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
4057 gen_ldq_env_A0(s
->mem_index
, op2_offset
);
4060 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
4064 case 0x0f: /* 3DNow! data insns */
4065 if (!(s
->cpuid_ext2_features
& CPUID_EXT2_3DNOW
))
4067 val
= ldub_code(s
->pc
++);
4068 sse_fn_pp
= sse_op_table5
[val
];
4072 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
4073 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
4074 sse_fn_pp(cpu_ptr0
, cpu_ptr1
);
4076 case 0x70: /* pshufx insn */
4077 case 0xc6: /* pshufx insn */
4078 val
= ldub_code(s
->pc
++);
4079 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
4080 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
4081 /* XXX: introduce a new table? */
4082 sse_fn_ppi
= (SSEFunc_0_ppi
)sse_fn_pp
;
4083 sse_fn_ppi(cpu_ptr0
, cpu_ptr1
, tcg_const_i32(val
));
4087 val
= ldub_code(s
->pc
++);
4090 sse_fn_pp
= sse_op_table4
[val
][b1
];
4092 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
4093 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
4094 sse_fn_pp(cpu_ptr0
, cpu_ptr1
);
4097 /* maskmov : we must prepare A0 */
4100 #ifdef TARGET_X86_64
4101 if (s
->aflag
== 2) {
4102 gen_op_movq_A0_reg(R_EDI
);
4106 gen_op_movl_A0_reg(R_EDI
);
4108 gen_op_andl_A0_ffff();
4110 gen_add_A0_ds_seg(s
);
4112 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
4113 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
4114 /* XXX: introduce a new table? */
4115 sse_fn_ppt
= (SSEFunc_0_ppt
)sse_fn_pp
;
4116 sse_fn_ppt(cpu_ptr0
, cpu_ptr1
, cpu_A0
);
4119 tcg_gen_addi_ptr(cpu_ptr0
, cpu_env
, op1_offset
);
4120 tcg_gen_addi_ptr(cpu_ptr1
, cpu_env
, op2_offset
);
4121 sse_fn_pp(cpu_ptr0
, cpu_ptr1
);
4124 if (b
== 0x2e || b
== 0x2f) {
4125 s
->cc_op
= CC_OP_EFLAGS
;
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(pc_start);
4149 #ifdef TARGET_X86_64
4154 s
->rip_offset
= 0; /* for relative ip address */
4156 b
= ldub_code(s
->pc
);
4158 /* check prefixes */
4159 #ifdef TARGET_X86_64
4163 prefixes
|= PREFIX_REPZ
;
4166 prefixes
|= PREFIX_REPNZ
;
4169 prefixes
|= PREFIX_LOCK
;
4190 prefixes
|= PREFIX_DATA
;
4193 prefixes
|= PREFIX_ADR
;
4197 rex_w
= (b
>> 3) & 1;
4198 rex_r
= (b
& 0x4) << 1;
4199 s
->rex_x
= (b
& 0x2) << 2;
4200 REX_B(s
) = (b
& 0x1) << 3;
4201 x86_64_hregs
= 1; /* select uniform byte register addressing */
4205 /* 0x66 is ignored if rex.w is set */
4208 if (prefixes
& PREFIX_DATA
)
4211 if (!(prefixes
& PREFIX_ADR
))
4218 prefixes
|= PREFIX_REPZ
;
4221 prefixes
|= PREFIX_REPNZ
;
4224 prefixes
|= PREFIX_LOCK
;
4245 prefixes
|= PREFIX_DATA
;
4248 prefixes
|= PREFIX_ADR
;
4251 if (prefixes
& PREFIX_DATA
)
4253 if (prefixes
& PREFIX_ADR
)
4257 s
->prefix
= prefixes
;
4261 /* lock generation */
4262 if (prefixes
& PREFIX_LOCK
)
4265 /* now check op code */
4269 /**************************/
4270 /* extended op code */
4271 b
= ldub_code(s
->pc
++) | 0x100;
4274 /**************************/
4292 ot
= dflag
+ OT_WORD
;
4295 case 0: /* OP Ev, Gv */
4296 modrm
= ldub_code(s
->pc
++);
4297 reg
= ((modrm
>> 3) & 7) | rex_r
;
4298 mod
= (modrm
>> 6) & 3;
4299 rm
= (modrm
& 7) | REX_B(s
);
4301 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4303 } else if (op
== OP_XORL
&& rm
== reg
) {
4305 /* xor reg, reg optimisation */
4307 s
->cc_op
= CC_OP_LOGICB
+ ot
;
4308 gen_op_mov_reg_T0(ot
, reg
);
4309 gen_op_update1_cc();
4314 gen_op_mov_TN_reg(ot
, 1, reg
);
4315 gen_op(s
, op
, ot
, opreg
);
4317 case 1: /* OP Gv, Ev */
4318 modrm
= ldub_code(s
->pc
++);
4319 mod
= (modrm
>> 6) & 3;
4320 reg
= ((modrm
>> 3) & 7) | rex_r
;
4321 rm
= (modrm
& 7) | REX_B(s
);
4323 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4324 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4325 } else if (op
== OP_XORL
&& rm
== reg
) {
4328 gen_op_mov_TN_reg(ot
, 1, rm
);
4330 gen_op(s
, op
, ot
, reg
);
4332 case 2: /* OP A, Iv */
4333 val
= insn_get(s
, ot
);
4334 gen_op_movl_T1_im(val
);
4335 gen_op(s
, op
, ot
, OR_EAX
);
4344 case 0x80: /* GRP1 */
4353 ot
= dflag
+ OT_WORD
;
4355 modrm
= ldub_code(s
->pc
++);
4356 mod
= (modrm
>> 6) & 3;
4357 rm
= (modrm
& 7) | REX_B(s
);
4358 op
= (modrm
>> 3) & 7;
4364 s
->rip_offset
= insn_const_size(ot
);
4365 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4376 val
= insn_get(s
, ot
);
4379 val
= (int8_t)insn_get(s
, OT_BYTE
);
4382 gen_op_movl_T1_im(val
);
4383 gen_op(s
, op
, ot
, opreg
);
4387 /**************************/
4388 /* inc, dec, and other misc arith */
4389 case 0x40 ... 0x47: /* inc Gv */
4390 ot
= dflag
? OT_LONG
: OT_WORD
;
4391 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
4393 case 0x48 ... 0x4f: /* dec Gv */
4394 ot
= dflag
? OT_LONG
: OT_WORD
;
4395 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
4397 case 0xf6: /* GRP3 */
4402 ot
= dflag
+ OT_WORD
;
4404 modrm
= ldub_code(s
->pc
++);
4405 mod
= (modrm
>> 6) & 3;
4406 rm
= (modrm
& 7) | REX_B(s
);
4407 op
= (modrm
>> 3) & 7;
4410 s
->rip_offset
= insn_const_size(ot
);
4411 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4412 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4414 gen_op_mov_TN_reg(ot
, 0, rm
);
4419 val
= insn_get(s
, ot
);
4420 gen_op_movl_T1_im(val
);
4421 gen_op_testl_T0_T1_cc();
4422 s
->cc_op
= CC_OP_LOGICB
+ ot
;
4425 tcg_gen_not_tl(cpu_T
[0], cpu_T
[0]);
4427 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4429 gen_op_mov_reg_T0(ot
, rm
);
4433 tcg_gen_neg_tl(cpu_T
[0], cpu_T
[0]);
4435 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4437 gen_op_mov_reg_T0(ot
, rm
);
4439 gen_op_update_neg_cc();
4440 s
->cc_op
= CC_OP_SUBB
+ ot
;
4445 gen_op_mov_TN_reg(OT_BYTE
, 1, R_EAX
);
4446 tcg_gen_ext8u_tl(cpu_T
[0], cpu_T
[0]);
4447 tcg_gen_ext8u_tl(cpu_T
[1], cpu_T
[1]);
4448 /* XXX: use 32 bit mul which could be faster */
4449 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4450 gen_op_mov_reg_T0(OT_WORD
, R_EAX
);
4451 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4452 tcg_gen_andi_tl(cpu_cc_src
, cpu_T
[0], 0xff00);
4453 s
->cc_op
= CC_OP_MULB
;
4456 gen_op_mov_TN_reg(OT_WORD
, 1, R_EAX
);
4457 tcg_gen_ext16u_tl(cpu_T
[0], cpu_T
[0]);
4458 tcg_gen_ext16u_tl(cpu_T
[1], cpu_T
[1]);
4459 /* XXX: use 32 bit mul which could be faster */
4460 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4461 gen_op_mov_reg_T0(OT_WORD
, R_EAX
);
4462 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4463 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 16);
4464 gen_op_mov_reg_T0(OT_WORD
, R_EDX
);
4465 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
4466 s
->cc_op
= CC_OP_MULW
;
4470 #ifdef TARGET_X86_64
4471 gen_op_mov_TN_reg(OT_LONG
, 1, R_EAX
);
4472 tcg_gen_ext32u_tl(cpu_T
[0], cpu_T
[0]);
4473 tcg_gen_ext32u_tl(cpu_T
[1], cpu_T
[1]);
4474 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4475 gen_op_mov_reg_T0(OT_LONG
, R_EAX
);
4476 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4477 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 32);
4478 gen_op_mov_reg_T0(OT_LONG
, R_EDX
);
4479 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
4483 t0
= tcg_temp_new_i64();
4484 t1
= tcg_temp_new_i64();
4485 gen_op_mov_TN_reg(OT_LONG
, 1, R_EAX
);
4486 tcg_gen_extu_i32_i64(t0
, cpu_T
[0]);
4487 tcg_gen_extu_i32_i64(t1
, cpu_T
[1]);
4488 tcg_gen_mul_i64(t0
, t0
, t1
);
4489 tcg_gen_trunc_i64_i32(cpu_T
[0], t0
);
4490 gen_op_mov_reg_T0(OT_LONG
, R_EAX
);
4491 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4492 tcg_gen_shri_i64(t0
, t0
, 32);
4493 tcg_gen_trunc_i64_i32(cpu_T
[0], t0
);
4494 gen_op_mov_reg_T0(OT_LONG
, R_EDX
);
4495 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
4498 s
->cc_op
= CC_OP_MULL
;
4500 #ifdef TARGET_X86_64
4502 gen_helper_mulq_EAX_T0(cpu_T
[0]);
4503 s
->cc_op
= CC_OP_MULQ
;
4511 gen_op_mov_TN_reg(OT_BYTE
, 1, R_EAX
);
4512 tcg_gen_ext8s_tl(cpu_T
[0], cpu_T
[0]);
4513 tcg_gen_ext8s_tl(cpu_T
[1], cpu_T
[1]);
4514 /* XXX: use 32 bit mul which could be faster */
4515 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4516 gen_op_mov_reg_T0(OT_WORD
, R_EAX
);
4517 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4518 tcg_gen_ext8s_tl(cpu_tmp0
, cpu_T
[0]);
4519 tcg_gen_sub_tl(cpu_cc_src
, cpu_T
[0], cpu_tmp0
);
4520 s
->cc_op
= CC_OP_MULB
;
4523 gen_op_mov_TN_reg(OT_WORD
, 1, R_EAX
);
4524 tcg_gen_ext16s_tl(cpu_T
[0], cpu_T
[0]);
4525 tcg_gen_ext16s_tl(cpu_T
[1], cpu_T
[1]);
4526 /* XXX: use 32 bit mul which could be faster */
4527 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4528 gen_op_mov_reg_T0(OT_WORD
, R_EAX
);
4529 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4530 tcg_gen_ext16s_tl(cpu_tmp0
, cpu_T
[0]);
4531 tcg_gen_sub_tl(cpu_cc_src
, cpu_T
[0], cpu_tmp0
);
4532 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 16);
4533 gen_op_mov_reg_T0(OT_WORD
, R_EDX
);
4534 s
->cc_op
= CC_OP_MULW
;
4538 #ifdef TARGET_X86_64
4539 gen_op_mov_TN_reg(OT_LONG
, 1, R_EAX
);
4540 tcg_gen_ext32s_tl(cpu_T
[0], cpu_T
[0]);
4541 tcg_gen_ext32s_tl(cpu_T
[1], cpu_T
[1]);
4542 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4543 gen_op_mov_reg_T0(OT_LONG
, R_EAX
);
4544 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4545 tcg_gen_ext32s_tl(cpu_tmp0
, cpu_T
[0]);
4546 tcg_gen_sub_tl(cpu_cc_src
, cpu_T
[0], cpu_tmp0
);
4547 tcg_gen_shri_tl(cpu_T
[0], cpu_T
[0], 32);
4548 gen_op_mov_reg_T0(OT_LONG
, R_EDX
);
4552 t0
= tcg_temp_new_i64();
4553 t1
= tcg_temp_new_i64();
4554 gen_op_mov_TN_reg(OT_LONG
, 1, R_EAX
);
4555 tcg_gen_ext_i32_i64(t0
, cpu_T
[0]);
4556 tcg_gen_ext_i32_i64(t1
, cpu_T
[1]);
4557 tcg_gen_mul_i64(t0
, t0
, t1
);
4558 tcg_gen_trunc_i64_i32(cpu_T
[0], t0
);
4559 gen_op_mov_reg_T0(OT_LONG
, R_EAX
);
4560 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4561 tcg_gen_sari_tl(cpu_tmp0
, cpu_T
[0], 31);
4562 tcg_gen_shri_i64(t0
, t0
, 32);
4563 tcg_gen_trunc_i64_i32(cpu_T
[0], t0
);
4564 gen_op_mov_reg_T0(OT_LONG
, R_EDX
);
4565 tcg_gen_sub_tl(cpu_cc_src
, cpu_T
[0], cpu_tmp0
);
4568 s
->cc_op
= CC_OP_MULL
;
4570 #ifdef TARGET_X86_64
4572 gen_helper_imulq_EAX_T0(cpu_T
[0]);
4573 s
->cc_op
= CC_OP_MULQ
;
4581 gen_jmp_im(pc_start
- s
->cs_base
);
4582 gen_helper_divb_AL(cpu_T
[0]);
4585 gen_jmp_im(pc_start
- s
->cs_base
);
4586 gen_helper_divw_AX(cpu_T
[0]);
4590 gen_jmp_im(pc_start
- s
->cs_base
);
4591 gen_helper_divl_EAX(cpu_T
[0]);
4593 #ifdef TARGET_X86_64
4595 gen_jmp_im(pc_start
- s
->cs_base
);
4596 gen_helper_divq_EAX(cpu_T
[0]);
4604 gen_jmp_im(pc_start
- s
->cs_base
);
4605 gen_helper_idivb_AL(cpu_T
[0]);
4608 gen_jmp_im(pc_start
- s
->cs_base
);
4609 gen_helper_idivw_AX(cpu_T
[0]);
4613 gen_jmp_im(pc_start
- s
->cs_base
);
4614 gen_helper_idivl_EAX(cpu_T
[0]);
4616 #ifdef TARGET_X86_64
4618 gen_jmp_im(pc_start
- s
->cs_base
);
4619 gen_helper_idivq_EAX(cpu_T
[0]);
4629 case 0xfe: /* GRP4 */
4630 case 0xff: /* GRP5 */
4634 ot
= dflag
+ OT_WORD
;
4636 modrm
= ldub_code(s
->pc
++);
4637 mod
= (modrm
>> 6) & 3;
4638 rm
= (modrm
& 7) | REX_B(s
);
4639 op
= (modrm
>> 3) & 7;
4640 if (op
>= 2 && b
== 0xfe) {
4644 if (op
== 2 || op
== 4) {
4645 /* operand size for jumps is 64 bit */
4647 } else if (op
== 3 || op
== 5) {
4648 ot
= dflag
? OT_LONG
+ (rex_w
== 1) : OT_WORD
;
4649 } else if (op
== 6) {
4650 /* default push size is 64 bit */
4651 ot
= dflag
? OT_QUAD
: OT_WORD
;
4655 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4656 if (op
>= 2 && op
!= 3 && op
!= 5)
4657 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4659 gen_op_mov_TN_reg(ot
, 0, rm
);
4663 case 0: /* inc Ev */
4668 gen_inc(s
, ot
, opreg
, 1);
4670 case 1: /* dec Ev */
4675 gen_inc(s
, ot
, opreg
, -1);
4677 case 2: /* call Ev */
4678 /* XXX: optimize if memory (no 'and' is necessary) */
4680 gen_op_andl_T0_ffff();
4681 next_eip
= s
->pc
- s
->cs_base
;
4682 gen_movtl_T1_im(next_eip
);
4687 case 3: /* lcall Ev */
4688 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4689 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
4690 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
4692 if (s
->pe
&& !s
->vm86
) {
4693 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4694 gen_op_set_cc_op(s
->cc_op
);
4695 gen_jmp_im(pc_start
- s
->cs_base
);
4696 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4697 gen_helper_lcall_protected(cpu_tmp2_i32
, cpu_T
[1],
4698 tcg_const_i32(dflag
),
4699 tcg_const_i32(s
->pc
- pc_start
));
4701 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4702 gen_helper_lcall_real(cpu_tmp2_i32
, cpu_T
[1],
4703 tcg_const_i32(dflag
),
4704 tcg_const_i32(s
->pc
- s
->cs_base
));
4708 case 4: /* jmp Ev */
4710 gen_op_andl_T0_ffff();
4714 case 5: /* ljmp Ev */
4715 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4716 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
4717 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
4719 if (s
->pe
&& !s
->vm86
) {
4720 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4721 gen_op_set_cc_op(s
->cc_op
);
4722 gen_jmp_im(pc_start
- s
->cs_base
);
4723 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
4724 gen_helper_ljmp_protected(cpu_tmp2_i32
, cpu_T
[1],
4725 tcg_const_i32(s
->pc
- pc_start
));
4727 gen_op_movl_seg_T0_vm(R_CS
);
4728 gen_op_movl_T0_T1();
4733 case 6: /* push Ev */
4741 case 0x84: /* test Ev, Gv */
4746 ot
= dflag
+ OT_WORD
;
4748 modrm
= ldub_code(s
->pc
++);
4749 reg
= ((modrm
>> 3) & 7) | rex_r
;
4751 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4752 gen_op_mov_TN_reg(ot
, 1, reg
);
4753 gen_op_testl_T0_T1_cc();
4754 s
->cc_op
= CC_OP_LOGICB
+ ot
;
4757 case 0xa8: /* test eAX, Iv */
4762 ot
= dflag
+ OT_WORD
;
4763 val
= insn_get(s
, ot
);
4765 gen_op_mov_TN_reg(ot
, 0, OR_EAX
);
4766 gen_op_movl_T1_im(val
);
4767 gen_op_testl_T0_T1_cc();
4768 s
->cc_op
= CC_OP_LOGICB
+ ot
;
4771 case 0x98: /* CWDE/CBW */
4772 #ifdef TARGET_X86_64
4774 gen_op_mov_TN_reg(OT_LONG
, 0, R_EAX
);
4775 tcg_gen_ext32s_tl(cpu_T
[0], cpu_T
[0]);
4776 gen_op_mov_reg_T0(OT_QUAD
, R_EAX
);
4780 gen_op_mov_TN_reg(OT_WORD
, 0, R_EAX
);
4781 tcg_gen_ext16s_tl(cpu_T
[0], cpu_T
[0]);
4782 gen_op_mov_reg_T0(OT_LONG
, R_EAX
);
4784 gen_op_mov_TN_reg(OT_BYTE
, 0, R_EAX
);
4785 tcg_gen_ext8s_tl(cpu_T
[0], cpu_T
[0]);
4786 gen_op_mov_reg_T0(OT_WORD
, R_EAX
);
4789 case 0x99: /* CDQ/CWD */
4790 #ifdef TARGET_X86_64
4792 gen_op_mov_TN_reg(OT_QUAD
, 0, R_EAX
);
4793 tcg_gen_sari_tl(cpu_T
[0], cpu_T
[0], 63);
4794 gen_op_mov_reg_T0(OT_QUAD
, R_EDX
);
4798 gen_op_mov_TN_reg(OT_LONG
, 0, R_EAX
);
4799 tcg_gen_ext32s_tl(cpu_T
[0], cpu_T
[0]);
4800 tcg_gen_sari_tl(cpu_T
[0], cpu_T
[0], 31);
4801 gen_op_mov_reg_T0(OT_LONG
, R_EDX
);
4803 gen_op_mov_TN_reg(OT_WORD
, 0, R_EAX
);
4804 tcg_gen_ext16s_tl(cpu_T
[0], cpu_T
[0]);
4805 tcg_gen_sari_tl(cpu_T
[0], cpu_T
[0], 15);
4806 gen_op_mov_reg_T0(OT_WORD
, R_EDX
);
4809 case 0x1af: /* imul Gv, Ev */
4810 case 0x69: /* imul Gv, Ev, I */
4812 ot
= dflag
+ OT_WORD
;
4813 modrm
= ldub_code(s
->pc
++);
4814 reg
= ((modrm
>> 3) & 7) | rex_r
;
4816 s
->rip_offset
= insn_const_size(ot
);
4819 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4821 val
= insn_get(s
, ot
);
4822 gen_op_movl_T1_im(val
);
4823 } else if (b
== 0x6b) {
4824 val
= (int8_t)insn_get(s
, OT_BYTE
);
4825 gen_op_movl_T1_im(val
);
4827 gen_op_mov_TN_reg(ot
, 1, reg
);
4830 #ifdef TARGET_X86_64
4831 if (ot
== OT_QUAD
) {
4832 gen_helper_imulq_T0_T1(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4835 if (ot
== OT_LONG
) {
4836 #ifdef TARGET_X86_64
4837 tcg_gen_ext32s_tl(cpu_T
[0], cpu_T
[0]);
4838 tcg_gen_ext32s_tl(cpu_T
[1], cpu_T
[1]);
4839 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4840 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4841 tcg_gen_ext32s_tl(cpu_tmp0
, cpu_T
[0]);
4842 tcg_gen_sub_tl(cpu_cc_src
, cpu_T
[0], cpu_tmp0
);
4846 t0
= tcg_temp_new_i64();
4847 t1
= tcg_temp_new_i64();
4848 tcg_gen_ext_i32_i64(t0
, cpu_T
[0]);
4849 tcg_gen_ext_i32_i64(t1
, cpu_T
[1]);
4850 tcg_gen_mul_i64(t0
, t0
, t1
);
4851 tcg_gen_trunc_i64_i32(cpu_T
[0], t0
);
4852 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4853 tcg_gen_sari_tl(cpu_tmp0
, cpu_T
[0], 31);
4854 tcg_gen_shri_i64(t0
, t0
, 32);
4855 tcg_gen_trunc_i64_i32(cpu_T
[1], t0
);
4856 tcg_gen_sub_tl(cpu_cc_src
, cpu_T
[1], cpu_tmp0
);
4860 tcg_gen_ext16s_tl(cpu_T
[0], cpu_T
[0]);
4861 tcg_gen_ext16s_tl(cpu_T
[1], cpu_T
[1]);
4862 /* XXX: use 32 bit mul which could be faster */
4863 tcg_gen_mul_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
4864 tcg_gen_mov_tl(cpu_cc_dst
, cpu_T
[0]);
4865 tcg_gen_ext16s_tl(cpu_tmp0
, cpu_T
[0]);
4866 tcg_gen_sub_tl(cpu_cc_src
, cpu_T
[0], cpu_tmp0
);
4868 gen_op_mov_reg_T0(ot
, reg
);
4869 s
->cc_op
= CC_OP_MULB
+ ot
;
4872 case 0x1c1: /* xadd Ev, Gv */
4876 ot
= dflag
+ OT_WORD
;
4877 modrm
= ldub_code(s
->pc
++);
4878 reg
= ((modrm
>> 3) & 7) | rex_r
;
4879 mod
= (modrm
>> 6) & 3;
4881 rm
= (modrm
& 7) | REX_B(s
);
4882 gen_op_mov_TN_reg(ot
, 0, reg
);
4883 gen_op_mov_TN_reg(ot
, 1, rm
);
4884 gen_op_addl_T0_T1();
4885 gen_op_mov_reg_T1(ot
, reg
);
4886 gen_op_mov_reg_T0(ot
, rm
);
4888 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4889 gen_op_mov_TN_reg(ot
, 0, reg
);
4890 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4891 gen_op_addl_T0_T1();
4892 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4893 gen_op_mov_reg_T1(ot
, reg
);
4895 gen_op_update2_cc();
4896 s
->cc_op
= CC_OP_ADDB
+ ot
;
4899 case 0x1b1: /* cmpxchg Ev, Gv */
4902 TCGv t0
, t1
, t2
, a0
;
4907 ot
= dflag
+ OT_WORD
;
4908 modrm
= ldub_code(s
->pc
++);
4909 reg
= ((modrm
>> 3) & 7) | rex_r
;
4910 mod
= (modrm
>> 6) & 3;
4911 t0
= tcg_temp_local_new();
4912 t1
= tcg_temp_local_new();
4913 t2
= tcg_temp_local_new();
4914 a0
= tcg_temp_local_new();
4915 gen_op_mov_v_reg(ot
, t1
, reg
);
4917 rm
= (modrm
& 7) | REX_B(s
);
4918 gen_op_mov_v_reg(ot
, t0
, rm
);
4920 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4921 tcg_gen_mov_tl(a0
, cpu_A0
);
4922 gen_op_ld_v(ot
+ s
->mem_index
, t0
, a0
);
4923 rm
= 0; /* avoid warning */
4925 label1
= gen_new_label();
4926 tcg_gen_sub_tl(t2
, cpu_regs
[R_EAX
], t0
);
4928 tcg_gen_brcondi_tl(TCG_COND_EQ
, t2
, 0, label1
);
4929 label2
= gen_new_label();
4931 gen_op_mov_reg_v(ot
, R_EAX
, t0
);
4933 gen_set_label(label1
);
4934 gen_op_mov_reg_v(ot
, rm
, t1
);
4936 /* perform no-op store cycle like physical cpu; must be
4937 before changing accumulator to ensure idempotency if
4938 the store faults and the instruction is restarted */
4939 gen_op_st_v(ot
+ s
->mem_index
, t0
, a0
);
4940 gen_op_mov_reg_v(ot
, R_EAX
, t0
);
4942 gen_set_label(label1
);
4943 gen_op_st_v(ot
+ s
->mem_index
, t1
, a0
);
4945 gen_set_label(label2
);
4946 tcg_gen_mov_tl(cpu_cc_src
, t0
);
4947 tcg_gen_mov_tl(cpu_cc_dst
, t2
);
4948 s
->cc_op
= CC_OP_SUBB
+ ot
;
4955 case 0x1c7: /* cmpxchg8b */
4956 modrm
= ldub_code(s
->pc
++);
4957 mod
= (modrm
>> 6) & 3;
4958 if ((mod
== 3) || ((modrm
& 0x38) != 0x8))
4960 #ifdef TARGET_X86_64
4962 if (!(s
->cpuid_ext_features
& CPUID_EXT_CX16
))
4964 gen_jmp_im(pc_start
- s
->cs_base
);
4965 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4966 gen_op_set_cc_op(s
->cc_op
);
4967 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4968 gen_helper_cmpxchg16b(cpu_A0
);
4972 if (!(s
->cpuid_features
& CPUID_CX8
))
4974 gen_jmp_im(pc_start
- s
->cs_base
);
4975 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4976 gen_op_set_cc_op(s
->cc_op
);
4977 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4978 gen_helper_cmpxchg8b(cpu_A0
);
4980 s
->cc_op
= CC_OP_EFLAGS
;
4983 /**************************/
4985 case 0x50 ... 0x57: /* push */
4986 gen_op_mov_TN_reg(OT_LONG
, 0, (b
& 7) | REX_B(s
));
4989 case 0x58 ... 0x5f: /* pop */
4991 ot
= dflag
? OT_QUAD
: OT_WORD
;
4993 ot
= dflag
+ OT_WORD
;
4996 /* NOTE: order is important for pop %sp */
4998 gen_op_mov_reg_T0(ot
, (b
& 7) | REX_B(s
));
5000 case 0x60: /* pusha */
5005 case 0x61: /* popa */
5010 case 0x68: /* push Iv */
5013 ot
= dflag
? OT_QUAD
: OT_WORD
;
5015 ot
= dflag
+ OT_WORD
;
5018 val
= insn_get(s
, ot
);
5020 val
= (int8_t)insn_get(s
, OT_BYTE
);
5021 gen_op_movl_T0_im(val
);
5024 case 0x8f: /* pop Ev */
5026 ot
= dflag
? OT_QUAD
: OT_WORD
;
5028 ot
= dflag
+ OT_WORD
;
5030 modrm
= ldub_code(s
->pc
++);
5031 mod
= (modrm
>> 6) & 3;
5034 /* NOTE: order is important for pop %sp */
5036 rm
= (modrm
& 7) | REX_B(s
);
5037 gen_op_mov_reg_T0(ot
, rm
);
5039 /* NOTE: order is important too for MMU exceptions */
5040 s
->popl_esp_hack
= 1 << ot
;
5041 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5042 s
->popl_esp_hack
= 0;
5046 case 0xc8: /* enter */
5049 val
= lduw_code(s
->pc
);
5051 level
= ldub_code(s
->pc
++);
5052 gen_enter(s
, val
, level
);
5055 case 0xc9: /* leave */
5056 /* XXX: exception not precise (ESP is updated before potential exception) */
5058 gen_op_mov_TN_reg(OT_QUAD
, 0, R_EBP
);
5059 gen_op_mov_reg_T0(OT_QUAD
, R_ESP
);
5060 } else if (s
->ss32
) {
5061 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
5062 gen_op_mov_reg_T0(OT_LONG
, R_ESP
);
5064 gen_op_mov_TN_reg(OT_WORD
, 0, R_EBP
);
5065 gen_op_mov_reg_T0(OT_WORD
, R_ESP
);
5069 ot
= dflag
? OT_QUAD
: OT_WORD
;
5071 ot
= dflag
+ OT_WORD
;
5073 gen_op_mov_reg_T0(ot
, R_EBP
);
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        gen_op_movl_T0_seg(b >> 3);
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        /* if reg == SS, inhibit interrupts/trace. */
        /* If several instructions disable interrupts, only the
           _first_ does it */
        if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
            gen_helper_set_inhibit_irq();
        gen_jmp_im(s->pc - s->cs_base);
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_jmp_im(s->pc - s->cs_base);
        /**************************/
    case 0x89: /* mov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
    case 0xc7: /* mov Ev, Iv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        s->rip_offset = insn_const_size(ot);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
    case 0x8b: /* mov Ev, Gv */
        ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0(ot, reg);
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        if (reg >= 6 || reg == R_CS)
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        /* if reg == SS, inhibit interrupts/trace */
        /* If several instructions disable interrupts, only the
           _first_ does it */
        if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
            gen_helper_set_inhibit_irq();
        gen_jmp_im(s->pc - s->cs_base);
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_op_movl_T0_seg(reg);
        ot = OT_WORD + dflag;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        /* d_ot is the size of destination */
        d_ot = dflag + OT_WORD;
        /* ot is the size of source */
        ot = (b & 1) + OT_BYTE;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 0, rm);
        switch(ot | (b & 8)) {
            tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(d_ot, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_lds_T0_A0(ot + s->mem_index);
        gen_op_ldu_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(d_ot, reg);
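        /* Bit 3 of the opcode distinguishes zero extension (movz*) from
           sign extension (movs*); for a memory source the load itself is
           done unsigned or signed instead of extending afterwards. */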
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
    case 0xa0: /* mov EAX, Ov */
    case 0xa2: /* mov Ov, EAX */
        target_ulong offset_addr;
        ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            offset_addr = ldq_code(s->pc);
            gen_op_movq_A0_im(offset_addr);
        offset_addr = insn_get(s, OT_LONG);
        offset_addr = insn_get(s, OT_WORD);
        gen_op_movl_A0_im(offset_addr);
        gen_add_A0_ds_seg(s);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, R_EAX);
        gen_op_mov_TN_reg(ot, 0, R_EAX);
        gen_op_st_T0_A0(ot + s->mem_index);
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
        gen_op_movl_A0_reg(R_EBX);
        gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
        tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
        gen_op_andl_A0_ffff();
        tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
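        /* XLAT: AL is replaced by the byte at DS:[rBX + AL]; the table
           base is masked to the current address size before the segment
           base is added. */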
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        tmp = ldq_code(s->pc);
        reg = (b & 7) | REX_B(s);
        gen_movtl_T0_im(tmp);
        gen_op_mov_reg_T0(OT_QUAD, reg);
        ot = dflag ? OT_LONG : OT_WORD;
        val = insn_get(s, ot);
        reg = (b & 7) | REX_B(s);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(ot, reg);
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
    case 0x87: /* xchg Ev, Gv */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_op_mov_TN_reg(ot, 1, rm);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_mov_reg_T1(ot, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_TN_reg(ot, 0, reg);
        /* for xchg, lock is implicit */
        if (!(prefixes & PREFIX_LOCK))
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (!(prefixes & PREFIX_LOCK))
            gen_helper_unlock();
        gen_op_mov_reg_T1(ot, reg);
    case 0xc4: /* les Gv */
    case 0xc5: /* lds Gv */
    case 0x1b2: /* lss Gv */
    case 0x1b4: /* lfs Gv */
    case 0x1b5: /* lgs Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        gen_jmp_im(s->pc - s->cs_base);
        /************************/
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        opreg = (modrm & 7) | REX_B(s);
        gen_shift(s, op, ot, opreg, OR_ECX);
        shift = ldub_code(s->pc++);
        gen_shifti(s, op, ot, opreg, shift);
    case 0x1a4: /* shld imm */
    case 0x1a5: /* shld cl */
    case 0x1ac: /* shrd imm */
    case 0x1ad: /* shrd cl */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_mov_TN_reg(ot, 1, reg);
        val = ldub_code(s->pc++);
        tcg_gen_movi_tl(cpu_T3, val);
        tcg_gen_mov_tl(cpu_T3, cpu_regs[R_ECX]);
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5509 /************************/
5512 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
5513 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5514 /* XXX: what to do if illegal op ? */
5515 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5518 modrm
= ldub_code(s
->pc
++);
5519 mod
= (modrm
>> 6) & 3;
5521 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
5524 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5526 case 0x00 ... 0x07: /* fxxxs */
5527 case 0x10 ... 0x17: /* fixxxl */
5528 case 0x20 ... 0x27: /* fxxxl */
5529 case 0x30 ... 0x37: /* fixxx */
5536 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
5537 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5538 gen_helper_flds_FT0(cpu_tmp2_i32
);
5541 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
5542 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5543 gen_helper_fildl_FT0(cpu_tmp2_i32
);
5546 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
5547 (s
->mem_index
>> 2) - 1);
5548 gen_helper_fldl_FT0(cpu_tmp1_i64
);
5552 gen_op_lds_T0_A0(OT_WORD
+ s
->mem_index
);
5553 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5554 gen_helper_fildl_FT0(cpu_tmp2_i32
);
5558 gen_helper_fp_arith_ST0_FT0(op1
);
5560 /* fcomp needs pop */
5565 case 0x08: /* flds */
5566 case 0x0a: /* fsts */
5567 case 0x0b: /* fstps */
5568 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5569 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5570 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5575 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
5576 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5577 gen_helper_flds_ST0(cpu_tmp2_i32
);
5580 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
5581 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5582 gen_helper_fildl_ST0(cpu_tmp2_i32
);
5585 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
5586 (s
->mem_index
>> 2) - 1);
5587 gen_helper_fldl_ST0(cpu_tmp1_i64
);
5591 gen_op_lds_T0_A0(OT_WORD
+ s
->mem_index
);
5592 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5593 gen_helper_fildl_ST0(cpu_tmp2_i32
);
5598 /* XXX: the corresponding CPUID bit must be tested ! */
5601 gen_helper_fisttl_ST0(cpu_tmp2_i32
);
5602 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5603 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
5606 gen_helper_fisttll_ST0(cpu_tmp1_i64
);
5607 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
5608 (s
->mem_index
>> 2) - 1);
5612 gen_helper_fistt_ST0(cpu_tmp2_i32
);
5613 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5614 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5622 gen_helper_fsts_ST0(cpu_tmp2_i32
);
5623 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5624 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
5627 gen_helper_fistl_ST0(cpu_tmp2_i32
);
5628 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5629 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
5632 gen_helper_fstl_ST0(cpu_tmp1_i64
);
5633 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
5634 (s
->mem_index
>> 2) - 1);
5638 gen_helper_fist_ST0(cpu_tmp2_i32
);
5639 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5640 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5648 case 0x0c: /* fldenv mem */
5649 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5650 gen_op_set_cc_op(s
->cc_op
);
5651 gen_jmp_im(pc_start
- s
->cs_base
);
5653 cpu_A0
, tcg_const_i32(s
->dflag
));
5655 case 0x0d: /* fldcw mem */
5656 gen_op_ld_T0_A0(OT_WORD
+ s
->mem_index
);
5657 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
5658 gen_helper_fldcw(cpu_tmp2_i32
);
5660 case 0x0e: /* fnstenv mem */
5661 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5662 gen_op_set_cc_op(s
->cc_op
);
5663 gen_jmp_im(pc_start
- s
->cs_base
);
5664 gen_helper_fstenv(cpu_A0
, tcg_const_i32(s
->dflag
));
5666 case 0x0f: /* fnstcw mem */
5667 gen_helper_fnstcw(cpu_tmp2_i32
);
5668 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5669 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5671 case 0x1d: /* fldt mem */
5672 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5673 gen_op_set_cc_op(s
->cc_op
);
5674 gen_jmp_im(pc_start
- s
->cs_base
);
5675 gen_helper_fldt_ST0(cpu_A0
);
5677 case 0x1f: /* fstpt mem */
5678 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5679 gen_op_set_cc_op(s
->cc_op
);
5680 gen_jmp_im(pc_start
- s
->cs_base
);
5681 gen_helper_fstt_ST0(cpu_A0
);
5684 case 0x2c: /* frstor mem */
5685 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5686 gen_op_set_cc_op(s
->cc_op
);
5687 gen_jmp_im(pc_start
- s
->cs_base
);
5688 gen_helper_frstor(cpu_A0
, tcg_const_i32(s
->dflag
));
5690 case 0x2e: /* fnsave mem */
5691 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5692 gen_op_set_cc_op(s
->cc_op
);
5693 gen_jmp_im(pc_start
- s
->cs_base
);
5694 gen_helper_fsave(cpu_A0
, tcg_const_i32(s
->dflag
));
5696 case 0x2f: /* fnstsw mem */
5697 gen_helper_fnstsw(cpu_tmp2_i32
);
5698 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5699 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5701 case 0x3c: /* fbld */
5702 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5703 gen_op_set_cc_op(s
->cc_op
);
5704 gen_jmp_im(pc_start
- s
->cs_base
);
5705 gen_helper_fbld_ST0(cpu_A0
);
5707 case 0x3e: /* fbstp */
5708 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5709 gen_op_set_cc_op(s
->cc_op
);
5710 gen_jmp_im(pc_start
- s
->cs_base
);
5711 gen_helper_fbst_ST0(cpu_A0
);
5714 case 0x3d: /* fildll */
5715 tcg_gen_qemu_ld64(cpu_tmp1_i64
, cpu_A0
,
5716 (s
->mem_index
>> 2) - 1);
5717 gen_helper_fildll_ST0(cpu_tmp1_i64
);
5719 case 0x3f: /* fistpll */
5720 gen_helper_fistll_ST0(cpu_tmp1_i64
);
5721 tcg_gen_qemu_st64(cpu_tmp1_i64
, cpu_A0
,
5722 (s
->mem_index
>> 2) - 1);
5729 /* register float ops */
5733 case 0x08: /* fld sti */
5735 gen_helper_fmov_ST0_STN(tcg_const_i32((opreg
+ 1) & 7));
5737 case 0x09: /* fxchg sti */
5738 case 0x29: /* fxchg4 sti, undocumented op */
5739 case 0x39: /* fxchg7 sti, undocumented op */
5740 gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg
));
5742 case 0x0a: /* grp d9/2 */
5745 /* check exceptions (FreeBSD FPU probe) */
5746 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5747 gen_op_set_cc_op(s
->cc_op
);
5748 gen_jmp_im(pc_start
- s
->cs_base
);
5755 case 0x0c: /* grp d9/4 */
5758 gen_helper_fchs_ST0();
5761 gen_helper_fabs_ST0();
5764 gen_helper_fldz_FT0();
5765 gen_helper_fcom_ST0_FT0();
5768 gen_helper_fxam_ST0();
5774 case 0x0d: /* grp d9/5 */
5779 gen_helper_fld1_ST0();
5783 gen_helper_fldl2t_ST0();
5787 gen_helper_fldl2e_ST0();
5791 gen_helper_fldpi_ST0();
5795 gen_helper_fldlg2_ST0();
5799 gen_helper_fldln2_ST0();
5803 gen_helper_fldz_ST0();
5810 case 0x0e: /* grp d9/6 */
5821 case 3: /* fpatan */
5822 gen_helper_fpatan();
5824 case 4: /* fxtract */
5825 gen_helper_fxtract();
5827 case 5: /* fprem1 */
5828 gen_helper_fprem1();
5830 case 6: /* fdecstp */
5831 gen_helper_fdecstp();
5834 case 7: /* fincstp */
5835 gen_helper_fincstp();
5839 case 0x0f: /* grp d9/7 */
5844 case 1: /* fyl2xp1 */
5845 gen_helper_fyl2xp1();
5850 case 3: /* fsincos */
5851 gen_helper_fsincos();
5853 case 5: /* fscale */
5854 gen_helper_fscale();
5856 case 4: /* frndint */
5857 gen_helper_frndint();
5868 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5869 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5870 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5876 gen_helper_fp_arith_STN_ST0(op1
, opreg
);
5880 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5881 gen_helper_fp_arith_ST0_FT0(op1
);
5885 case 0x02: /* fcom */
5886 case 0x22: /* fcom2, undocumented op */
5887 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5888 gen_helper_fcom_ST0_FT0();
5890 case 0x03: /* fcomp */
5891 case 0x23: /* fcomp3, undocumented op */
5892 case 0x32: /* fcomp5, undocumented op */
5893 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5894 gen_helper_fcom_ST0_FT0();
5897 case 0x15: /* da/5 */
5899 case 1: /* fucompp */
5900 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5901 gen_helper_fucom_ST0_FT0();
5911 case 0: /* feni (287 only, just do nop here) */
5913 case 1: /* fdisi (287 only, just do nop here) */
5918 case 3: /* fninit */
5919 gen_helper_fninit();
5921 case 4: /* fsetpm (287 only, just do nop here) */
5927 case 0x1d: /* fucomi */
5928 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5929 gen_op_set_cc_op(s
->cc_op
);
5930 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5931 gen_helper_fucomi_ST0_FT0();
5932 s
->cc_op
= CC_OP_EFLAGS
;
5934 case 0x1e: /* fcomi */
5935 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5936 gen_op_set_cc_op(s
->cc_op
);
5937 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5938 gen_helper_fcomi_ST0_FT0();
5939 s
->cc_op
= CC_OP_EFLAGS
;
5941 case 0x28: /* ffree sti */
5942 gen_helper_ffree_STN(tcg_const_i32(opreg
));
5944 case 0x2a: /* fst sti */
5945 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg
));
5947 case 0x2b: /* fstp sti */
5948 case 0x0b: /* fstp1 sti, undocumented op */
5949 case 0x3a: /* fstp8 sti, undocumented op */
5950 case 0x3b: /* fstp9 sti, undocumented op */
5951 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg
));
5954 case 0x2c: /* fucom st(i) */
5955 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5956 gen_helper_fucom_ST0_FT0();
5958 case 0x2d: /* fucomp st(i) */
5959 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5960 gen_helper_fucom_ST0_FT0();
5963 case 0x33: /* de/3 */
5965 case 1: /* fcompp */
5966 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5967 gen_helper_fcom_ST0_FT0();
5975 case 0x38: /* ffreep sti, undocumented op */
5976 gen_helper_ffree_STN(tcg_const_i32(opreg
));
5979 case 0x3c: /* df/4 */
5982 gen_helper_fnstsw(cpu_tmp2_i32
);
5983 tcg_gen_extu_i32_tl(cpu_T
[0], cpu_tmp2_i32
);
5984 gen_op_mov_reg_T0(OT_WORD
, R_EAX
);
5990 case 0x3d: /* fucomip */
5991 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5992 gen_op_set_cc_op(s
->cc_op
);
5993 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
5994 gen_helper_fucomi_ST0_FT0();
5996 s
->cc_op
= CC_OP_EFLAGS
;
5998 case 0x3e: /* fcomip */
5999 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6000 gen_op_set_cc_op(s
->cc_op
);
6001 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg
));
6002 gen_helper_fcomi_ST0_FT0();
6004 s
->cc_op
= CC_OP_EFLAGS
;
6006 case 0x10 ... 0x13: /* fcmovxx */
6010 static const uint8_t fcmov_cc
[8] = {
6016 op1
= fcmov_cc
[op
& 3] | (((op
>> 3) & 1) ^ 1);
6017 l1
= gen_new_label();
6018 gen_jcc1(s
, s
->cc_op
, op1
, l1
);
6019 gen_helper_fmov_ST0_STN(tcg_const_i32(opreg
));
6028 /************************/
6031 case 0xa4: /* movsS */
6036 ot
= dflag
+ OT_WORD
;
6038 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
6039 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
6045 case 0xaa: /* stosS */
6050 ot
= dflag
+ OT_WORD
;
6052 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
6053 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
6058 case 0xac: /* lodsS */
6063 ot
= dflag
+ OT_WORD
;
6064 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
6065 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
6070 case 0xae: /* scasS */
6075 ot
= dflag
+ OT_WORD
;
6076 if (prefixes
& PREFIX_REPNZ
) {
6077 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
6078 } else if (prefixes
& PREFIX_REPZ
) {
6079 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
6082 s
->cc_op
= CC_OP_SUBB
+ ot
;
6086 case 0xa6: /* cmpsS */
6091 ot
= dflag
+ OT_WORD
;
6092 if (prefixes
& PREFIX_REPNZ
) {
6093 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
6094 } else if (prefixes
& PREFIX_REPZ
) {
6095 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
6098 s
->cc_op
= CC_OP_SUBB
+ ot
;
6101 case 0x6c: /* insS */
6106 ot
= dflag
? OT_LONG
: OT_WORD
;
6107 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
6108 gen_op_andl_T0_ffff();
6109 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
6110 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
) | 4);
6111 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
6112 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
6116 gen_jmp(s
, s
->pc
- s
->cs_base
);
6120 case 0x6e: /* outsS */
6125 ot
= dflag
? OT_LONG
: OT_WORD
;
6126 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
6127 gen_op_andl_T0_ffff();
6128 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
6129 svm_is_rep(prefixes
) | 4);
6130 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
6131 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
6135 gen_jmp(s
, s
->pc
- s
->cs_base
);
6140 /************************/
6148 ot
= dflag
? OT_LONG
: OT_WORD
;
6149 val
= ldub_code(s
->pc
++);
6150 gen_op_movl_T0_im(val
);
6151 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
6152 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
));
6155 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
6156 gen_helper_in_func(ot
, cpu_T
[1], cpu_tmp2_i32
);
6157 gen_op_mov_reg_T1(ot
, R_EAX
);
6160 gen_jmp(s
, s
->pc
- s
->cs_base
);
6168 ot
= dflag
? OT_LONG
: OT_WORD
;
6169 val
= ldub_code(s
->pc
++);
6170 gen_op_movl_T0_im(val
);
6171 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
6172 svm_is_rep(prefixes
));
6173 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
6177 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
6178 tcg_gen_trunc_tl_i32(cpu_tmp3_i32
, cpu_T
[1]);
6179 gen_helper_out_func(ot
, cpu_tmp2_i32
, cpu_tmp3_i32
);
6182 gen_jmp(s
, s
->pc
- s
->cs_base
);
6190 ot
= dflag
? OT_LONG
: OT_WORD
;
6191 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
6192 gen_op_andl_T0_ffff();
6193 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
6194 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
));
6197 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
6198 gen_helper_in_func(ot
, cpu_T
[1], cpu_tmp2_i32
);
6199 gen_op_mov_reg_T1(ot
, R_EAX
);
6202 gen_jmp(s
, s
->pc
- s
->cs_base
);
6210 ot
= dflag
? OT_LONG
: OT_WORD
;
6211 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
6212 gen_op_andl_T0_ffff();
6213 gen_check_io(s
, ot
, pc_start
- s
->cs_base
,
6214 svm_is_rep(prefixes
));
6215 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
6219 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
6220 tcg_gen_trunc_tl_i32(cpu_tmp3_i32
, cpu_T
[1]);
6221 gen_helper_out_func(ot
, cpu_tmp2_i32
, cpu_tmp3_i32
);
6224 gen_jmp(s
, s
->pc
- s
->cs_base
);
6228 /************************/
6230 case 0xc2: /* ret im */
6231 val
= ldsw_code(s
->pc
);
6234 if (CODE64(s
) && s
->dflag
)
6236 gen_stack_update(s
, val
+ (2 << s
->dflag
));
6238 gen_op_andl_T0_ffff();
6242 case 0xc3: /* ret */
6246 gen_op_andl_T0_ffff();
6250 case 0xca: /* lret im */
6251 val
= ldsw_code(s
->pc
);
6254 if (s
->pe
&& !s
->vm86
) {
6255 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6256 gen_op_set_cc_op(s
->cc_op
);
6257 gen_jmp_im(pc_start
- s
->cs_base
);
6258 gen_helper_lret_protected(tcg_const_i32(s
->dflag
),
6259 tcg_const_i32(val
));
6263 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
6265 gen_op_andl_T0_ffff();
6266 /* NOTE: keeping EIP updated is not a problem in case of
6270 gen_op_addl_A0_im(2 << s
->dflag
);
6271 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
6272 gen_op_movl_seg_T0_vm(R_CS
);
6273 /* add stack offset */
6274 gen_stack_update(s
, val
+ (4 << s
->dflag
));
6278 case 0xcb: /* lret */
6281 case 0xcf: /* iret */
6282 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IRET
);
6285 gen_helper_iret_real(tcg_const_i32(s
->dflag
));
6286 s
->cc_op
= CC_OP_EFLAGS
;
6287 } else if (s
->vm86
) {
6289 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6291 gen_helper_iret_real(tcg_const_i32(s
->dflag
));
6292 s
->cc_op
= CC_OP_EFLAGS
;
6295 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6296 gen_op_set_cc_op(s
->cc_op
);
6297 gen_jmp_im(pc_start
- s
->cs_base
);
6298 gen_helper_iret_protected(tcg_const_i32(s
->dflag
),
6299 tcg_const_i32(s
->pc
- s
->cs_base
));
6300 s
->cc_op
= CC_OP_EFLAGS
;
6304 case 0xe8: /* call im */
6307 tval
= (int32_t)insn_get(s
, OT_LONG
);
6309 tval
= (int16_t)insn_get(s
, OT_WORD
);
6310 next_eip
= s
->pc
- s
->cs_base
;
6316 gen_movtl_T0_im(next_eip
);
6321 case 0x9a: /* lcall im */
6323 unsigned int selector
, offset
;
6327 ot
= dflag
? OT_LONG
: OT_WORD
;
6328 offset
= insn_get(s
, ot
);
6329 selector
= insn_get(s
, OT_WORD
);
6331 gen_op_movl_T0_im(selector
);
6332 gen_op_movl_T1_imu(offset
);
6335 case 0xe9: /* jmp im */
6337 tval
= (int32_t)insn_get(s
, OT_LONG
);
6339 tval
= (int16_t)insn_get(s
, OT_WORD
);
6340 tval
+= s
->pc
- s
->cs_base
;
6347 case 0xea: /* ljmp im */
6349 unsigned int selector
, offset
;
6353 ot
= dflag
? OT_LONG
: OT_WORD
;
6354 offset
= insn_get(s
, ot
);
6355 selector
= insn_get(s
, OT_WORD
);
6357 gen_op_movl_T0_im(selector
);
6358 gen_op_movl_T1_imu(offset
);
6361 case 0xeb: /* jmp Jb */
6362 tval
= (int8_t)insn_get(s
, OT_BYTE
);
6363 tval
+= s
->pc
- s
->cs_base
;
6368 case 0x70 ... 0x7f: /* jcc Jb */
6369 tval
= (int8_t)insn_get(s
, OT_BYTE
);
6371 case 0x180 ... 0x18f: /* jcc Jv */
6373 tval
= (int32_t)insn_get(s
, OT_LONG
);
6375 tval
= (int16_t)insn_get(s
, OT_WORD
);
6378 next_eip
= s
->pc
- s
->cs_base
;
6382 gen_jcc(s
, b
, tval
, next_eip
);
6385 case 0x190 ... 0x19f: /* setcc Gv */
6386 modrm
= ldub_code(s
->pc
++);
6388 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
6390 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6395 ot
= dflag
+ OT_WORD
;
6396 modrm
= ldub_code(s
->pc
++);
6397 reg
= ((modrm
>> 3) & 7) | rex_r
;
6398 mod
= (modrm
>> 6) & 3;
6399 t0
= tcg_temp_local_new();
6401 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
6402 gen_op_ld_v(ot
+ s
->mem_index
, t0
, cpu_A0
);
6404 rm
= (modrm
& 7) | REX_B(s
);
6405 gen_op_mov_v_reg(ot
, t0
, rm
);
6407 #ifdef TARGET_X86_64
6408 if (ot
== OT_LONG
) {
6409 /* XXX: specific Intel behaviour ? */
6410 l1
= gen_new_label();
6411 gen_jcc1(s
, s
->cc_op
, b
^ 1, l1
);
6412 tcg_gen_mov_tl(cpu_regs
[reg
], t0
);
6414 tcg_gen_ext32u_tl(cpu_regs
[reg
], cpu_regs
[reg
]);
6418 l1
= gen_new_label();
6419 gen_jcc1(s
, s
->cc_op
, b
^ 1, l1
);
6420 gen_op_mov_reg_v(ot
, reg
, t0
);
6427 /************************/
6429 case 0x9c: /* pushf */
6430 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PUSHF
);
6431 if (s
->vm86
&& s
->iopl
!= 3) {
6432 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6434 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6435 gen_op_set_cc_op(s
->cc_op
);
6436 gen_helper_read_eflags(cpu_T
[0]);
6440 case 0x9d: /* popf */
6441 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_POPF
);
6442 if (s
->vm86
&& s
->iopl
!= 3) {
6443 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6448 gen_helper_write_eflags(cpu_T
[0],
6449 tcg_const_i32((TF_MASK
| AC_MASK
| ID_MASK
| NT_MASK
| IF_MASK
| IOPL_MASK
)));
6451 gen_helper_write_eflags(cpu_T
[0],
6452 tcg_const_i32((TF_MASK
| AC_MASK
| ID_MASK
| NT_MASK
| IF_MASK
| IOPL_MASK
) & 0xffff));
6455 if (s
->cpl
<= s
->iopl
) {
6457 gen_helper_write_eflags(cpu_T
[0],
6458 tcg_const_i32((TF_MASK
| AC_MASK
| ID_MASK
| NT_MASK
| IF_MASK
)));
6460 gen_helper_write_eflags(cpu_T
[0],
6461 tcg_const_i32((TF_MASK
| AC_MASK
| ID_MASK
| NT_MASK
| IF_MASK
) & 0xffff));
6465 gen_helper_write_eflags(cpu_T
[0],
6466 tcg_const_i32((TF_MASK
| AC_MASK
| ID_MASK
| NT_MASK
)));
6468 gen_helper_write_eflags(cpu_T
[0],
6469 tcg_const_i32((TF_MASK
| AC_MASK
| ID_MASK
| NT_MASK
) & 0xffff));
6474 s
->cc_op
= CC_OP_EFLAGS
;
6475 /* abort translation because TF flag may change */
6476 gen_jmp_im(s
->pc
- s
->cs_base
);
6480 case 0x9e: /* sahf */
6481 if (CODE64(s
) && !(s
->cpuid_ext3_features
& CPUID_EXT3_LAHF_LM
))
6483 gen_op_mov_TN_reg(OT_BYTE
, 0, R_AH
);
6484 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6485 gen_op_set_cc_op(s
->cc_op
);
6486 gen_compute_eflags(cpu_cc_src
);
6487 tcg_gen_andi_tl(cpu_cc_src
, cpu_cc_src
, CC_O
);
6488 tcg_gen_andi_tl(cpu_T
[0], cpu_T
[0], CC_S
| CC_Z
| CC_A
| CC_P
| CC_C
);
6489 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_T
[0]);
6490 s
->cc_op
= CC_OP_EFLAGS
;
6492 case 0x9f: /* lahf */
6493 if (CODE64(s
) && !(s
->cpuid_ext3_features
& CPUID_EXT3_LAHF_LM
))
6495 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6496 gen_op_set_cc_op(s
->cc_op
);
6497 gen_compute_eflags(cpu_T
[0]);
6498 /* Note: gen_compute_eflags() only gives the condition codes */
6499 tcg_gen_ori_tl(cpu_T
[0], cpu_T
[0], 0x02);
6500 gen_op_mov_reg_T0(OT_BYTE
, R_AH
);
6502 case 0xf5: /* cmc */
6503 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6504 gen_op_set_cc_op(s
->cc_op
);
6505 gen_compute_eflags(cpu_cc_src
);
6506 tcg_gen_xori_tl(cpu_cc_src
, cpu_cc_src
, CC_C
);
6507 s
->cc_op
= CC_OP_EFLAGS
;
6509 case 0xf8: /* clc */
6510 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6511 gen_op_set_cc_op(s
->cc_op
);
6512 gen_compute_eflags(cpu_cc_src
);
6513 tcg_gen_andi_tl(cpu_cc_src
, cpu_cc_src
, ~CC_C
);
6514 s
->cc_op
= CC_OP_EFLAGS
;
6516 case 0xf9: /* stc */
6517 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6518 gen_op_set_cc_op(s
->cc_op
);
6519 gen_compute_eflags(cpu_cc_src
);
6520 tcg_gen_ori_tl(cpu_cc_src
, cpu_cc_src
, CC_C
);
6521 s
->cc_op
= CC_OP_EFLAGS
;
6523 case 0xfc: /* cld */
6524 tcg_gen_movi_i32(cpu_tmp2_i32
, 1);
6525 tcg_gen_st_i32(cpu_tmp2_i32
, cpu_env
, offsetof(CPUX86State
, df
));
6527 case 0xfd: /* std */
6528 tcg_gen_movi_i32(cpu_tmp2_i32
, -1);
6529 tcg_gen_st_i32(cpu_tmp2_i32
, cpu_env
, offsetof(CPUX86State
, df
));
6532 /************************/
6533 /* bit operations */
6534 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6535 ot
= dflag
+ OT_WORD
;
6536 modrm
= ldub_code(s
->pc
++);
6537 op
= (modrm
>> 3) & 7;
6538 mod
= (modrm
>> 6) & 3;
6539 rm
= (modrm
& 7) | REX_B(s
);
6542 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
6543 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
6545 gen_op_mov_TN_reg(ot
, 0, rm
);
6548 val
= ldub_code(s
->pc
++);
6549 gen_op_movl_T1_im(val
);
6554 case 0x1a3: /* bt Gv, Ev */
6557 case 0x1ab: /* bts */
6560 case 0x1b3: /* btr */
6563 case 0x1bb: /* btc */
6566 ot
= dflag
+ OT_WORD
;
6567 modrm
= ldub_code(s
->pc
++);
6568 reg
= ((modrm
>> 3) & 7) | rex_r
;
6569 mod
= (modrm
>> 6) & 3;
6570 rm
= (modrm
& 7) | REX_B(s
);
6571 gen_op_mov_TN_reg(OT_LONG
, 1, reg
);
6573 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
6574 /* specific case: we need to add a displacement */
6575 gen_exts(ot
, cpu_T
[1]);
6576 tcg_gen_sari_tl(cpu_tmp0
, cpu_T
[1], 3 + ot
);
6577 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, ot
);
6578 tcg_gen_add_tl(cpu_A0
, cpu_A0
, cpu_tmp0
);
6579 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
6581 gen_op_mov_TN_reg(ot
, 0, rm
);
6584 tcg_gen_andi_tl(cpu_T
[1], cpu_T
[1], (1 << (3 + ot
)) - 1);
6587 tcg_gen_shr_tl(cpu_cc_src
, cpu_T
[0], cpu_T
[1]);
6588 tcg_gen_movi_tl(cpu_cc_dst
, 0);
6591 tcg_gen_shr_tl(cpu_tmp4
, cpu_T
[0], cpu_T
[1]);
6592 tcg_gen_movi_tl(cpu_tmp0
, 1);
6593 tcg_gen_shl_tl(cpu_tmp0
, cpu_tmp0
, cpu_T
[1]);
6594 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
6597 tcg_gen_shr_tl(cpu_tmp4
, cpu_T
[0], cpu_T
[1]);
6598 tcg_gen_movi_tl(cpu_tmp0
, 1);
6599 tcg_gen_shl_tl(cpu_tmp0
, cpu_tmp0
, cpu_T
[1]);
6600 tcg_gen_not_tl(cpu_tmp0
, cpu_tmp0
);
6601 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
6605 tcg_gen_shr_tl(cpu_tmp4
, cpu_T
[0], cpu_T
[1]);
6606 tcg_gen_movi_tl(cpu_tmp0
, 1);
6607 tcg_gen_shl_tl(cpu_tmp0
, cpu_tmp0
, cpu_T
[1]);
6608 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
6611 s
->cc_op
= CC_OP_SARB
+ ot
;
6614 gen_op_st_T0_A0(ot
+ s
->mem_index
);
6616 gen_op_mov_reg_T0(ot
, rm
);
6617 tcg_gen_mov_tl(cpu_cc_src
, cpu_tmp4
);
6618 tcg_gen_movi_tl(cpu_cc_dst
, 0);
6621 case 0x1bc: /* bsf */
6622 case 0x1bd: /* bsr */
6627 ot
= dflag
+ OT_WORD
;
6628 modrm
= ldub_code(s
->pc
++);
6629 reg
= ((modrm
>> 3) & 7) | rex_r
;
6630 gen_ldst_modrm(s
,modrm
, ot
, OR_TMP0
, 0);
6631 gen_extu(ot
, cpu_T
[0]);
6632 t0
= tcg_temp_local_new();
6633 tcg_gen_mov_tl(t0
, cpu_T
[0]);
6634 if ((b
& 1) && (prefixes
& PREFIX_REPZ
) &&
6635 (s
->cpuid_ext3_features
& CPUID_EXT3_ABM
)) {
6637 case OT_WORD
: gen_helper_lzcnt(cpu_T
[0], t0
,
6638 tcg_const_i32(16)); break;
6639 case OT_LONG
: gen_helper_lzcnt(cpu_T
[0], t0
,
6640 tcg_const_i32(32)); break;
6641 case OT_QUAD
: gen_helper_lzcnt(cpu_T
[0], t0
,
6642 tcg_const_i32(64)); break;
6644 gen_op_mov_reg_T0(ot
, reg
);
6646 label1
= gen_new_label();
6647 tcg_gen_movi_tl(cpu_cc_dst
, 0);
6648 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, label1
);
6650 gen_helper_bsr(cpu_T
[0], t0
);
6652 gen_helper_bsf(cpu_T
[0], t0
);
6654 gen_op_mov_reg_T0(ot
, reg
);
6655 tcg_gen_movi_tl(cpu_cc_dst
, 1);
6656 gen_set_label(label1
);
6657 tcg_gen_discard_tl(cpu_cc_src
);
6658 s
->cc_op
= CC_OP_LOGICB
+ ot
;
        /************************/
    case 0x27: /* daa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x2f: /* das */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x37: /* aaa */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0x3f: /* aas */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
    case 0xd4: /* aam */
        val = ldub_code(s->pc++);
        gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        gen_helper_aam(tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
    case 0xd5: /* aad */
        val = ldub_code(s->pc++);
        gen_helper_aad(tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
6715 /************************/
6717 case 0x90: /* nop */
6718 /* XXX: correct lock test for all insn */
6719 if (prefixes
& PREFIX_LOCK
) {
6722 /* If REX_B is set, then this is xchg eax, r8d, not a nop. */
6724 goto do_xchg_reg_eax
;
6726 if (prefixes
& PREFIX_REPZ
) {
6727 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PAUSE
);
6730 case 0x9b: /* fwait */
6731 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
6732 (HF_MP_MASK
| HF_TS_MASK
)) {
6733 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
6735 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6736 gen_op_set_cc_op(s
->cc_op
);
6737 gen_jmp_im(pc_start
- s
->cs_base
);
6741 case 0xcc: /* int3 */
6742 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
6744 case 0xcd: /* int N */
6745 val
= ldub_code(s
->pc
++);
6746 if (s
->vm86
&& s
->iopl
!= 3) {
6747 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6749 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
6752 case 0xce: /* into */
6755 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6756 gen_op_set_cc_op(s
->cc_op
);
6757 gen_jmp_im(pc_start
- s
->cs_base
);
6758 gen_helper_into(tcg_const_i32(s
->pc
- pc_start
));
6761 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6762 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_ICEBP
);
6764 gen_debug(s
, pc_start
- s
->cs_base
);
6767 tb_flush(cpu_single_env
);
6768 cpu_set_log(CPU_LOG_INT
| CPU_LOG_TB_IN_ASM
);
6772 case 0xfa: /* cli */
6774 if (s
->cpl
<= s
->iopl
) {
6777 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6783 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6787 case 0xfb: /* sti */
6789 if (s
->cpl
<= s
->iopl
) {
6792 /* interruptions are enabled only the first insn after sti */
6793 /* If several instructions disable interrupts, only the
6795 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
6796 gen_helper_set_inhibit_irq();
6797 /* give a chance to handle pending irqs */
6798 gen_jmp_im(s
->pc
- s
->cs_base
);
6801 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6807 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6811 case 0x62: /* bound */
6814 ot
= dflag
? OT_LONG
: OT_WORD
;
6815 modrm
= ldub_code(s
->pc
++);
6816 reg
= (modrm
>> 3) & 7;
6817 mod
= (modrm
>> 6) & 3;
6820 gen_op_mov_TN_reg(ot
, 0, reg
);
6821 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
6822 gen_jmp_im(pc_start
- s
->cs_base
);
6823 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
6825 gen_helper_boundw(cpu_A0
, cpu_tmp2_i32
);
6827 gen_helper_boundl(cpu_A0
, cpu_tmp2_i32
);
6829 case 0x1c8 ... 0x1cf: /* bswap reg */
6830 reg
= (b
& 7) | REX_B(s
);
6831 #ifdef TARGET_X86_64
6833 gen_op_mov_TN_reg(OT_QUAD
, 0, reg
);
6834 tcg_gen_bswap64_i64(cpu_T
[0], cpu_T
[0]);
6835 gen_op_mov_reg_T0(OT_QUAD
, reg
);
6839 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
6840 tcg_gen_ext32u_tl(cpu_T
[0], cpu_T
[0]);
6841 tcg_gen_bswap32_tl(cpu_T
[0], cpu_T
[0]);
6842 gen_op_mov_reg_T0(OT_LONG
, reg
);
6845 case 0xd6: /* salc */
6848 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6849 gen_op_set_cc_op(s
->cc_op
);
6850 gen_compute_eflags_c(cpu_T
[0]);
6851 tcg_gen_neg_tl(cpu_T
[0], cpu_T
[0]);
6852 gen_op_mov_reg_T0(OT_BYTE
, R_EAX
);
6854 case 0xe0: /* loopnz */
6855 case 0xe1: /* loopz */
6856 case 0xe2: /* loop */
6857 case 0xe3: /* jecxz */
6861 tval
= (int8_t)insn_get(s
, OT_BYTE
);
6862 next_eip
= s
->pc
- s
->cs_base
;
6867 l1
= gen_new_label();
6868 l2
= gen_new_label();
6869 l3
= gen_new_label();
6872 case 0: /* loopnz */
6874 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6875 gen_op_set_cc_op(s
->cc_op
);
6876 gen_op_add_reg_im(s
->aflag
, R_ECX
, -1);
6877 gen_op_jz_ecx(s
->aflag
, l3
);
6878 gen_compute_eflags(cpu_tmp0
);
6879 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, CC_Z
);
6881 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
6883 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_tmp0
, 0, l1
);
6887 gen_op_add_reg_im(s
->aflag
, R_ECX
, -1);
6888 gen_op_jnz_ecx(s
->aflag
, l1
);
6892 gen_op_jz_ecx(s
->aflag
, l1
);
6897 gen_jmp_im(next_eip
);
6906 case 0x130: /* wrmsr */
6907 case 0x132: /* rdmsr */
6909 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6911 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6912 gen_op_set_cc_op(s
->cc_op
);
6913 gen_jmp_im(pc_start
- s
->cs_base
);
6921 case 0x131: /* rdtsc */
6922 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6923 gen_op_set_cc_op(s
->cc_op
);
6924 gen_jmp_im(pc_start
- s
->cs_base
);
6930 gen_jmp(s
, s
->pc
- s
->cs_base
);
6933 case 0x133: /* rdpmc */
6934 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6935 gen_op_set_cc_op(s
->cc_op
);
6936 gen_jmp_im(pc_start
- s
->cs_base
);
6939 case 0x134: /* sysenter */
6940 /* For Intel SYSENTER is valid on 64-bit */
6941 if (CODE64(s
) && cpu_single_env
->cpuid_vendor1
!= CPUID_VENDOR_INTEL_1
)
6944 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6946 gen_update_cc_op(s
);
6947 gen_jmp_im(pc_start
- s
->cs_base
);
6948 gen_helper_sysenter();
6952 case 0x135: /* sysexit */
6953 /* For Intel SYSEXIT is valid on 64-bit */
6954 if (CODE64(s
) && cpu_single_env
->cpuid_vendor1
!= CPUID_VENDOR_INTEL_1
)
6957 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6959 gen_update_cc_op(s
);
6960 gen_jmp_im(pc_start
- s
->cs_base
);
6961 gen_helper_sysexit(tcg_const_i32(dflag
));
6965 #ifdef TARGET_X86_64
6966 case 0x105: /* syscall */
6967 /* XXX: is it usable in real mode ? */
6968 gen_update_cc_op(s
);
6969 gen_jmp_im(pc_start
- s
->cs_base
);
6970 gen_helper_syscall(tcg_const_i32(s
->pc
- pc_start
));
6973 case 0x107: /* sysret */
6975 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6977 gen_update_cc_op(s
);
6978 gen_jmp_im(pc_start
- s
->cs_base
);
6979 gen_helper_sysret(tcg_const_i32(s
->dflag
));
6980 /* condition codes are modified only in long mode */
6982 s
->cc_op
= CC_OP_EFLAGS
;
6987 case 0x1a2: /* cpuid */
6988 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6989 gen_op_set_cc_op(s
->cc_op
);
6990 gen_jmp_im(pc_start
- s
->cs_base
);
6993 case 0xf4: /* hlt */
6995 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
6997 if (s
->cc_op
!= CC_OP_DYNAMIC
)
6998 gen_op_set_cc_op(s
->cc_op
);
6999 gen_jmp_im(pc_start
- s
->cs_base
);
7000 gen_helper_hlt(tcg_const_i32(s
->pc
- pc_start
));
7001 s
->is_jmp
= DISAS_TB_JUMP
;
7005 modrm
= ldub_code(s
->pc
++);
7006 mod
= (modrm
>> 6) & 3;
7007 op
= (modrm
>> 3) & 7;
7010 if (!s
->pe
|| s
->vm86
)
7012 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_READ
);
7013 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,ldt
.selector
));
7017 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
7020 if (!s
->pe
|| s
->vm86
)
7023 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7025 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_WRITE
);
7026 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
7027 gen_jmp_im(pc_start
- s
->cs_base
);
7028 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
7029 gen_helper_lldt(cpu_tmp2_i32
);
7033 if (!s
->pe
|| s
->vm86
)
7035 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_READ
);
7036 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,tr
.selector
));
7040 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
7043 if (!s
->pe
|| s
->vm86
)
7046 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7048 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_WRITE
);
7049 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
7050 gen_jmp_im(pc_start
- s
->cs_base
);
7051 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
7052 gen_helper_ltr(cpu_tmp2_i32
);
7057 if (!s
->pe
|| s
->vm86
)
7059 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
7060 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7061 gen_op_set_cc_op(s
->cc_op
);
7063 gen_helper_verr(cpu_T
[0]);
7065 gen_helper_verw(cpu_T
[0]);
7066 s
->cc_op
= CC_OP_EFLAGS
;
7073 modrm
= ldub_code(s
->pc
++);
7074 mod
= (modrm
>> 6) & 3;
7075 op
= (modrm
>> 3) & 7;
7081 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_GDTR_READ
);
7082 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7083 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
, gdt
.limit
));
7084 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
7085 gen_add_A0_im(s
, 2);
7086 tcg_gen_ld_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
, gdt
.base
));
7088 gen_op_andl_T0_im(0xffffff);
7089 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
7094 case 0: /* monitor */
7095 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
7098 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7099 gen_op_set_cc_op(s
->cc_op
);
7100 gen_jmp_im(pc_start
- s
->cs_base
);
7101 #ifdef TARGET_X86_64
7102 if (s
->aflag
== 2) {
7103 gen_op_movq_A0_reg(R_EAX
);
7107 gen_op_movl_A0_reg(R_EAX
);
7109 gen_op_andl_A0_ffff();
7111 gen_add_A0_ds_seg(s
);
7112 gen_helper_monitor(cpu_A0
);
7115 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
7118 gen_update_cc_op(s
);
7119 gen_jmp_im(pc_start
- s
->cs_base
);
7120 gen_helper_mwait(tcg_const_i32(s
->pc
- pc_start
));
7127 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IDTR_READ
);
7128 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7129 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
, idt
.limit
));
7130 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
7131 gen_add_A0_im(s
, 2);
7132 tcg_gen_ld_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
, idt
.base
));
7134 gen_op_andl_T0_im(0xffffff);
7135 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
7141 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7142 gen_op_set_cc_op(s
->cc_op
);
7143 gen_jmp_im(pc_start
- s
->cs_base
);
7146 if (!(s
->flags
& HF_SVME_MASK
) || !s
->pe
)
7149 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7152 gen_helper_vmrun(tcg_const_i32(s
->aflag
),
7153 tcg_const_i32(s
->pc
- pc_start
));
7155 s
->is_jmp
= DISAS_TB_JUMP
;
7158 case 1: /* VMMCALL */
7159 if (!(s
->flags
& HF_SVME_MASK
))
7161 gen_helper_vmmcall();
7163 case 2: /* VMLOAD */
7164 if (!(s
->flags
& HF_SVME_MASK
) || !s
->pe
)
7167 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7170 gen_helper_vmload(tcg_const_i32(s
->aflag
));
7173 case 3: /* VMSAVE */
7174 if (!(s
->flags
& HF_SVME_MASK
) || !s
->pe
)
7177 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7180 gen_helper_vmsave(tcg_const_i32(s
->aflag
));
7184 if ((!(s
->flags
& HF_SVME_MASK
) &&
7185 !(s
->cpuid_ext3_features
& CPUID_EXT3_SKINIT
)) ||
7189 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7196 if (!(s
->flags
& HF_SVME_MASK
) || !s
->pe
)
7199 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7205 case 6: /* SKINIT */
7206 if ((!(s
->flags
& HF_SVME_MASK
) &&
7207 !(s
->cpuid_ext3_features
& CPUID_EXT3_SKINIT
)) ||
7210 gen_helper_skinit();
7212 case 7: /* INVLPGA */
7213 if (!(s
->flags
& HF_SVME_MASK
) || !s
->pe
)
7216 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7219 gen_helper_invlpga(tcg_const_i32(s
->aflag
));
7225 } else if (s
->cpl
!= 0) {
7226 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7228 gen_svm_check_intercept(s
, pc_start
,
7229 op
==2 ? SVM_EXIT_GDTR_WRITE
: SVM_EXIT_IDTR_WRITE
);
7230 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7231 gen_op_ld_T1_A0(OT_WORD
+ s
->mem_index
);
7232 gen_add_A0_im(s
, 2);
7233 gen_op_ld_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
7235 gen_op_andl_T0_im(0xffffff);
7237 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,gdt
.base
));
7238 tcg_gen_st32_tl(cpu_T
[1], cpu_env
, offsetof(CPUX86State
,gdt
.limit
));
7240 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,idt
.base
));
7241 tcg_gen_st32_tl(cpu_T
[1], cpu_env
, offsetof(CPUX86State
,idt
.limit
));
7246 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_READ_CR0
);
7247 #if defined TARGET_X86_64 && defined HOST_WORDS_BIGENDIAN
7248 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,cr
[0]) + 4);
7250 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,cr
[0]));
7252 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 1);
7256 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7258 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_WRITE_CR0
);
7259 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
7260 gen_helper_lmsw(cpu_T
[0]);
7261 gen_jmp_im(s
->pc
- s
->cs_base
);
7266 if (mod
!= 3) { /* invlpg */
7268 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7270 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7271 gen_op_set_cc_op(s
->cc_op
);
7272 gen_jmp_im(pc_start
- s
->cs_base
);
7273 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7274 gen_helper_invlpg(cpu_A0
);
7275 gen_jmp_im(s
->pc
- s
->cs_base
);
7280 case 0: /* swapgs */
7281 #ifdef TARGET_X86_64
7284 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7286 tcg_gen_ld_tl(cpu_T
[0], cpu_env
,
7287 offsetof(CPUX86State
,segs
[R_GS
].base
));
7288 tcg_gen_ld_tl(cpu_T
[1], cpu_env
,
7289 offsetof(CPUX86State
,kernelgsbase
));
7290 tcg_gen_st_tl(cpu_T
[1], cpu_env
,
7291 offsetof(CPUX86State
,segs
[R_GS
].base
));
7292 tcg_gen_st_tl(cpu_T
[0], cpu_env
,
7293 offsetof(CPUX86State
,kernelgsbase
));
7301 case 1: /* rdtscp */
7302 if (!(s
->cpuid_ext2_features
& CPUID_EXT2_RDTSCP
))
7304 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7305 gen_op_set_cc_op(s
->cc_op
);
7306 gen_jmp_im(pc_start
- s
->cs_base
);
7309 gen_helper_rdtscp();
7312 gen_jmp(s
, s
->pc
- s
->cs_base
);
7324 case 0x108: /* invd */
7325 case 0x109: /* wbinvd */
7327 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7329 gen_svm_check_intercept(s
, pc_start
, (b
& 2) ? SVM_EXIT_INVD
: SVM_EXIT_WBINVD
);
7333 case 0x63: /* arpl or movslS (x86_64) */
7334 #ifdef TARGET_X86_64
7337 /* d_ot is the size of destination */
7338 d_ot
= dflag
+ OT_WORD
;
7340 modrm
= ldub_code(s
->pc
++);
7341 reg
= ((modrm
>> 3) & 7) | rex_r
;
7342 mod
= (modrm
>> 6) & 3;
7343 rm
= (modrm
& 7) | REX_B(s
);
7346 gen_op_mov_TN_reg(OT_LONG
, 0, rm
);
7348 if (d_ot
== OT_QUAD
)
7349 tcg_gen_ext32s_tl(cpu_T
[0], cpu_T
[0]);
7350 gen_op_mov_reg_T0(d_ot
, reg
);
7352 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7353 if (d_ot
== OT_QUAD
) {
7354 gen_op_lds_T0_A0(OT_LONG
+ s
->mem_index
);
7356 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
7358 gen_op_mov_reg_T0(d_ot
, reg
);
7364 TCGv t0
, t1
, t2
, a0
;
7366 if (!s
->pe
|| s
->vm86
)
7368 t0
= tcg_temp_local_new();
7369 t1
= tcg_temp_local_new();
7370 t2
= tcg_temp_local_new();
7372 modrm
= ldub_code(s
->pc
++);
7373 reg
= (modrm
>> 3) & 7;
7374 mod
= (modrm
>> 6) & 3;
7377 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7378 gen_op_ld_v(ot
+ s
->mem_index
, t0
, cpu_A0
);
7379 a0
= tcg_temp_local_new();
7380 tcg_gen_mov_tl(a0
, cpu_A0
);
7382 gen_op_mov_v_reg(ot
, t0
, rm
);
7385 gen_op_mov_v_reg(ot
, t1
, reg
);
7386 tcg_gen_andi_tl(cpu_tmp0
, t0
, 3);
7387 tcg_gen_andi_tl(t1
, t1
, 3);
7388 tcg_gen_movi_tl(t2
, 0);
7389 label1
= gen_new_label();
7390 tcg_gen_brcond_tl(TCG_COND_GE
, cpu_tmp0
, t1
, label1
);
7391 tcg_gen_andi_tl(t0
, t0
, ~3);
7392 tcg_gen_or_tl(t0
, t0
, t1
);
7393 tcg_gen_movi_tl(t2
, CC_Z
);
7394 gen_set_label(label1
);
7396 gen_op_st_v(ot
+ s
->mem_index
, t0
, a0
);
7399 gen_op_mov_reg_v(ot
, rm
, t0
);
7401 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7402 gen_op_set_cc_op(s
->cc_op
);
7403 gen_compute_eflags(cpu_cc_src
);
7404 tcg_gen_andi_tl(cpu_cc_src
, cpu_cc_src
, ~CC_Z
);
7405 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, t2
);
7406 s
->cc_op
= CC_OP_EFLAGS
;
7412 case 0x102: /* lar */
7413 case 0x103: /* lsl */
7417 if (!s
->pe
|| s
->vm86
)
7419 ot
= dflag
? OT_LONG
: OT_WORD
;
7420 modrm
= ldub_code(s
->pc
++);
7421 reg
= ((modrm
>> 3) & 7) | rex_r
;
7422 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
7423 t0
= tcg_temp_local_new();
7424 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7425 gen_op_set_cc_op(s
->cc_op
);
7427 gen_helper_lar(t0
, cpu_T
[0]);
7429 gen_helper_lsl(t0
, cpu_T
[0]);
7430 tcg_gen_andi_tl(cpu_tmp0
, cpu_cc_src
, CC_Z
);
7431 label1
= gen_new_label();
7432 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, label1
);
7433 gen_op_mov_reg_v(ot
, reg
, t0
);
7434 gen_set_label(label1
);
7435 s
->cc_op
= CC_OP_EFLAGS
;
7440 modrm
= ldub_code(s
->pc
++);
7441 mod
= (modrm
>> 6) & 3;
7442 op
= (modrm
>> 3) & 7;
7444 case 0: /* prefetchnta */
7445 case 1: /* prefetchnt0 */
7446 case 2: /* prefetchnt0 */
7447 case 3: /* prefetchnt0 */
7450 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7451 /* nothing more to do */
7453 default: /* nop (multi byte) */
7454 gen_nop_modrm(s
, modrm
);
7458 case 0x119 ... 0x11f: /* nop (multi byte) */
7459 modrm
= ldub_code(s
->pc
++);
7460 gen_nop_modrm(s
, modrm
);
7462 case 0x120: /* mov reg, crN */
7463 case 0x122: /* mov crN, reg */
7465 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7467 modrm
= ldub_code(s
->pc
++);
7468 if ((modrm
& 0xc0) != 0xc0)
7470 rm
= (modrm
& 7) | REX_B(s
);
7471 reg
= ((modrm
>> 3) & 7) | rex_r
;
7476 if ((prefixes
& PREFIX_LOCK
) && (reg
== 0) &&
7477 (s
->cpuid_ext3_features
& CPUID_EXT3_CR8LEG
)) {
7486 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7487 gen_op_set_cc_op(s
->cc_op
);
7488 gen_jmp_im(pc_start
- s
->cs_base
);
7490 gen_op_mov_TN_reg(ot
, 0, rm
);
7491 gen_helper_write_crN(tcg_const_i32(reg
), cpu_T
[0]);
7492 gen_jmp_im(s
->pc
- s
->cs_base
);
7495 gen_helper_read_crN(cpu_T
[0], tcg_const_i32(reg
));
7496 gen_op_mov_reg_T0(ot
, rm
);
7504 case 0x121: /* mov reg, drN */
7505 case 0x123: /* mov drN, reg */
7507 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7509 modrm
= ldub_code(s
->pc
++);
7510 if ((modrm
& 0xc0) != 0xc0)
7512 rm
= (modrm
& 7) | REX_B(s
);
7513 reg
= ((modrm
>> 3) & 7) | rex_r
;
7518 /* XXX: do it dynamically with CR4.DE bit */
7519 if (reg
== 4 || reg
== 5 || reg
>= 8)
7522 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_WRITE_DR0
+ reg
);
7523 gen_op_mov_TN_reg(ot
, 0, rm
);
7524 gen_helper_movl_drN_T0(tcg_const_i32(reg
), cpu_T
[0]);
7525 gen_jmp_im(s
->pc
- s
->cs_base
);
7528 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_READ_DR0
+ reg
);
7529 tcg_gen_ld_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
,dr
[reg
]));
7530 gen_op_mov_reg_T0(ot
, rm
);
7534 case 0x106: /* clts */
7536 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
7538 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_WRITE_CR0
);
7540 /* abort block because static cpu state changed */
7541 gen_jmp_im(s
->pc
- s
->cs_base
);
7545 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
7546 case 0x1c3: /* MOVNTI reg, mem */
7547 if (!(s
->cpuid_features
& CPUID_SSE2
))
7549 ot
= s
->dflag
== 2 ? OT_QUAD
: OT_LONG
;
7550 modrm
= ldub_code(s
->pc
++);
7551 mod
= (modrm
>> 6) & 3;
7554 reg
= ((modrm
>> 3) & 7) | rex_r
;
7555 /* generate a generic store */
7556 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
7559 modrm
= ldub_code(s
->pc
++);
7560 mod
= (modrm
>> 6) & 3;
7561 op
= (modrm
>> 3) & 7;
7563 case 0: /* fxsave */
7564 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
) ||
7565 (s
->prefix
& PREFIX_LOCK
))
7567 if ((s
->flags
& HF_EM_MASK
) || (s
->flags
& HF_TS_MASK
)) {
7568 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
7571 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7572 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7573 gen_op_set_cc_op(s
->cc_op
);
7574 gen_jmp_im(pc_start
- s
->cs_base
);
7575 gen_helper_fxsave(cpu_A0
, tcg_const_i32((s
->dflag
== 2)));
7577 case 1: /* fxrstor */
7578 if (mod
== 3 || !(s
->cpuid_features
& CPUID_FXSR
) ||
7579 (s
->prefix
& PREFIX_LOCK
))
7581 if ((s
->flags
& HF_EM_MASK
) || (s
->flags
& HF_TS_MASK
)) {
7582 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
7585 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7586 if (s
->cc_op
!= CC_OP_DYNAMIC
)
7587 gen_op_set_cc_op(s
->cc_op
);
7588 gen_jmp_im(pc_start
- s
->cs_base
);
7589 gen_helper_fxrstor(cpu_A0
, tcg_const_i32((s
->dflag
== 2)));
7591 case 2: /* ldmxcsr */
7592 case 3: /* stmxcsr */
7593 if (s
->flags
& HF_TS_MASK
) {
7594 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
7597 if ((s
->flags
& HF_EM_MASK
) || !(s
->flags
& HF_OSFXSR_MASK
) ||
7600 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7602 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
7603 tcg_gen_trunc_tl_i32(cpu_tmp2_i32
, cpu_T
[0]);
7604 gen_helper_ldmxcsr(cpu_tmp2_i32
);
7606 tcg_gen_ld32u_tl(cpu_T
[0], cpu_env
, offsetof(CPUX86State
, mxcsr
));
7607 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
7610 case 5: /* lfence */
7611 case 6: /* mfence */
7612 if ((modrm
& 0xc7) != 0xc0 || !(s
->cpuid_features
& CPUID_SSE2
))
7615 case 7: /* sfence / clflush */
7616 if ((modrm
& 0xc7) == 0xc0) {
7618 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7619 if (!(s
->cpuid_features
& CPUID_SSE
))
7623 if (!(s
->cpuid_features
& CPUID_CLFLUSH
))
7625 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7632 case 0x10d: /* 3DNow! prefetch(w) */
7633 modrm
= ldub_code(s
->pc
++);
7634 mod
= (modrm
>> 6) & 3;
7637 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
7638 /* ignore for now */
7640 case 0x1aa: /* rsm */
7641 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_RSM
);
7642 if (!(s
->flags
& HF_SMM_MASK
))
7644 gen_update_cc_op(s
);
7645 gen_jmp_im(s
->pc
- s
->cs_base
);
7649 case 0x1b8: /* SSE4.2 popcnt */
7650 if ((prefixes
& (PREFIX_REPZ
| PREFIX_LOCK
| PREFIX_REPNZ
)) !=
7653 if (!(s
->cpuid_ext_features
& CPUID_EXT_POPCNT
))
7656 modrm
= ldub_code(s
->pc
++);
7657 reg
= ((modrm
>> 3) & 7);
7659 if (s
->prefix
& PREFIX_DATA
)
7661 else if (s
->dflag
!= 2)
7666 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
7667 gen_helper_popcnt(cpu_T
[0], cpu_T
[0], tcg_const_i32(ot
));
7668 gen_op_mov_reg_T0(ot
, reg
);
7670 s
->cc_op
= CC_OP_EFLAGS
;
7672 case 0x10e ... 0x10f:
7673 /* 3DNow! instructions, ignore prefixes */
7674 s
->prefix
&= ~(PREFIX_REPZ
| PREFIX_REPNZ
| PREFIX_DATA
);
7675 case 0x110 ... 0x117:
7676 case 0x128 ... 0x12f:
7677 case 0x138 ... 0x13a:
7678 case 0x150 ... 0x179:
7679 case 0x17c ... 0x17f:
7681 case 0x1c4 ... 0x1c6:
7682 case 0x1d0 ... 0x1fe:
7683 gen_sse(s
, b
, pc_start
, rex_r
);
        /* lock generation */
        if (s->prefix & PREFIX_LOCK)
            gen_helper_unlock();
        if (s->prefix & PREFIX_LOCK)
            gen_helper_unlock();
        /* XXX: ensure that no lock was generated */
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
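
/* Map the guest general purpose registers and the lazy condition code
   state onto fixed TCG globals backed by fields of CPUX86State, so the
   generated code can reference them directly. */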
void optimize_flags_init(void)
{
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUX86State, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_src),
                                    "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_dst),
                                    "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUX86State, cc_tmp),
                                    "cc_tmp");

#ifdef TARGET_X86_64
    cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EAX]), "rax");
    cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_ECX]), "rcx");
    cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EDX]), "rdx");
    cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EBX]), "rbx");
    cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_ESP]), "rsp");
    cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EBP]), "rbp");
    cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_ESI]), "rsi");
    cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EDI]), "rdi");
    cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUX86State, regs[8]), "r8");
    cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUX86State, regs[9]), "r9");
    cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
                                          offsetof(CPUX86State, regs[10]), "r10");
    cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
                                          offsetof(CPUX86State, regs[11]), "r11");
    cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
                                          offsetof(CPUX86State, regs[12]), "r12");
    cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
                                          offsetof(CPUX86State, regs[13]), "r13");
    cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
                                          offsetof(CPUX86State, regs[14]), "r14");
    cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
                                          offsetof(CPUX86State, regs[15]), "r15");
#else
    cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EAX]), "eax");
    cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_ECX]), "ecx");
    cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EDX]), "edx");
    cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EBX]), "ebx");
    cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_ESP]), "esp");
    cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EBP]), "ebp");
    cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_ESI]), "esi");
    cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUX86State, regs[R_EDI]), "edi");
#endif
}
7765 #define GEN_HELPER 2
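/* Illustrative sketch only (not part of the original translator): once the
   globals above have been created, guest general-purpose register accesses
   are plain TCG moves on the corresponding cpu_regs[] entries.  The helper
   name below is hypothetical. */
static inline void example_gen_mov_reg_reg(int d_reg, int s_reg)
{
    /* copy one guest general-purpose register into another */
    tcg_gen_mov_tl(cpu_regs[d_reg], cpu_regs[s_reg]);
}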
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline void gen_intermediate_code_internal(CPUX86State *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
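    /* (The two softmmu values above are pre-scaled by four: when loads and
       stores are emitted the operand-size index occupies the low two bits,
       while the remaining bits distinguish the kernel and user softmmu
       access paths; 0 is used when softmmu is not active.) */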
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
    cpu_T[0] = tcg_temp_new();
    cpu_T[1] = tcg_temp_new();
    cpu_A0 = tcg_temp_new();
    cpu_T3 = tcg_temp_new();

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp1_i64 = tcg_temp_new_i64();
    cpu_tmp2_i32 = tcg_temp_new_i32();
    cpu_tmp3_i32 = tcg_temp_new_i32();
    cpu_tmp4 = tcg_temp_new();
    cpu_tmp5 = tcg_temp_new();
    cpu_ptr0 = tcg_temp_new_ptr();
    cpu_ptr1 = tcg_temp_new_ptr();

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for(;;) {
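        /* Loop outline: honour any breakpoint at the current PC, optionally
           record pc/cc_op for this opcode position (search_pc mode),
           translate one guest instruction, then test the stop conditions. */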
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == pc_ptr &&
                    !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
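        /* The gen_opc_pc[]/gen_opc_cc_op[] entries recorded above are what
           restore_state_to_opc() uses to map an opcode position back to the
           guest eip and cc_op when the CPU state has to be reconstructed. */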
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
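        /* (The TARGET_PAGE_SIZE - 32 margin below bounds a TB to roughly one
           page of guest code, leaving headroom for one final maximum-length
           instruction.) */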
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        if (singlestep) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill in the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        int disas_flags;
        qemu_log("----------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        log_target_disas(pc_start, pc_ptr - pc_start, disas_flags);
        qemu_log("\n");
    }
#endif

    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}
void gen_intermediate_code(CPUX86State *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUX86State *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
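/* When an exception is raised in the middle of a TB, the block is
   re-translated in search_pc mode (gen_intermediate_code_pc above) so that
   the gen_opc_pc[]/gen_opc_cc_op[] arrays are populated; restore_state_to_opc
   below then copies the values for the faulting opcode position into env. */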
void restore_state_to_opc(CPUX86State *env, TranslationBlock *tb, int pc_pos)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
        int i;
        qemu_log("RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                qemu_log("0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        qemu_log("pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                 pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                 (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)