/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
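
/* These prefix flags are OR-ed into one bitmask as prefix bytes are decoded:
   0xf3 sets PREFIX_REPZ, 0xf2 PREFIX_REPNZ, 0xf0 PREFIX_LOCK, 0x66 (operand
   size) PREFIX_DATA and 0x67 (address size) PREFIX_ADR, so later tests such
   as (prefixes & PREFIX_REPZ) stay a single AND. */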
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#endif
//#define MACRO_TEST   1
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv_i32 cpu_cc_op;
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp4;
static TCGv_ptr cpu_ptr0, cpu_ptr1;
static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
static TCGv_i64 cpu_tmp1_i64;
static TCGv cpu_tmp5, cpu_tmp6;
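
/* Worth noting: the cpu_* values above are TCG globals that alias fields of
   CPUState for the lifetime of a translation block, while the cpu_tmp*
   values are scratch registers reused freely between generated micro-ops. */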
#include "gen-icount.h"

static int x86_64_hregs;
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
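
/* A DisasContext is filled in once per translation block, largely from
   tb->flags, so the decoder can test the execution mode (pe, code32, code64,
   ss32, addseg, ...) without touching CPUState while translating. */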
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
    OP_SHL1, /* undocumented */

/* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_A0, /* temporary register used when doing address evaluation */
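
/* OR_TMP0 and OR_A0 are pseudo register indexes: passing OR_TMP0 as the
   operand of a gen_* function below means "memory operand, address already
   computed into cpu_A0" rather than an architectural register. */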
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
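
/* Byte-register subtlety handled above: without a REX prefix, register
   encodings 4-7 select the legacy high bytes AH/CH/DH/BH (hence the
   regs[reg - 4] + REG_H_OFFSET store); x86_64_hregs is set once a REX
   prefix is seen, which remaps those encodings to SPL/BPL/SIL/DIL. */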
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_ld64(t0, a0, mem_index);
#endif
        break;
    }
}
/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_st64(t0, a0, mem_index);
#endif
        break;
    }
}
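
/* Encoding convention for the 'idx' argument of the load/store helpers:
   callers pass ot + s->mem_index, so the low two bits select the operand
   size while (idx >> 2) - 1 recovers the softmmu memory index that picks
   the right (e.g. kernel vs user) access functions. */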
static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}
static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_inb(v, n); break;
    case 1: gen_helper_inw(v, n); break;
    case 2: gen_helper_inl(v, n); break;
    }
}

static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_outb(v, n); break;
    case 1: gen_helper_outw(v, n); break;
    case 2: gen_helper_outl(v, n); break;
    }
}
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    target_ulong next_eip;

    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        switch (ot) {
        case 0: gen_helper_check_iob(cpu_tmp2_i32); break;
        case 1: gen_helper_check_iow(cpu_tmp2_i32); break;
        case 2: gen_helper_check_iol(cpu_tmp2_i32); break;
        }
    }
    if (s->flags & HF_SVMI_MASK) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags),
                                tcg_const_i32(next_eip - cur_eip));
    }
}
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
    gen_helper_cc_compute_c(cpu_tmp2_i32, cpu_cc_op);
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
    gen_helper_cc_compute_all(cpu_tmp2_i32, cpu_cc_op);
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
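
/* Flag-bit arithmetic used above, for reference: in EFLAGS, CF is bit 0,
   PF bit 2, ZF bit 6, SF bit 7 and OF bit 11; each condition is extracted
   by shifting the wanted bit down to bit 0 and masking with 1, with JCC_L
   computing SF ^ OF and JCC_LE computing (SF ^ OF) | ZF. */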
/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;
        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:
    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:
    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:
    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:
    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:
    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:
    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:
    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:
    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
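
/* Summary of the fast path above: when cc_op records a subtraction, the
   jump condition can be tested directly on cpu_cc_dst/cpu_cc_src with one
   TCG brcond (masking or sign-extending to the operand width), and only
   the remaining cases fall back to materializing EFLAGS via slow_jcc. */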
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
static inline void gen_ins(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}
static inline void gen_outs(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
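
/* The REP translation strategy, spelled out: no loop is generated; each
   translated iteration decrements ECX, re-tests the exit condition and then
   jumps back to cur_eip, re-entering the same code. That keeps every
   iteration individually interruptible and single-steppable. */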
static void gen_helper_fp_arith_ST0_FT0(int op)
{
    switch(op) {
    case 0: gen_helper_fadd_ST0_FT0(); break;
    case 1: gen_helper_fmul_ST0_FT0(); break;
    case 2: gen_helper_fcom_ST0_FT0(); break;
    case 3: gen_helper_fcom_ST0_FT0(); break;
    case 4: gen_helper_fsub_ST0_FT0(); break;
    case 5: gen_helper_fsubr_ST0_FT0(); break;
    case 6: gen_helper_fdiv_ST0_FT0(); break;
    case 7: gen_helper_fdivr_ST0_FT0(); break;
    }
}

/* NOTE the exception in "r" op ordering */
static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
{
    TCGv_i32 tmp = tcg_const_i32(opreg);
    switch(op) {
    case 0: gen_helper_fadd_STN_ST0(tmp); break;
    case 1: gen_helper_fmul_STN_ST0(tmp); break;
    case 4: gen_helper_fsubr_STN_ST0(tmp); break;
    case 5: gen_helper_fsub_STN_ST0(tmp); break;
    case 6: gen_helper_fdivr_STN_ST0(tmp); break;
    case 7: gen_helper_fdiv_STN_ST0(tmp); break;
    }
    tcg_temp_free_i32(tmp);
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    t2 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    if (is_right) {
        switch (ot) {
        case 0: gen_helper_rcrb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 1: gen_helper_rcrw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 2: gen_helper_rcrl(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#ifdef TARGET_X86_64
        case 3: gen_helper_rcrq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#endif
        }
    } else {
        switch (ot) {
        case 0: gen_helper_rclb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 1: gen_helper_rclw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 2: gen_helper_rcll(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#ifdef TARGET_X86_64
        case 3: gen_helper_rclq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#endif
        }
    }
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    t2 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    switch(op) {
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        break;
    default:
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
        break;
    }
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib, base, index, scale, opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
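
/* 16 bit addressing recap for the switch above: rm values 0-7 select the
   classic BX+SI, BX+DI, BP+SI, BP+DI, SI, DI, BP and BX forms, and the
   BP-based forms (rm == 2, 3 or 6) default to the SS segment, which is why
   they force override = R_SS when no explicit segment prefix is present. */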
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
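
/* Direct block chaining in brief: tcg_gen_goto_tb() emits a patchable jump
   slot, and tcg_gen_exit_tb((long)tb + tb_num) returns the TB pointer with
   the slot number in its low bits; once the destination block has been
   translated, the slot is patched so execution flows from block to block
   without returning to the main loop. */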
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new();
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}

static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env,
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env,
                  offsetof(CPUX86State,segs[seg_reg].base));
}
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_load_seg(tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
static inline void
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    gen_helper_svm_check_intercept_param(tcg_const_i32(type),
                                         tcg_const_i64(param));
}

static inline void
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);

        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            gen_helper_enter64_level(tcg_const_i32(level),
                                     tcg_const_i32((ot == OT_QUAD)),
                                     cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            gen_helper_enter_level(tcg_const_i32(level),
                                   tcg_const_i32(s->dflag),
                                   cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_helper_raise_exception(tcg_const_i32(trapno));
    s->is_jmp = 3;
}
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_helper_raise_interrupt(tcg_const_i32(intno),
                               tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_helper_debug();
    s->is_jmp = 3;
}
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_helper_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_helper_debug();
    } else if (s->tf) {
        gen_helper_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2637 direct call to the next block may occur */
2638 static void gen_jmp_tb(DisasContext
*s
, target_ulong eip
, int tb_num
)
2641 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2642 gen_op_set_cc_op(s
->cc_op
);
2643 s
->cc_op
= CC_OP_DYNAMIC
;
2645 gen_goto_tb(s
, tb_num
, eip
);
2653 static void gen_jmp(DisasContext
*s
, target_ulong eip
)
2655 gen_jmp_tb(s
, eip
, 0);
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}

static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}

static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}

static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}

static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}

static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}

static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
#define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
                     gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
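
/* Indexing convention for sse_op_table1 below: the row is the opcode byte
   following 0x0f, and the four columns correspond to the mandatory prefix
   in effect (none, 0x66, 0xf3, 0xf2). SSE_SPECIAL marks opcodes decoded by
   hand in the SSE dispatcher, SSE_DUMMY marks ops needing no table helper. */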
/* columns are selected by the mandatory prefix:
   none, 0x66, 0xf3, 0xf2 (see b1 in gen_sse()) */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
    [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
    [0x2f] = { gen_helper_comiss, gen_helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
    [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
    [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
    [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
               gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
    [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { gen_helper_shufps, gen_helper_shufpd },

    [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { gen_helper_pshufw_mmx,
               gen_helper_pshufd_xmm,
               gen_helper_pshufhw_xmm,
               gen_helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
    [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
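
/* Immediate-form MMX/SSE shift group (opcodes 0x71/0x72/0x73): gen_sse()
   indexes this table as ((b - 1) & 3) * 8 + the reg field of the modrm
   byte, with the second index (b1) selecting the MMX or XMM helper. */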
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_helper_pslldq_xmm },
};
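
/* Scalar conversion helpers in groups of four: the integer-to-float
   cvtsi2ss/sd pair plus (x86_64 only) their 64-bit-source forms, then the
   truncating float-to-integer conversions, then the rounding ones.
   gen_sse() selects an entry from (s->dflag == 2) * 2 + ((b >> 8) - 2),
   adding 4 for the cvtt* group and 8 for the cvt* group. */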
static void *sse_op_table3[4 * 3] = {
    gen_helper_cvtsi2ss,
    gen_helper_cvtsi2sd,
    X86_64_ONLY(gen_helper_cvtsq2ss),
    X86_64_ONLY(gen_helper_cvtsq2sd),

    gen_helper_cvttss2si,
    gen_helper_cvttsd2si,
    X86_64_ONLY(gen_helper_cvttss2sq),
    X86_64_ONLY(gen_helper_cvttsd2sq),

    gen_helper_cvtss2si,
    gen_helper_cvtsd2si,
    X86_64_ONLY(gen_helper_cvtss2sq),
    X86_64_ONLY(gen_helper_cvtsd2sq),
};
/* comparison predicates for cmpps/cmppd/cmpss/cmpsd, indexed by the
   instruction's immediate byte */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
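
/* 3DNow! helpers, indexed directly by the instruction's trailing
   suffix byte (fetched after the operands in gen_sse()). */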
static void *sse_op_table5[256] = {
    [0x0c] = gen_helper_pi2fw,
    [0x0d] = gen_helper_pi2fd,
    [0x1c] = gen_helper_pf2iw,
    [0x1d] = gen_helper_pf2id,
    [0x8a] = gen_helper_pfnacc,
    [0x8e] = gen_helper_pfpnacc,
    [0x90] = gen_helper_pfcmpge,
    [0x94] = gen_helper_pfmin,
    [0x96] = gen_helper_pfrcp,
    [0x97] = gen_helper_pfrsqrt,
    [0x9a] = gen_helper_pfsub,
    [0x9e] = gen_helper_pfadd,
    [0xa0] = gen_helper_pfcmpgt,
    [0xa4] = gen_helper_pfmax,
    [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = gen_helper_movq, /* pfrsqit1 */
    [0xaa] = gen_helper_pfsubr,
    [0xae] = gen_helper_pfacc,
    [0xb0] = gen_helper_pfcmpeq,
    [0xb4] = gen_helper_pfmul,
    [0xb6] = gen_helper_movq, /* pfrcpit2 */
    [0xb7] = gen_helper_pmulhrw_mmx,
    [0xbb] = gen_helper_pswapd,
    [0xbf] = gen_helper_pavgb_mmx, /* pavgusb */
};
struct sse_op_helper_s {
    void *op[2];
    uint32_t ext_mask;
};
#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
static struct sse_op_helper_s sse_op_table6[256] = {
    [0x00] = SSSE3_OP(pshufb),
    [0x01] = SSSE3_OP(phaddw),
    [0x02] = SSSE3_OP(phaddd),
    [0x03] = SSSE3_OP(phaddsw),
    [0x04] = SSSE3_OP(pmaddubsw),
    [0x05] = SSSE3_OP(phsubw),
    [0x06] = SSSE3_OP(phsubd),
    [0x07] = SSSE3_OP(phsubsw),
    [0x08] = SSSE3_OP(psignb),
    [0x09] = SSSE3_OP(psignw),
    [0x0a] = SSSE3_OP(psignd),
    [0x0b] = SSSE3_OP(pmulhrsw),
    [0x10] = SSE41_OP(pblendvb),
    [0x14] = SSE41_OP(blendvps),
    [0x15] = SSE41_OP(blendvpd),
    [0x17] = SSE41_OP(ptest),
    [0x1c] = SSSE3_OP(pabsb),
    [0x1d] = SSSE3_OP(pabsw),
    [0x1e] = SSSE3_OP(pabsd),
    [0x20] = SSE41_OP(pmovsxbw),
    [0x21] = SSE41_OP(pmovsxbd),
    [0x22] = SSE41_OP(pmovsxbq),
    [0x23] = SSE41_OP(pmovsxwd),
    [0x24] = SSE41_OP(pmovsxwq),
    [0x25] = SSE41_OP(pmovsxdq),
    [0x28] = SSE41_OP(pmuldq),
    [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntdqa */
    [0x2b] = SSE41_OP(packusdw),
    [0x30] = SSE41_OP(pmovzxbw),
    [0x31] = SSE41_OP(pmovzxbd),
    [0x32] = SSE41_OP(pmovzxbq),
    [0x33] = SSE41_OP(pmovzxwd),
    [0x34] = SSE41_OP(pmovzxwq),
    [0x35] = SSE41_OP(pmovzxdq),
    [0x37] = SSE42_OP(pcmpgtq),
    [0x38] = SSE41_OP(pminsb),
    [0x39] = SSE41_OP(pminsd),
    [0x3a] = SSE41_OP(pminuw),
    [0x3b] = SSE41_OP(pminud),
    [0x3c] = SSE41_OP(pmaxsb),
    [0x3d] = SSE41_OP(pmaxsd),
    [0x3e] = SSE41_OP(pmaxuw),
    [0x3f] = SSE41_OP(pmaxud),
    [0x40] = SSE41_OP(pmulld),
    [0x41] = SSE41_OP(phminposuw),
};
static struct sse_op_helper_s sse_op_table7[256] = {
    [0x08] = SSE41_OP(roundps),
    [0x09] = SSE41_OP(roundpd),
    [0x0a] = SSE41_OP(roundss),
    [0x0b] = SSE41_OP(roundsd),
    [0x0c] = SSE41_OP(blendps),
    [0x0d] = SSE41_OP(blendpd),
    [0x0e] = SSE41_OP(pblendw),
    [0x0f] = SSSE3_OP(palignr),
    [0x14] = SSE41_SPECIAL, /* pextrb */
    [0x15] = SSE41_SPECIAL, /* pextrw */
    [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
    [0x17] = SSE41_SPECIAL, /* extractps */
    [0x20] = SSE41_SPECIAL, /* pinsrb */
    [0x21] = SSE41_SPECIAL, /* insertps */
    [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
    [0x40] = SSE41_OP(dpps),
    [0x41] = SSE41_OP(dppd),
    [0x42] = SSE41_OP(mpsadbw),
    [0x60] = SSE42_OP(pcmpestrm),
    [0x61] = SSE42_OP(pcmpestri),
    [0x62] = SSE42_OP(pcmpistrm),
    [0x63] = SSE42_OP(pcmpistri),
};
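
/* Decode and emit code for one MMX/SSE/SSE2/SSSE3/SSE4/3DNow!
   instruction.  b is the opcode byte; b1 below encodes the mandatory
   prefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */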
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        gen_helper_emms();
        return;
    }
    if (b == 0x77) {
        /* emms */
        gen_helper_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_helper_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
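        /* the case keys below are b | (b1 << 8), i.e. the opcode byte
           tagged with its mandatory-prefix group */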
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            gen_helper_movmskps(cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            gen_helper_movmskpd(cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            gen_helper_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                gen_helper_cvtpi2ps(cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                gen_helper_cvtpi2pd(cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                ((void (*)(TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_tmp2_i32);
            } else {
                ((void (*)(TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_helper_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                gen_helper_cvttps2pi(cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                gen_helper_cvttpd2pi(cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                gen_helper_cvtps2pi(cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x12d:
                gen_helper_cvtpd2pi(cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                ((void (*)(TCGv_i32, TCGv_ptr))sse_op2)(cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                ((void (*)(TCGv, TCGv_ptr))sse_op2)(cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_helper_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_helper_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x138:
            if (s->prefix & PREFIX_REPNZ)
                goto crc32;
        case 0x038:
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table6[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    switch (b) {
                    case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
                    case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
                    case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
                        gen_ldq_env_A0(s->mem_index, op2_offset +
                                       offsetof(XMMReg, XMM_Q(0)));
                        break;
                    case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
                    case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
                                       offsetof(XMMReg, XMM_L(0)));
                        break;
                    case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
                        tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_W(0)));
                        break;
                    case 0x2a:            /* movntdqa */
                        gen_ldo_env_A0(s->mem_index, op1_offset);
                        return;
                    default:
                        gen_ldo_env_A0(s->mem_index, op2_offset);
                    }
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            if (sse_op2 == SSE_SPECIAL)
                goto illegal_op;

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);

            if (b == 0x17)
                s->cc_op = CC_OP_EFLAGS;
            break;
        case 0x338: /* crc32 */
        crc32:
            b = modrm;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;

            if (b != 0xf0 && b != 0xf1)
                goto illegal_op;
            if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
                goto illegal_op;

            if (b == 0xf0)
                ot = OT_BYTE;
            else if (b == 0xf1 && s->dflag != 2)
                if (s->prefix & PREFIX_DATA)
                    ot = OT_WORD;
                else
                    ot = OT_LONG;
            else
                ot = OT_QUAD;

            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
                             cpu_T[0], tcg_const_i32(8 << ot));

            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_op_mov_reg_T0(ot, reg);
            break;
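        /* 0x0f 0x3a xx: SSSE3/SSE4 instructions carrying an immediate
           byte; the third opcode byte is looked up in sse_op_table7 */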
        case 0x03a:
        case 0x13a:
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table7[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
                goto illegal_op;

            if (sse_op2 == SSE_SPECIAL) {
                ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3)
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                reg = ((modrm >> 3) & 7) | rex_r;
                val = ldub_code(s->pc++);
                switch (b) {
                case 0x14: /* pextrb */
                    tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
                                         (s->mem_index >> 2) - 1);
                    break;
                case 0x15: /* pextrw */
                    tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_W(val & 7)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    break;
                case 0x16:
                    if (ot == OT_LONG) { /* pextrd */
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_T[0]);
                        else
                            tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
                                              (s->mem_index >> 2) - 1);
                    } else { /* pextrq */
#ifdef TARGET_X86_64
                        tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
                        else
                            tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                              (s->mem_index >> 2) - 1);
#else
                        goto illegal_op;
#endif
                    }
                    break;
                case 0x17: /* extractps */
                    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_L(val & 3)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    break;
                case 0x20: /* pinsrb */
                    if (mod == 3)
                        gen_op_mov_TN_reg(OT_LONG, 0, rm);
                    else
                        tcg_gen_qemu_ld8u(cpu_tmp0, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    tcg_gen_st8_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    break;
                case 0x21: /* insertps */
                    if (mod == 3) {
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                       offsetof(CPUX86State,xmm_regs[rm]
                                                .XMM_L((val >> 6) & 3)));
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
                    }
                    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                   offsetof(CPUX86State,xmm_regs[reg]
                                            .XMM_L((val >> 4) & 3)));
                    if ((val >> 0) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                               xmm_regs[reg].XMM_L(0)));
                    if ((val >> 1) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                               xmm_regs[reg].XMM_L(1)));
                    if ((val >> 2) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                               xmm_regs[reg].XMM_L(2)));
                    if ((val >> 3) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                       cpu_env, offsetof(CPUX86State,
                                               xmm_regs[reg].XMM_L(3)));
                    break;
                case 0x22:
                    if (ot == OT_LONG) { /* pinsrd */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp0, rm);
                        else
                            tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
                                               (s->mem_index >> 2) - 1);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                    } else { /* pinsrq */
#ifdef TARGET_X86_64
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
                        else
                            tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                              (s->mem_index >> 2) - 1);
                        tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                                       offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
#else
                        goto illegal_op;
#endif
                    }
                    break;
                }
                return;
            }

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            val = ldub_code(s->pc++);

            if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
                s->cc_op = CC_OP_EFLAGS;

                if (s->dflag == 2)
                    /* The helper must use entire 64-bit gp registers */
                    val |= 1 << 8;
            }

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0,
                                cpu_ptr1, tcg_const_i32(val));
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0,
                                cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr, TCGv))sse_op2)(cpu_ptr0,
                                cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
{
    int b, prefixes, aflag, dflag;
    int shift, ot;
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
    target_ulong next_eip, tval;
    int rex_w, rex_r;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(pc_start);
    s->pc = pc_start;
    prefixes = 0;
    aflag = s->code32;
    dflag = s->code32;
    s->override = -1;
    rex_w = -1;
    rex_r = 0;
#ifdef TARGET_X86_64
    s->rex_x = 0;
    s->rex_b = 0;
    x86_64_hregs = 0;
#endif
    s->rip_offset = 0; /* for relative ip address */
 next_byte:
    b = ldub_code(s->pc);
    s->pc++;
    /* check prefixes */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        case 0x40 ... 0x4f:
            /* REX prefix */
            rex_w = (b >> 3) & 1;
            rex_r = (b & 0x4) << 1;
            s->rex_x = (b & 0x2) << 2;
            REX_B(s) = (b & 0x1) << 3;
            x86_64_hregs = 1; /* select uniform byte register addressing */
            goto next_byte;
        }
        if (rex_w == 1) {
            /* 0x66 is ignored if rex.w is set */
            dflag = 2;
        } else {
            if (prefixes & PREFIX_DATA)
                dflag ^= 1;
        }
        if (!(prefixes & PREFIX_ADR))
            aflag = 2;
    } else
#endif
    {
        switch (b) {
        case 0xf3:
            prefixes |= PREFIX_REPZ;
            goto next_byte;
        case 0xf2:
            prefixes |= PREFIX_REPNZ;
            goto next_byte;
        case 0xf0:
            prefixes |= PREFIX_LOCK;
            goto next_byte;
        case 0x2e:
            s->override = R_CS;
            goto next_byte;
        case 0x36:
            s->override = R_SS;
            goto next_byte;
        case 0x3e:
            s->override = R_DS;
            goto next_byte;
        case 0x26:
            s->override = R_ES;
            goto next_byte;
        case 0x64:
            s->override = R_FS;
            goto next_byte;
        case 0x65:
            s->override = R_GS;
            goto next_byte;
        case 0x66:
            prefixes |= PREFIX_DATA;
            goto next_byte;
        case 0x67:
            prefixes |= PREFIX_ADR;
            goto next_byte;
        }
        if (prefixes & PREFIX_DATA)
            dflag ^= 1;
        if (prefixes & PREFIX_ADR)
            aflag ^= 1;
    }

    s->prefix = prefixes;
    s->aflag = aflag;
    s->dflag = dflag;

    /* lock generation */
    if (prefixes & PREFIX_LOCK)
        gen_helper_lock();

    /* now check op code */
 reswitch:
    switch(b) {
    case 0x0f:
        /**************************/
        /* extended op code */
        b = ldub_code(s->pc++) | 0x100;
        goto reswitch;

        /**************************/
        /* arith & logic */
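        /* the 0x00-0x3d block encodes the operation in bits 5..3 of the
           opcode (add, or, adc, sbb, and, sub, xor, cmp) and the operand
           form in bits 2..1 */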
    case 0x00 ... 0x05:
    case 0x08 ... 0x0d:
    case 0x10 ... 0x15:
    case 0x18 ... 0x1d:
    case 0x20 ... 0x25:
    case 0x28 ... 0x2d:
    case 0x30 ... 0x35:
    case 0x38 ... 0x3d:
        {
            int op, f, val;
            op = (b >> 3) & 7;
            f = (b >> 1) & 3;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            switch(f) {
            case 0: /* OP Ev, Gv */
                modrm = ldub_code(s->pc++);
                reg = ((modrm >> 3) & 7) | rex_r;
                mod = (modrm >> 6) & 3;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    opreg = OR_TMP0;
                } else if (op == OP_XORL && rm == reg) {
                xor_zero:
                    /* xor reg, reg optimisation */
                    gen_op_movl_T0_0();
                    s->cc_op = CC_OP_LOGICB + ot;
                    gen_op_mov_reg_T0(ot, reg);
                    gen_op_update1_cc();
                    break;
                } else {
                    opreg = rm;
                }
                gen_op_mov_TN_reg(ot, 1, reg);
                gen_op(s, op, ot, opreg);
                break;
            case 1: /* OP Gv, Ev */
                modrm = ldub_code(s->pc++);
                mod = (modrm >> 6) & 3;
                reg = ((modrm >> 3) & 7) | rex_r;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3) {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_ld_T1_A0(ot + s->mem_index);
                } else if (op == OP_XORL && rm == reg) {
                    goto xor_zero;
                } else {
                    gen_op_mov_TN_reg(ot, 1, rm);
                }
                gen_op(s, op, ot, reg);
                break;
            case 2: /* OP A, Iv */
                val = insn_get(s, ot);
                gen_op_movl_T1_im(val);
                gen_op(s, op, ot, OR_EAX);
                break;
            }
        }
        break;

    case 0x80: /* GRP1 */
    case 0x81:
    case 0x82:
    case 0x83:
        {
            int val;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (b == 0x83)
                    s->rip_offset = 1;
                else
                    s->rip_offset = insn_const_size(ot);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = rm;
            }

            switch(b) {
            default:
            case 0x80:
            case 0x81:
            case 0x82:
                val = insn_get(s, ot);
                break;
            case 0x83:
                val = (int8_t)insn_get(s, OT_BYTE);
                break;
            }
            gen_op_movl_T1_im(val);
            gen_op(s, op, ot, opreg);
        }
        break;
        /**************************/
        /* inc, dec, and other misc arith */
    case 0x40 ... 0x47: /* inc Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
        break;
    case 0x48 ... 0x4f: /* dec Gv */
        ot = dflag ? OT_LONG : OT_WORD;
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
        break;
    case 0xf6: /* GRP3 */
    case 0xf7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (mod != 3) {
            if (op == 0)
                s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* test */
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
            gen_op_testl_T0_T1_cc();
            s->cc_op = CC_OP_LOGICB + ot;
            break;
        case 2: /* not */
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            break;
        case 3: /* neg */
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
            if (mod != 3) {
                gen_op_st_T0_A0(ot + s->mem_index);
            } else {
                gen_op_mov_reg_T0(ot, rm);
            }
            gen_op_update_neg_cc();
            s->cc_op = CC_OP_SUBB + ot;
            break;
        case 4: /* mul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
#else
                {
                    TCGv_i64 t0, t1;
                    t0 = tcg_temp_new_i64();
                    t1 = tcg_temp_new_i64();
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_helper_mulq_EAX_T0(cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 5: /* imul */
            switch(ot) {
            case OT_BYTE:
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                s->cc_op = CC_OP_MULB;
                break;
            case OT_WORD:
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
                /* XXX: use 32 bit mul which could be faster */
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
                s->cc_op = CC_OP_MULW;
                break;
            default:
            case OT_LONG:
#ifdef TARGET_X86_64
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
#else
                {
                    TCGv_i64 t0, t1;
                    t0 = tcg_temp_new_i64();
                    t1 = tcg_temp_new_i64();
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                    tcg_gen_mul_i64(t0, t0, t1);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                    tcg_gen_shri_i64(t0, t0, 32);
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
                }
#endif
                s->cc_op = CC_OP_MULL;
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_helper_imulq_EAX_T0(cpu_T[0]);
                s->cc_op = CC_OP_MULQ;
                break;
#endif
            }
            break;
        case 6: /* div */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_divb_AL(cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_divw_AX(cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_divl_EAX(cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_divq_EAX(cpu_T[0]);
                break;
#endif
            }
            break;
        case 7: /* idiv */
            switch(ot) {
            case OT_BYTE:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_idivb_AL(cpu_T[0]);
                break;
            case OT_WORD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_idivw_AX(cpu_T[0]);
                break;
            default:
            case OT_LONG:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_idivl_EAX(cpu_T[0]);
                break;
#ifdef TARGET_X86_64
            case OT_QUAD:
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_idivq_EAX(cpu_T[0]);
                break;
#endif
            }
            break;
        default:
            goto illegal_op;
        }
        break;
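
        /* GRP4/GRP5: the modrm reg field selects the operation
           (GRP4: inc/dec only; GRP5 adds call, lcall, jmp, ljmp, push) */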
    case 0xfe: /* GRP4 */
    case 0xff: /* GRP5 */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        op = (modrm >> 3) & 7;
        if (op >= 2 && b == 0xfe) {
            goto illegal_op;
        }
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for calls, the operand is 16 or 32 bit, even
                   in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op >= 2 && op != 3 && op != 5)
                gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }

        switch(op) {
        case 0: /* inc Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, 1);
            break;
        case 1: /* dec Ev */
            if (mod != 3)
                opreg = OR_TMP0;
            else
                opreg = rm;
            gen_inc(s, ot, opreg, -1);
            break;
        case 2: /* call Ev */
            /* XXX: optimize if memory (no 'and' is necessary) */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            next_eip = s->pc - s->cs_base;
            gen_movtl_T1_im(next_eip);
            gen_push_T1(s);
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 3: /* lcall Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_lcall:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_lcall_protected(cpu_tmp2_i32, cpu_T[1],
                                           tcg_const_i32(dflag),
                                           tcg_const_i32(s->pc - pc_start));
            } else {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_lcall_real(cpu_tmp2_i32, cpu_T[1],
                                      tcg_const_i32(dflag),
                                      tcg_const_i32(s->pc - s->cs_base));
            }
            gen_eob(s);
            break;
        case 4: /* jmp Ev */
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            gen_op_jmp_T0();
            gen_eob(s);
            break;
        case 5: /* ljmp Ev */
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        do_ljmp:
            if (s->pe && !s->vm86) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_ljmp_protected(cpu_tmp2_i32, cpu_T[1],
                                          tcg_const_i32(s->pc - pc_start));
            } else {
                gen_op_movl_seg_T0_vm(R_CS);
                gen_op_movl_T0_T1();
                gen_op_jmp_T0();
            }
            gen_eob(s);
            break;
        case 6: /* push Ev */
            gen_push_T0(s);
            break;
        default:
            goto illegal_op;
        }
        break;

    case 0x84: /* test Ev, Gv */
    case 0x85:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0xa8: /* test eAX, Iv */
    case 0xa9:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        val = insn_get(s, ot);

        gen_op_mov_TN_reg(ot, 0, OR_EAX);
        gen_op_movl_T1_im(val);
        gen_op_testl_T0_T1_cc();
        s->cc_op = CC_OP_LOGICB + ot;
        break;

    case 0x98: /* CWDE/CBW */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
        } else {
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
        }
        break;
    case 0x99: /* CDQ/CWD */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
        } else
#endif
        if (dflag == 1) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
        }
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg(ot, 1, reg);
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_helper_imulq_T0_T1(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else
#endif
        if (ot == OT_LONG) {
#ifdef TARGET_X86_64
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
            tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
#else
            {
                TCGv_i64 t0, t1;
                t0 = tcg_temp_new_i64();
                t1 = tcg_temp_new_i64();
                tcg_gen_ext_i32_i64(t0, cpu_T[0]);
                tcg_gen_ext_i32_i64(t1, cpu_T[1]);
                tcg_gen_mul_i64(t0, t0, t1);
                tcg_gen_trunc_i64_i32(cpu_T[0], t0);
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
                tcg_gen_shri_i64(t0, t0, 32);
                tcg_gen_trunc_i64_i32(cpu_T[1], t0);
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
            }
#endif
        } else {
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
            /* XXX: use 32 bit mul which could be faster */
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
        }
        gen_op_mov_reg_T0(ot, reg);
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1(ot, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0(ot + s->mem_index);
            gen_op_mov_reg_T1(ot, reg);
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        {
            int label1, label2;
            TCGv t0, t1, t2, a0;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            t0 = tcg_temp_local_new();
            t1 = tcg_temp_local_new();
            t2 = tcg_temp_local_new();
            a0 = tcg_temp_local_new();
            gen_op_mov_v_reg(ot, t1, reg);
            if (mod == 3) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_mov_tl(a0, cpu_A0);
                gen_op_ld_v(ot + s->mem_index, t0, a0);
                rm = 0; /* avoid warning */
            }
            label1 = gen_new_label();
            tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
            tcg_gen_sub_tl(t2, t2, t0);
            gen_extu(ot, t2);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
            if (mod == 3) {
                label2 = gen_new_label();
                gen_op_mov_reg_v(ot, R_EAX, t0);
                tcg_gen_br(label2);
                gen_set_label(label1);
                gen_op_mov_reg_v(ot, rm, t1);
                gen_set_label(label2);
            } else {
                tcg_gen_mov_tl(t1, t0);
                gen_op_mov_reg_v(ot, R_EAX, t0);
                gen_set_label(label1);
                /* always store */
                gen_op_st_v(ot + s->mem_index, t1, a0);
            }
            tcg_gen_mov_tl(cpu_cc_src, t0);
            tcg_gen_mov_tl(cpu_cc_dst, t2);
            s->cc_op = CC_OP_SUBB + ot;
            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
            tcg_temp_free(a0);
        }
        break;
    case 0x1c7: /* cmpxchg8b */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
            goto illegal_op;
#ifdef TARGET_X86_64
        if (dflag == 2) {
            if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_helper_cmpxchg16b(cpu_A0);
        } else
#endif
        {
            if (!(s->cpuid_features & CPUID_CX8))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_helper_cmpxchg8b(cpu_A0);
        }
        s->cc_op = CC_OP_EFLAGS;
        break;
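
        /* stack operations: in 64-bit mode the default push/pop size is
           8 bytes and a 0x66 prefix selects 2 bytes, hence the
           CODE64-guarded operand size computations below */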
        /**************************/
        /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
        break;
    case 0x60: /* pusha */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            /* NOTE: order is important too for MMU exceptions */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        if (CODE64(s)) {
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
        } else if (s->ss32) {
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
        } else {
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0(ot, R_EBP);
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_helper_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;

        /**************************/
        /* mov */
4952 case 0x89: /* mov Gv, Ev */
4956 ot
= dflag
+ OT_WORD
;
4957 modrm
= ldub_code(s
->pc
++);
4958 reg
= ((modrm
>> 3) & 7) | rex_r
;
4960 /* generate a generic store */
4961 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
4964 case 0xc7: /* mov Ev, Iv */
4968 ot
= dflag
+ OT_WORD
;
4969 modrm
= ldub_code(s
->pc
++);
4970 mod
= (modrm
>> 6) & 3;
4972 s
->rip_offset
= insn_const_size(ot
);
4973 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4975 val
= insn_get(s
, ot
);
4976 gen_op_movl_T0_im(val
);
4978 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4980 gen_op_mov_reg_T0(ot
, (modrm
& 7) | REX_B(s
));
4983 case 0x8b: /* mov Ev, Gv */
4987 ot
= OT_WORD
+ dflag
;
4988 modrm
= ldub_code(s
->pc
++);
4989 reg
= ((modrm
>> 3) & 7) | rex_r
;
4991 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4992 gen_op_mov_reg_T0(ot
, reg
);
4994 case 0x8e: /* mov seg, Gv */
4995 modrm
= ldub_code(s
->pc
++);
4996 reg
= (modrm
>> 3) & 7;
4997 if (reg
>= 6 || reg
== R_CS
)
4999 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5000 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
5002 /* if reg == SS, inhibit interrupts/trace */
5003 /* If several instructions disable interrupts, only the
5005 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5006 gen_helper_set_inhibit_irq();
5010 gen_jmp_im(s
->pc
- s
->cs_base
);
5014 case 0x8c: /* mov Gv, seg */
5015 modrm
= ldub_code(s
->pc
++);
5016 reg
= (modrm
>> 3) & 7;
5017 mod
= (modrm
>> 6) & 3;
5020 gen_op_movl_T0_seg(reg
);
5022 ot
= OT_WORD
+ dflag
;
5025 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5028 case 0x1b6: /* movzbS Gv, Eb */
5029 case 0x1b7: /* movzwS Gv, Eb */
5030 case 0x1be: /* movsbS Gv, Eb */
5031 case 0x1bf: /* movswS Gv, Eb */
5034 /* d_ot is the size of destination */
5035 d_ot
= dflag
+ OT_WORD
;
5036 /* ot is the size of source */
5037 ot
= (b
& 1) + OT_BYTE
;
5038 modrm
= ldub_code(s
->pc
++);
5039 reg
= ((modrm
>> 3) & 7) | rex_r
;
5040 mod
= (modrm
>> 6) & 3;
5041 rm
= (modrm
& 7) | REX_B(s
);
5044 gen_op_mov_TN_reg(ot
, 0, rm
);
5045 switch(ot
| (b
& 8)) {
5047 tcg_gen_ext8u_tl(cpu_T
[0], cpu_T
[0]);
5050 tcg_gen_ext8s_tl(cpu_T
[0], cpu_T
[0]);
5053 tcg_gen_ext16u_tl(cpu_T
[0], cpu_T
[0]);
5057 tcg_gen_ext16s_tl(cpu_T
[0], cpu_T
[0]);
5060 gen_op_mov_reg_T0(d_ot
, reg
);
5062 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5064 gen_op_lds_T0_A0(ot
+ s
->mem_index
);
5066 gen_op_ldu_T0_A0(ot
+ s
->mem_index
);
5068 gen_op_mov_reg_T0(d_ot
, reg
);
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;
    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                gen_op_movq_A0_im(offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, R_EAX);
            } else {
                gen_op_mov_TN_reg(ot, 0, R_EAX);
                gen_op_st_T0_A0(ot + s->mem_index);
            }
        }
        break;
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
            else
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0(ot, reg);
        }
        break;
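
    /* Note on the xchg cases below: with a memory operand the implicit
       bus lock is emulated by bracketing the load/store pair with the
       lock/unlock helpers, unless an explicit LOCK prefix has already
       taken the lock. */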
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_reg_T1(ot, reg);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                gen_helper_lock();
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_st_T0_A0(ot + s->mem_index);
            if (!(prefixes & PREFIX_LOCK))
                gen_helper_unlock();
            gen_op_mov_reg_T1(ot, reg);
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /************************/
        /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        shift = 2;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (shift == 2) {
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;
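
    /* Double-precision shifts: the second operand is loaded into T1 and
       the shift count into T3 (an immediate byte for the imm forms, ECX
       for the cl forms); gen_shiftd_rm_T1_T3 then emits the combined
       shift on the register or memory operand. */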
    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            opreg = OR_TMP0;
        } else {
            opreg = rm;
        }
        gen_op_mov_TN_reg(ot, 1, reg);
        if (shift) {
            val = ldub_code(s->pc++);
            tcg_gen_movi_tl(cpu_T3, val);
        } else {
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
        }
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
        break;
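
    /* The x87 section below decodes the escape opcodes 0xd8..0xdf into a
       6-bit index: the low 3 bits of the escape byte concatenated with
       the modrm reg field, i.e. op = ((b & 7) << 3) | ((modrm >> 3) & 7).
       Memory and register forms are then dispatched on that index. */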
        /************************/
        /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        gen_helper_flds_FT0(cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        gen_helper_fildl_FT0(cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        gen_helper_fldl_FT0(cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        gen_helper_fildl_FT0(cpu_tmp2_i32);
                        break;
                    }

                    gen_helper_fp_arith_ST0_FT0(op1);
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        gen_helper_fpop();
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        gen_helper_flds_ST0(cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        gen_helper_fildl_ST0(cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        gen_helper_fldl_ST0(cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        gen_helper_fildl_ST0(cpu_tmp2_i32);
                        break;
                    }
                    break;
                case 1:
                    /* XXX: the corresponding CPUID bit must be tested ! */
                    switch(op >> 4) {
                    case 1:
                        gen_helper_fisttl_ST0(cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        gen_helper_fisttll_ST0(cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        gen_helper_fistt_ST0(cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    gen_helper_fpop();
                    break;
                default:
                    switch(op >> 4) {
                    case 0:
                        gen_helper_fsts_ST0(cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 1:
                        gen_helper_fistl_ST0(cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        gen_helper_fstl_ST0(cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        gen_helper_fist_ST0(cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    if ((op & 7) == 3)
                        gen_helper_fpop();
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_fldenv(
                    cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0d: /* fldcw mem */
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_fldcw(cpu_tmp2_i32);
                break;
            case 0x0e: /* fnstenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_fstenv(cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0f: /* fnstcw mem */
                gen_helper_fnstcw(cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x1d: /* fldt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_fldt_ST0(cpu_A0);
                break;
            case 0x1f: /* fstpt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_fstt_ST0(cpu_A0);
                gen_helper_fpop();
                break;
            case 0x2c: /* frstor mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_frstor(cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2e: /* fnsave mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_fsave(cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2f: /* fnstsw mem */
                gen_helper_fnstsw(cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x3c: /* fbld */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_fbld_ST0(cpu_A0);
                break;
            case 0x3e: /* fbstp */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                gen_helper_fbst_ST0(cpu_A0);
                gen_helper_fpop();
                break;
            case 0x3d: /* fildll */
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                gen_helper_fildll_ST0(cpu_tmp1_i64);
                break;
            case 0x3f: /* fistpll */
                gen_helper_fistll_ST0(cpu_tmp1_i64);
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                gen_helper_fpop();
                break;
            default:
                goto illegal_op;
            }
        } else {
            /* register float ops */
            opreg = rm;

            switch(op) {
            case 0x08: /* fld sti */
                gen_helper_fpush();
                gen_helper_fmov_ST0_STN(tcg_const_i32((opreg + 1) & 7));
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg));
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_helper_fwait();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    gen_helper_fchs_ST0();
                    break;
                case 1: /* fabs */
                    gen_helper_fabs_ST0();
                    break;
                case 4: /* ftst */
                    gen_helper_fldz_FT0();
                    gen_helper_fcom_ST0_FT0();
                    break;
                case 5: /* fxam */
                    gen_helper_fxam_ST0();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5 */
                switch(rm) {
                case 0:
                    gen_helper_fpush();
                    gen_helper_fld1_ST0();
                    break;
                case 1:
                    gen_helper_fpush();
                    gen_helper_fldl2t_ST0();
                    break;
                case 2:
                    gen_helper_fpush();
                    gen_helper_fldl2e_ST0();
                    break;
                case 3:
                    gen_helper_fpush();
                    gen_helper_fldpi_ST0();
                    break;
                case 4:
                    gen_helper_fpush();
                    gen_helper_fldlg2_ST0();
                    break;
                case 5:
                    gen_helper_fpush();
                    gen_helper_fldln2_ST0();
                    break;
                case 6:
                    gen_helper_fpush();
                    gen_helper_fldz_ST0();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    gen_helper_f2xm1();
                    break;
                case 1: /* fyl2x */
                    gen_helper_fyl2x();
                    break;
                case 2: /* fptan */
                    gen_helper_fptan();
                    break;
                case 3: /* fpatan */
                    gen_helper_fpatan();
                    break;
                case 4: /* fxtract */
                    gen_helper_fxtract();
                    break;
                case 5: /* fprem1 */
                    gen_helper_fprem1();
                    break;
                case 6: /* fdecstp */
                    gen_helper_fdecstp();
                    break;
                default:
                case 7: /* fincstp */
                    gen_helper_fincstp();
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    gen_helper_fprem();
                    break;
                case 1: /* fyl2xp1 */
                    gen_helper_fyl2xp1();
                    break;
                case 2: /* fsqrt */
                    gen_helper_fsqrt();
                    break;
                case 3: /* fsincos */
                    gen_helper_fsincos();
                    break;
                case 5: /* fscale */
                    gen_helper_fscale();
                    break;
                case 4: /* frndint */
                    gen_helper_frndint();
                    break;
                case 6: /* fsin */
                    gen_helper_fsin();
                    break;
                default:
                case 7: /* fcos */
                    gen_helper_fcos();
                    break;
                }
                break;
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7;
                    if (op >= 0x20) {
                        gen_helper_fp_arith_STN_ST0(op1, opreg);
                        if (op >= 0x30)
                            gen_helper_fpop();
                    } else {
                        gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                        gen_helper_fp_arith_ST0_FT0(op1);
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fcom_ST0_FT0();
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fcom_ST0_FT0();
                gen_helper_fpop();
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    gen_helper_fmov_FT0_STN(tcg_const_i32(1));
                    gen_helper_fucom_ST0_FT0();
                    gen_helper_fpop();
                    gen_helper_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    gen_helper_fclex();
                    break;
                case 3: /* fninit */
                    gen_helper_fninit();
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fucomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fcomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                gen_helper_ffree_STN(tcg_const_i32(opreg));
                break;
            case 0x2a: /* fst sti */
                gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
                gen_helper_fpop();
                break;
            case 0x2c: /* fucom st(i) */
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fucom_ST0_FT0();
                break;
            case 0x2d: /* fucomp st(i) */
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fucom_ST0_FT0();
                gen_helper_fpop();
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    gen_helper_fmov_FT0_STN(tcg_const_i32(1));
                    gen_helper_fcom_ST0_FT0();
                    gen_helper_fpop();
                    gen_helper_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                gen_helper_ffree_STN(tcg_const_i32(opreg));
                gen_helper_fpop();
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    gen_helper_fnstsw(cpu_tmp2_i32);
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fucomi_ST0_FT0();
                gen_helper_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
                gen_helper_fcomi_ST0_FT0();
                gen_helper_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1, l1;
                    static const uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
                    l1 = gen_new_label();
                    gen_jcc1(s, s->cc_op, op1, l1);
                    gen_helper_fmov_ST0_STN(tcg_const_i32(opreg));
                    gen_set_label(l1);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
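
        /* String instructions: with a REP/REPZ/REPNZ prefix the
           gen_repz_* helpers emit the count test and flag test loop
           around the basic operation; without a prefix a single step
           (gen_movs, gen_stos, ...) is emitted instead. */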
        /************************/
        /* string ops */
    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;
    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
            if (use_icount) {
                gen_jmp(s, s->pc - s->cs_base);
            }
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
            if (use_icount) {
                gen_jmp(s, s->pc - s->cs_base);
            }
        }
        break;
        /************************/
        /* port I/O */
    case 0xe4:
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0xe6:
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0xec:
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0xee:
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);
        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
        /************************/
        /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_lret_protected(tcg_const_i32(s->dflag),
                                      tcg_const_i32(val));
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(R_CS);
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
        if (!s->pe) {
            /* real mode */
            gen_helper_iret_real(tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_helper_iret_real(tcg_const_i32(s->dflag));
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_iret_protected(tcg_const_i32(s->dflag),
                                      tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
    case 0xe8: /* call im */
        {
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);
        break;
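
    /* cmov always loads the source into a local temporary; gen_jcc1 with
       the inverted condition then branches over the register write-back,
       so the move only takes effect when the condition holds. */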
    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        {
            int l1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            t0 = tcg_temp_local_new();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            }
#ifdef TARGET_X86_64
            if (ot == OT_LONG) {
                /* XXX: specific Intel behaviour ? */
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
                gen_set_label(l1);
                tcg_gen_movi_tl(cpu_tmp0, 0);
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
            } else
#endif
            {
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
                gen_op_mov_reg_v(ot, reg, t0);
                gen_set_label(l1);
            }
            tcg_temp_free(t0);
        }
        break;
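
    /* popf below writes back a different subset of EFLAGS depending on
       privilege: CPL 0 may change IOPL and IF, CPL <= IOPL may change IF
       but not IOPL, and otherwise neither; the 16-bit forms additionally
       mask the written value to 0xffff. */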
        /************************/
        /* flags */
    case 0x9c: /* pushf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_helper_read_eflags(cpu_T[0]);
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            if (s->cpl == 0) {
                if (s->dflag) {
                    gen_helper_write_eflags(cpu_T[0],
                                   tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
                } else {
                    gen_helper_write_eflags(cpu_T[0],
                                   tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        gen_helper_write_eflags(cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
                    } else {
                        gen_helper_write_eflags(cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
                    }
                } else {
                    if (s->dflag) {
                        gen_helper_write_eflags(cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
                    } else {
                        gen_helper_write_eflags(cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_T[0]);
        /* Note: gen_compute_eflags() only gives the condition codes */
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
    case 0xfd: /* std */
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
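
    /* For bt/bts/btr/btc with a memory operand the bit offset may reach
       beyond the addressed word: the sign-extended high bits of the
       offset are scaled by the operand size and added to A0 as a
       displacement, and only the low bits then select the bit within
       the loaded word. */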
        /************************/
        /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            s->rip_offset = 1;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;
        op -= 4;
        goto bt_op;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_exts(ot, cpu_T[1]);
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
    bt_op:
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
        switch(op) {
        case 0:
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            break;
        case 1:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        case 2:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        default:
        case 3:
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        }
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
        }
        break;
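
    /* bsf/bsr derive ZF from cc_dst via CC_OP_LOGICB + ot: cc_dst is
       preset to 0 (ZF set) and only overwritten with 1 when the source
       was non-zero, in which case the helper result is written back. */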
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        {
            int label1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            gen_extu(ot, cpu_T[0]);
            label1 = gen_new_label();
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            t0 = tcg_temp_local_new();
            tcg_gen_mov_tl(t0, cpu_T[0]);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
            if (b & 1) {
                gen_helper_bsr(cpu_T[0], t0);
            } else {
                gen_helper_bsf(cpu_T[0], t0);
            }
            gen_op_mov_reg_T0(ot, reg);
            tcg_gen_movi_tl(cpu_cc_dst, 1);
            gen_set_label(label1);
            tcg_gen_discard_tl(cpu_cc_src);
            s->cc_op = CC_OP_LOGICB + ot;
            tcg_temp_free(t0);
        }
        break;
        /************************/
        /* bcd */
    case 0x27: /* daa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_helper_daa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_helper_das();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_helper_aaa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_helper_aas();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (val == 0) {
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        } else {
            gen_helper_aam(tcg_const_i32(val));
            s->cc_op = CC_OP_LOGICB;
        }
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        gen_helper_aad(tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
        break;
        /************************/
        /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
        }
        break;
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_fwait();
        }
        break;
    case 0xcc: /* int3 */
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_helper_into(tcg_const_i32(s->pc - pc_start));
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
#if 1
        gen_debug(s, pc_start - s->cs_base);
#else
        /* start debug */
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
#endif
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                gen_helper_cli();
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                gen_helper_cli();
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                gen_helper_sti();
                /* interruptions are enabled only the first insn after sti */
                /* If several instructions disable interrupts, only the
                   _first_ does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    gen_helper_set_inhibit_irq();
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0x62: /* bound */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        if (ot == OT_WORD)
            gen_helper_boundw(cpu_A0, cpu_tmp2_i32);
        else
            gen_helper_boundl(cpu_A0, cpu_tmp2_i32);
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else {
            TCGv_i32 tmp0;
            gen_op_mov_TN_reg(OT_LONG, 0, reg);

            tmp0 = tcg_temp_new_i32();
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#else
        {
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#endif
        break;
    case 0xd6: /* salc */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags_c(cpu_T[0]);
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            int l1, l2, l3;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;

            l1 = gen_new_label();
            l2 = gen_new_label();
            l3 = gen_new_label();
            b &= 3;
            switch(b) {
            case 0: /* loopnz */
            case 1: /* loopz */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jz_ecx(s->aflag, l3);
                gen_compute_eflags(cpu_tmp0);
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                if (b == 0) {
                    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
                } else {
                    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
                }
                break;
            case 2: /* loop */
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jnz_ecx(s->aflag, l1);
                break;
            default:
            case 3: /* jecxz */
                gen_op_jz_ecx(s->aflag, l1);
                break;
            }

            gen_set_label(l3);
            gen_jmp_im(next_eip);
            tcg_gen_br(l2);

            gen_set_label(l1);
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            if (b & 2) {
                gen_helper_rdmsr();
            } else {
                gen_helper_wrmsr();
            }
        }
        break;
    case 0x131: /* rdtsc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        if (use_icount)
            gen_io_start();
        gen_helper_rdtsc();
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0x133: /* rdpmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_helper_rdpmc();
        break;
    case 0x134: /* sysenter */
        /* For Intel SYSENTER is valid on 64-bit */
        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_sysenter();
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
        /* For Intel SYSEXIT is valid on 64-bit */
        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_sysexit(tcg_const_i32(dflag));
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        gen_helper_syscall(tcg_const_i32(s->pc - pc_start));
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_sysret(tcg_const_i32(s->dflag));
            /* condition codes are modified only in long mode */
            if (s->lma)
                s->cc_op = CC_OP_EFLAGS;
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_helper_cpuid();
        break;
    case 0xf4: /* hlt */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_hlt(tcg_const_i32(s->pc - pc_start));
            s->is_jmp = 3;
        }
        break;
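
    /* Group 0x100 (sldt/str/lldt/ltr/verr/verw) selects the operation
       from the modrm reg field; sldt/str are plain reads of the hidden
       selector, while the privileged lldt/ltr writes go through
       helpers. */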
    case 0x100:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_lldt(cpu_tmp2_i32);
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                gen_helper_ltr(cpu_tmp2_i32);
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                gen_helper_verr(cpu_T[0]);
            else
                gen_helper_verw(cpu_T[0]);
            s->cc_op = CC_OP_EFLAGS;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        rm = modrm & 7;
        switch(op) {
        case 0: /* sgdt */
            if (mod == 3)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            break;
        case 1:
            if (mod == 3) {
                switch (rm) {
                case 0: /* monitor */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
#ifdef TARGET_X86_64
                    if (s->aflag == 2) {
                        gen_op_movq_A0_reg(R_EAX);
                    } else
#endif
                    {
                        gen_op_movl_A0_reg(R_EAX);
                        if (s->aflag == 0)
                            gen_op_andl_A0_ffff();
                    }
                    gen_add_A0_ds_seg(s);
                    gen_helper_monitor(cpu_A0);
                    break;
                case 1: /* mwait */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC) {
                        gen_op_set_cc_op(s->cc_op);
                        s->cc_op = CC_OP_DYNAMIC;
                    }
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_helper_mwait(tcg_const_i32(s->pc - pc_start));
                    gen_eob(s);
                    break;
                default:
                    goto illegal_op;
                }
            } else { /* sidt */
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            }
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3) {
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                switch(rm) {
                case 0: /* VMRUN */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        gen_helper_vmrun(tcg_const_i32(s->aflag),
                                         tcg_const_i32(s->pc - pc_start));
                        tcg_gen_exit_tb(0);
                        s->is_jmp = 3;
                    }
                    break;
                case 1: /* VMMCALL */
                    if (!(s->flags & HF_SVME_MASK))
                        goto illegal_op;
                    gen_helper_vmmcall();
                    break;
                case 2: /* VMLOAD */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        gen_helper_vmload(tcg_const_i32(s->aflag));
                    }
                    break;
                case 3: /* VMSAVE */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        gen_helper_vmsave(tcg_const_i32(s->aflag));
                    }
                    break;
                case 4: /* STGI */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        gen_helper_stgi();
                    }
                    break;
                case 5: /* CLGI */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        gen_helper_clgi();
                    }
                    break;
                case 6: /* SKINIT */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    gen_helper_skinit();
                    break;
                case 7: /* INVLPGA */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        gen_helper_invlpga(tcg_const_i32(s->aflag));
                    }
                    break;
                default:
                    goto illegal_op;
                }
            } else if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start,
                                        op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
                } else {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
#if defined TARGET_X86_64 && defined WORDS_BIGENDIAN
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
#else
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
#endif
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_helper_lmsw(cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs */
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_helper_invlpg(cpu_A0);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            int label1;
            TCGv t0, t1, t2;

            if (!s->pe || s->vm86)
                goto illegal_op;
            t0 = tcg_temp_local_new();
            t1 = tcg_temp_local_new();
            t2 = tcg_temp_local_new();
            ot = OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                gen_op_mov_v_reg(ot, t0, rm);
            }
            gen_op_mov_v_reg(ot, t1, reg);
            tcg_gen_andi_tl(cpu_tmp0, t0, 3);
            tcg_gen_andi_tl(t1, t1, 3);
            tcg_gen_movi_tl(t2, 0);
            label1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
            tcg_gen_andi_tl(t0, t0, ~3);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_movi_tl(t2, CC_Z);
            gen_set_label(label1);
            if (mod != 3) {
                gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                gen_op_mov_reg_v(ot, rm, t0);
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_compute_eflags(cpu_cc_src);
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
            s->cc_op = CC_OP_EFLAGS;
            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
        }
        break;
    case 0x102: /* lar */
    case 0x103: /* lsl */
        {
            int label1;
            TCGv t0;

            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            t0 = tcg_temp_local_new();
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (b == 0x102)
                gen_helper_lar(t0, cpu_T[0]);
            else
                gen_helper_lsl(t0, cpu_T[0]);
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
            label1 = gen_new_label();
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
            gen_op_mov_reg_v(ot, reg, t0);
            gen_set_label(label1);
            s->cc_op = CC_OP_EFLAGS;
            tcg_temp_free(t0);
        }
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetchnt0 */
        case 2: /* prefetchnt0 */
        case 3: /* prefetchnt0 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
            break;
        }
        break;
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                if (b & 2) {
                    gen_op_mov_TN_reg(ot, 0, rm);
                    gen_helper_write_crN(tcg_const_i32(reg), cpu_T[0]);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_helper_read_crN(cpu_T[0], tcg_const_i32(reg));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                gen_helper_movl_drN_T0(tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            gen_helper_clts();
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
        /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_fxsave(cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_helper_fxrstor(cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
            } else {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        gen_helper_rsm();
        gen_eob(s);
        break;
    case 0x1b8: /* SSE4.2 popcnt */
        if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
             PREFIX_REPZ)
            goto illegal_op;
        if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
            goto illegal_op;

        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7);

        if (s->prefix & PREFIX_DATA)
            ot = OT_WORD;
        else if (s->dflag != 2)
            ot = OT_LONG;
        else
            ot = OT_QUAD;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_helper_popcnt(cpu_T[0], cpu_T[0], tcg_const_i32(ot));
        gen_op_mov_reg_T0(ot, reg);

        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x138 ... 0x13a:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_helper_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_helper_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}
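
/* Register the fixed TCG globals (the env pointer and the lazy
   condition-code working set) and the helper functions; called once at
   startup before any translation. */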
void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                    "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                    "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
                                    "cc_tmp");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
}
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_T[0] = tcg_temp_new();
    cpu_T[1] = tcg_temp_new();
    cpu_A0 = tcg_temp_new();
    cpu_T3 = tcg_temp_new();

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp1_i64 = tcg_temp_new_i64();
    cpu_tmp2_i32 = tcg_temp_new_i32();
    cpu_tmp3_i32 = tcg_temp_new_i32();
    cpu_tmp4 = tcg_temp_new();
    cpu_tmp5 = tcg_temp_new();
    cpu_tmp6 = tcg_temp_new();
    cpu_ptr0 = tcg_temp_new_ptr();
    cpu_ptr1 = tcg_temp_new_ptr();

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for(;;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, X86_DUMP_CCOP);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        int disas_flags;
        qemu_log("----------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        log_target_disas(pc_start, pc_ptr - pc_start, disas_flags);
        qemu_log("\n");
    }
#endif

    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}
void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
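
/* Recover the guest state at a faulting instruction: the side tables
   gen_opc_pc[] and gen_opc_cc_op[], filled in by the search_pc pass
   above, give eip and the lazy cc_op at the start of the instruction
   containing searched_pc. */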
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
        int i;
        qemu_log("RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                qemu_log("0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        qemu_log("spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                 (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)