/*
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#define BUGGY_64(x) NULL
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#endif

//#define MACRO_TEST 1
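/*
 * The wrappers above let op tables shared between 32-bit and 64-bit builds
 * simply drop their 64-bit entries: X86_64_ONLY(x) becomes NULL and
 * X86_64_DEF(x...) expands to nothing when TARGET_X86_64 is not defined,
 * while BUGGY_64() marks 64-bit ops that are disabled because of the
 * push/pop issue noted in the XXX comment.
 */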
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_ext_features;
} DisasContext;
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    /* ... */
    OP_SHL1, /* undocumented */
    /* ... */
};

/* I386 int registers */
enum {
    OR_EAX,   /* MUST be even numbered */
    /* ... */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
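/*
 * The small wrappers above just move immediates into, or mask, the T0/T1/A0
 * TCG globals.  They keep the historical micro-op names (gen_op_*) while
 * emitting TCG operations directly, which is presumably why they survive as
 * inline functions rather than being open-coded at each call site.
 */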
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
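/*
 * For illustration: DEF_REGS(gen_op_cmovw_, _T1_T0) pastes one generated op
 * name per CPU register (gen_op_cmovw_EAX_T1_T0, gen_op_cmovw_ECX_T1_T0,
 * ...), which is how the per-register op tables further down are populated.
 */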
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
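/*
 * These offsets select the low byte (B), high byte (H), word (W), low
 * 32 bits (L) and high 32 bits (LH) of a target_ulong register image in
 * CPUState, so the partial-register loads and stores below can use a single
 * offsetof() plus the right offset on both little- and big-endian hosts.
 */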
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env,
                           offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env,
                           offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
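/*
 * The OT_BYTE case above mirrors the x86 register encoding: without a REX
 * prefix, register numbers 4-7 name the legacy high-byte registers
 * AH/CH/DH/BH (hence the "reg - 4" store at REG_H_OFFSET), while with REX
 * present (x86_64_hregs) every encoding selects the low byte of the named
 * register.
 */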
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env,
                        offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env,
                            offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env,
                     offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env,
                    offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#ifdef TARGET_X86_64
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
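/*
 * gen_op_addl_A0_reg_sN() adds "reg << shift" to the address accumulator
 * A0; it is the building block for SIB-style base + index*scale addressing,
 * and the 0xffffffff mask keeps 32-bit address arithmetic from leaking into
 * the upper half of A0 on 64-bit targets.
 */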
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env,
                     offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

static const int cc_op_arithb[8] = {
    /* ... */
};
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    /* ... bsf/bsr entries for the w, l and q operand sizes ... */
};
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
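/*
 * For the memory helpers above, "idx" packs the operand size in its low two
 * bits and the memory-access index in the upper bits, so callers pass
 * "ot + s->mem_index"; (idx >> 2) - 1 recovers the MMU index handed to the
 * tcg_gen_qemu_* ops.
 */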
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
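/*
 * String instructions always address the destination through ES:EDI and the
 * source through DS:ESI (or a segment override for the source), which is
 * why only gen_string_movl_A0_ESI() honours s->override.
 */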
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

static GenOpFunc *gen_op_in_DX_T0[3] = {
    /* ... b/w/l port-input ops ... */
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    /* ... b/w/l port-output ops ... */
};

static GenOpFunc *gen_op_in[3] = {
    /* ... b/w/l port-input ops ... */
};

static GenOpFunc *gen_op_out[3] = {
    /* ... b/w/l port-output ops ... */
};

static GenOpFunc *gen_check_io_T0[3] = {
    /* ... b/w/l permission-check ops ... */
};

static GenOpFunc *gen_check_io_DX[3] = {
    /* ... b/w/l permission-check ops ... */
};
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
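/*
 * I/O instructions only need a permission check when CPL > IOPL (or in vm86
 * mode); since the check runs in a helper that may raise an exception, the
 * condition-code state and EIP are synchronised first.
 */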
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* presumably a dummy store so a bad EDI faults before the port read */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
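/*
 * All of the string helpers above follow the same pattern: compute the
 * effective address(es), perform the access, load the direction-flag
 * increment into T0 via gen_op_movl_T0_Dshift[ot](), then bump ESI/EDI with
 * an address-size-dependent add (16, 32 or 64 bit).
 */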
/* same method as Valgrind : we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                      \
static inline void gen_repz_ ## op(DisasContext *s, int ot,               \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                         \
    int l2;                                                               \
    gen_update_cc_op(s);                                                  \
    l2 = gen_jz_ecx_string(s, next_eip);                                  \
    gen_ ## op(s, ot);                                                    \
    gen_op_dec_ECX[s->aflag]();                                           \
    /* a loop would cause two single step exceptions if ECX = 1           \
       before rep string_insn */                                          \
    if (!s->jmp_opt)                                                      \
        gen_op_jz_ecx[s->aflag](l2);                                      \
    gen_jmp(s, cur_eip);                                                  \
}

#define GEN_REPZ2(op)                                                     \
static inline void gen_repz_ ## op(DisasContext *s, int ot,               \
                                   target_ulong cur_eip,                  \
                                   target_ulong next_eip,                 \
                                   int nz)                                \
{                                                                         \
    int l2;                                                               \
    gen_update_cc_op(s);                                                  \
    l2 = gen_jz_ecx_string(s, next_eip);                                  \
    gen_ ## op(s, ot);                                                    \
    gen_op_dec_ECX[s->aflag]();                                           \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                    \
    gen_op_string_jnz_sub[nz][ot](l2);                                    \
    if (!s->jmp_opt)                                                      \
        gen_op_jz_ecx[s->aflag](l2);                                      \
    gen_jmp(s, cur_eip);                                                  \
}
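/*
 * The GEN_REPZ/GEN_REPZ2 macros implement the REP/REPZ/REPNZ prefixes
 * without a translated loop: each iteration is its own translation block
 * that either jumps forward to next_eip (ECX == 0, or the Z condition for
 * the scas/cmps forms) or back to cur_eip to re-execute the string insn.
 */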
1170 static GenOpFunc1
*gen_jcc_sub
[4][8] = {
1201 #ifdef TARGET_X86_64
1204 BUGGY_64(gen_op_jb_subq
),
1206 BUGGY_64(gen_op_jbe_subq
),
1209 BUGGY_64(gen_op_jl_subq
),
1210 BUGGY_64(gen_op_jle_subq
),
1214 static GenOpFunc1
*gen_op_loop
[3][4] = {
1225 #ifdef TARGET_X86_64
1234 static GenOpFunc
*gen_setcc_slow
[8] = {
1245 static GenOpFunc
*gen_setcc_sub
[4][8] = {
1248 gen_op_setb_T0_subb
,
1249 gen_op_setz_T0_subb
,
1250 gen_op_setbe_T0_subb
,
1251 gen_op_sets_T0_subb
,
1253 gen_op_setl_T0_subb
,
1254 gen_op_setle_T0_subb
,
1258 gen_op_setb_T0_subw
,
1259 gen_op_setz_T0_subw
,
1260 gen_op_setbe_T0_subw
,
1261 gen_op_sets_T0_subw
,
1263 gen_op_setl_T0_subw
,
1264 gen_op_setle_T0_subw
,
1268 gen_op_setb_T0_subl
,
1269 gen_op_setz_T0_subl
,
1270 gen_op_setbe_T0_subl
,
1271 gen_op_sets_T0_subl
,
1273 gen_op_setl_T0_subl
,
1274 gen_op_setle_T0_subl
,
1276 #ifdef TARGET_X86_64
1279 gen_op_setb_T0_subq
,
1280 gen_op_setz_T0_subq
,
1281 gen_op_setbe_T0_subq
,
1282 gen_op_sets_T0_subq
,
1284 gen_op_setl_T0_subq
,
1285 gen_op_setle_T0_subq
,
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1313 /* if d == OR_TMP0, it means memory operand (address in A0) */
1314 static void gen_op(DisasContext
*s1
, int op
, int ot
, int d
)
1316 GenOpFunc
*gen_update_cc
;
1319 gen_op_mov_TN_reg(ot
, 0, d
);
1321 gen_op_ld_T0_A0(ot
+ s1
->mem_index
);
1326 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1327 gen_op_set_cc_op(s1
->cc_op
);
1329 gen_op_arithc_T0_T1_cc
[ot
][op
- OP_ADCL
]();
1330 gen_op_mov_reg_T0(ot
, d
);
1332 gen_op_arithc_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
- OP_ADCL
]();
1334 s1
->cc_op
= CC_OP_DYNAMIC
;
1337 gen_op_addl_T0_T1();
1338 s1
->cc_op
= CC_OP_ADDB
+ ot
;
1339 gen_update_cc
= gen_op_update2_cc
;
1342 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1343 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1344 gen_update_cc
= gen_op_update2_cc
;
1348 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1349 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1350 gen_update_cc
= gen_op_update1_cc
;
1353 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1354 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1355 gen_update_cc
= gen_op_update1_cc
;
1358 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1359 s1
->cc_op
= CC_OP_LOGICB
+ ot
;
1360 gen_update_cc
= gen_op_update1_cc
;
1363 gen_op_cmpl_T0_T1_cc();
1364 s1
->cc_op
= CC_OP_SUBB
+ ot
;
1365 gen_update_cc
= NULL
;
1368 if (op
!= OP_CMPL
) {
1370 gen_op_mov_reg_T0(ot
, d
);
1372 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1374 /* the flags update must happen after the memory write (precise
1375 exception support) */
1381 /* if d == OR_TMP0, it means memory operand (address in A0) */
1382 static void gen_inc(DisasContext
*s1
, int ot
, int d
, int c
)
1385 gen_op_mov_TN_reg(ot
, 0, d
);
1387 gen_op_ld_T0_A0(ot
+ s1
->mem_index
);
1388 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1389 gen_op_set_cc_op(s1
->cc_op
);
1392 s1
->cc_op
= CC_OP_INCB
+ ot
;
1395 s1
->cc_op
= CC_OP_DECB
+ ot
;
1398 gen_op_mov_reg_T0(ot
, d
);
1400 gen_op_st_T0_A0(ot
+ s1
->mem_index
);
1401 gen_op_update_inc_cc();
1404 static void gen_shift(DisasContext
*s1
, int op
, int ot
, int d
, int s
)
1407 gen_op_mov_TN_reg(ot
, 0, d
);
1409 gen_op_ld_T0_A0(ot
+ s1
->mem_index
);
1411 gen_op_mov_TN_reg(ot
, 1, s
);
1412 /* for zero counts, flags are not updated, so must do it dynamically */
1413 if (s1
->cc_op
!= CC_OP_DYNAMIC
)
1414 gen_op_set_cc_op(s1
->cc_op
);
1417 gen_op_shift_T0_T1_cc
[ot
][op
]();
1419 gen_op_shift_mem_T0_T1_cc
[ot
+ s1
->mem_index
][op
]();
1421 gen_op_mov_reg_T0(ot
, d
);
1422 s1
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
1425 static void gen_shifti(DisasContext
*s1
, int op
, int ot
, int d
, int c
)
1427 /* currently not optimized */
1428 gen_op_movl_T1_im(c
);
1429 gen_shift(s1
, op
, ot
, d
, OR_TMP1
);
1432 static void gen_lea_modrm(DisasContext
*s
, int modrm
, int *reg_ptr
, int *offset_ptr
)
1440 int mod
, rm
, code
, override
, must_add_seg
;
1442 override
= s
->override
;
1443 must_add_seg
= s
->addseg
;
1446 mod
= (modrm
>> 6) & 3;
1458 code
= ldub_code(s
->pc
++);
1459 scale
= (code
>> 6) & 3;
1460 index
= ((code
>> 3) & 7) | REX_X(s
);
1467 if ((base
& 7) == 5) {
1469 disp
= (int32_t)ldl_code(s
->pc
);
1471 if (CODE64(s
) && !havesib
) {
1472 disp
+= s
->pc
+ s
->rip_offset
;
1479 disp
= (int8_t)ldub_code(s
->pc
++);
1483 disp
= ldl_code(s
->pc
);
1489 /* for correct popl handling with esp */
1490 if (base
== 4 && s
->popl_esp_hack
)
1491 disp
+= s
->popl_esp_hack
;
1492 #ifdef TARGET_X86_64
1493 if (s
->aflag
== 2) {
1494 gen_op_movq_A0_reg(base
);
1496 gen_op_addq_A0_im(disp
);
1501 gen_op_movl_A0_reg(base
);
1503 gen_op_addl_A0_im(disp
);
1506 #ifdef TARGET_X86_64
1507 if (s
->aflag
== 2) {
1508 gen_op_movq_A0_im(disp
);
1512 gen_op_movl_A0_im(disp
);
1515 /* XXX: index == 4 is always invalid */
1516 if (havesib
&& (index
!= 4 || scale
!= 0)) {
1517 #ifdef TARGET_X86_64
1518 if (s
->aflag
== 2) {
1519 gen_op_addq_A0_reg_sN(scale
, index
);
1523 gen_op_addl_A0_reg_sN(scale
, index
);
1528 if (base
== R_EBP
|| base
== R_ESP
)
1533 #ifdef TARGET_X86_64
1534 if (s
->aflag
== 2) {
1535 gen_op_addq_A0_seg(override
);
1539 gen_op_addl_A0_seg(override
);
1546 disp
= lduw_code(s
->pc
);
1548 gen_op_movl_A0_im(disp
);
1549 rm
= 0; /* avoid SS override */
1556 disp
= (int8_t)ldub_code(s
->pc
++);
1560 disp
= lduw_code(s
->pc
);
1566 gen_op_movl_A0_reg(R_EBX
);
1567 gen_op_addl_A0_reg_sN(0, R_ESI
);
1570 gen_op_movl_A0_reg(R_EBX
);
1571 gen_op_addl_A0_reg_sN(0, R_EDI
);
1574 gen_op_movl_A0_reg(R_EBP
);
1575 gen_op_addl_A0_reg_sN(0, R_ESI
);
1578 gen_op_movl_A0_reg(R_EBP
);
1579 gen_op_addl_A0_reg_sN(0, R_EDI
);
1582 gen_op_movl_A0_reg(R_ESI
);
1585 gen_op_movl_A0_reg(R_EDI
);
1588 gen_op_movl_A0_reg(R_EBP
);
1592 gen_op_movl_A0_reg(R_EBX
);
1596 gen_op_addl_A0_im(disp
);
1597 gen_op_andl_A0_ffff();
1601 if (rm
== 2 || rm
== 3 || rm
== 6)
1606 gen_op_addl_A0_seg(override
);
1616 static void gen_nop_modrm(DisasContext
*s
, int modrm
)
1618 int mod
, rm
, base
, code
;
1620 mod
= (modrm
>> 6) & 3;
1630 code
= ldub_code(s
->pc
++);
1666 /* used for LEA and MOV AX, mem */
1667 static void gen_add_A0_ds_seg(DisasContext
*s
)
1669 int override
, must_add_seg
;
1670 must_add_seg
= s
->addseg
;
1672 if (s
->override
>= 0) {
1673 override
= s
->override
;
1679 #ifdef TARGET_X86_64
1681 gen_op_addq_A0_seg(override
);
1685 gen_op_addl_A0_seg(override
);
1690 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1692 static void gen_ldst_modrm(DisasContext
*s
, int modrm
, int ot
, int reg
, int is_store
)
1694 int mod
, rm
, opreg
, disp
;
1696 mod
= (modrm
>> 6) & 3;
1697 rm
= (modrm
& 7) | REX_B(s
);
1701 gen_op_mov_TN_reg(ot
, 0, reg
);
1702 gen_op_mov_reg_T0(ot
, rm
);
1704 gen_op_mov_TN_reg(ot
, 0, rm
);
1706 gen_op_mov_reg_T0(ot
, reg
);
1709 gen_lea_modrm(s
, modrm
, &opreg
, &disp
);
1712 gen_op_mov_TN_reg(ot
, 0, reg
);
1713 gen_op_st_T0_A0(ot
+ s
->mem_index
);
1715 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
1717 gen_op_mov_reg_T0(ot
, reg
);
1722 static inline uint32_t insn_get(DisasContext
*s
, int ot
)
1728 ret
= ldub_code(s
->pc
);
1732 ret
= lduw_code(s
->pc
);
1737 ret
= ldl_code(s
->pc
);
1744 static inline int insn_const_size(unsigned int ot
)
1752 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong eip
)
1754 TranslationBlock
*tb
;
1757 pc
= s
->cs_base
+ eip
;
1759 /* NOTE: we handle the case where the TB spans two pages here */
1760 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
1761 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
1762 /* jump to same page: we can use a direct jump */
1763 tcg_gen_goto_tb(tb_num
);
1765 tcg_gen_exit_tb((long)tb
+ tb_num
);
1767 /* jump to another page: currently not optimized */
1773 static inline void gen_jcc(DisasContext
*s
, int b
,
1774 target_ulong val
, target_ulong next_eip
)
1776 TranslationBlock
*tb
;
1783 jcc_op
= (b
>> 1) & 7;
1787 /* we optimize the cmp/jcc case */
1792 func
= gen_jcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1795 /* some jumps are easy to compute */
1837 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1840 func
= gen_jcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1852 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1853 gen_op_set_cc_op(s
->cc_op
);
1854 s
->cc_op
= CC_OP_DYNAMIC
;
1858 gen_setcc_slow
[jcc_op
]();
1859 func
= gen_op_jnz_T0_label
;
1869 l1
= gen_new_label();
1872 gen_goto_tb(s
, 0, next_eip
);
1875 gen_goto_tb(s
, 1, val
);
1880 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
1881 gen_op_set_cc_op(s
->cc_op
);
1882 s
->cc_op
= CC_OP_DYNAMIC
;
1884 gen_setcc_slow
[jcc_op
]();
1890 l1
= gen_new_label();
1891 l2
= gen_new_label();
1892 gen_op_jnz_T0_label(l1
);
1893 gen_jmp_im(next_eip
);
1894 gen_op_jmp_label(l2
);
1902 static void gen_setcc(DisasContext
*s
, int b
)
1908 jcc_op
= (b
>> 1) & 7;
1910 /* we optimize the cmp/jcc case */
1915 func
= gen_setcc_sub
[s
->cc_op
- CC_OP_SUBB
][jcc_op
];
1920 /* some jumps are easy to compute */
1947 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1950 func
= gen_setcc_sub
[(s
->cc_op
- CC_OP_ADDB
) % 4][jcc_op
];
1958 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1959 gen_op_set_cc_op(s
->cc_op
);
1960 func
= gen_setcc_slow
[jcc_op
];
1969 /* move T0 to seg_reg and compute if the CPU state may change. Never
1970 call this function with seg_reg == R_CS */
1971 static void gen_movl_seg_T0(DisasContext
*s
, int seg_reg
, target_ulong cur_eip
)
1973 if (s
->pe
&& !s
->vm86
) {
1974 /* XXX: optimize by finding processor state dynamically */
1975 if (s
->cc_op
!= CC_OP_DYNAMIC
)
1976 gen_op_set_cc_op(s
->cc_op
);
1977 gen_jmp_im(cur_eip
);
1978 gen_op_movl_seg_T0(seg_reg
);
1979 /* abort translation because the addseg value may change or
1980 because ss32 may change. For R_SS, translation must always
1981 stop as a special handling must be done to disable hardware
1982 interrupts for the next instruction */
1983 if (seg_reg
== R_SS
|| (s
->code32
&& seg_reg
< R_FS
))
1986 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[seg_reg
]));
1987 if (seg_reg
== R_SS
)
1992 #define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1995 gen_svm_check_io(DisasContext
*s
, target_ulong pc_start
, uint64_t type
)
1997 #if !defined(CONFIG_USER_ONLY)
1998 if(s
->flags
& (1ULL << INTERCEPT_IOIO_PROT
)) {
1999 if (s
->cc_op
!= CC_OP_DYNAMIC
)
2000 gen_op_set_cc_op(s
->cc_op
);
2001 SVM_movq_T1_im(s
->pc
- s
->cs_base
);
2002 gen_jmp_im(pc_start
- s
->cs_base
);
2004 gen_op_svm_check_intercept_io((uint32_t)(type
>> 32), (uint32_t)type
);
2005 s
->cc_op
= CC_OP_DYNAMIC
;
2006 /* FIXME: maybe we could move the io intercept vector to the TB as well
2007 so we know if this is an EOB or not ... let's assume it's not
static inline int svm_is_rep(int prefixes)
{
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
}
2020 gen_svm_check_intercept_param(DisasContext
*s
, target_ulong pc_start
,
2021 uint64_t type
, uint64_t param
)
2023 if(!(s
->flags
& (INTERCEPT_SVM_MASK
)))
2024 /* no SVM activated */
2027 /* CRx and DRx reads/writes */
2028 case SVM_EXIT_READ_CR0
... SVM_EXIT_EXCP_BASE
- 1:
2029 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2030 gen_op_set_cc_op(s
->cc_op
);
2031 s
->cc_op
= CC_OP_DYNAMIC
;
2033 gen_jmp_im(pc_start
- s
->cs_base
);
2034 SVM_movq_T1_im(param
);
2036 gen_op_svm_check_intercept_param((uint32_t)(type
>> 32), (uint32_t)type
);
2037 /* this is a special case as we do not know if the interception occurs
2038 so we assume there was none */
2041 if(s
->flags
& (1ULL << INTERCEPT_MSR_PROT
)) {
2042 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2043 gen_op_set_cc_op(s
->cc_op
);
2044 s
->cc_op
= CC_OP_DYNAMIC
;
2046 gen_jmp_im(pc_start
- s
->cs_base
);
2047 SVM_movq_T1_im(param
);
2049 gen_op_svm_check_intercept_param((uint32_t)(type
>> 32), (uint32_t)type
);
2050 /* this is a special case as we do not know if the interception occurs
2051 so we assume there was none */
2056 if(s
->flags
& (1ULL << ((type
- SVM_EXIT_INTR
) + INTERCEPT_INTR
))) {
2057 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
2058 gen_op_set_cc_op(s
->cc_op
);
2059 s
->cc_op
= CC_OP_EFLAGS
;
2061 gen_jmp_im(pc_start
- s
->cs_base
);
2062 SVM_movq_T1_im(param
);
2064 gen_op_svm_vmexit(type
>> 32, type
);
2065 /* we can optimize this one so TBs don't get longer
2066 than up to vmexit */
2075 gen_svm_check_intercept(DisasContext
*s
, target_ulong pc_start
, uint64_t type
)
2077 return gen_svm_check_intercept_param(s
, pc_start
, type
, 0);
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2094 /* generate a push. It depends on ss32, addseg and dflag */
2095 static void gen_push_T0(DisasContext
*s
)
2097 #ifdef TARGET_X86_64
2099 gen_op_movq_A0_reg(R_ESP
);
2101 gen_op_addq_A0_im(-8);
2102 gen_op_st_T0_A0(OT_QUAD
+ s
->mem_index
);
2104 gen_op_addq_A0_im(-2);
2105 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
2107 gen_op_mov_reg_A0(2, R_ESP
);
2111 gen_op_movl_A0_reg(R_ESP
);
2113 gen_op_addl_A0_im(-2);
2115 gen_op_addl_A0_im(-4);
2118 gen_op_movl_T1_A0();
2119 gen_op_addl_A0_seg(R_SS
);
2122 gen_op_andl_A0_ffff();
2123 gen_op_movl_T1_A0();
2124 gen_op_addl_A0_seg(R_SS
);
2126 gen_op_st_T0_A0(s
->dflag
+ 1 + s
->mem_index
);
2127 if (s
->ss32
&& !s
->addseg
)
2128 gen_op_mov_reg_A0(1, R_ESP
);
2130 gen_op_mov_reg_T1(s
->ss32
+ 1, R_ESP
);
2134 /* generate a push. It depends on ss32, addseg and dflag */
2135 /* slower version for T1, only used for call Ev */
2136 static void gen_push_T1(DisasContext
*s
)
2138 #ifdef TARGET_X86_64
2140 gen_op_movq_A0_reg(R_ESP
);
2142 gen_op_addq_A0_im(-8);
2143 gen_op_st_T1_A0(OT_QUAD
+ s
->mem_index
);
2145 gen_op_addq_A0_im(-2);
2146 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
2148 gen_op_mov_reg_A0(2, R_ESP
);
2152 gen_op_movl_A0_reg(R_ESP
);
2154 gen_op_addl_A0_im(-2);
2156 gen_op_addl_A0_im(-4);
2159 gen_op_addl_A0_seg(R_SS
);
2162 gen_op_andl_A0_ffff();
2163 gen_op_addl_A0_seg(R_SS
);
2165 gen_op_st_T1_A0(s
->dflag
+ 1 + s
->mem_index
);
2167 if (s
->ss32
&& !s
->addseg
)
2168 gen_op_mov_reg_A0(1, R_ESP
);
2170 gen_stack_update(s
, (-2) << s
->dflag
);
2174 /* two step pop is necessary for precise exceptions */
2175 static void gen_pop_T0(DisasContext
*s
)
2177 #ifdef TARGET_X86_64
2179 gen_op_movq_A0_reg(R_ESP
);
2180 gen_op_ld_T0_A0((s
->dflag
? OT_QUAD
: OT_WORD
) + s
->mem_index
);
2184 gen_op_movl_A0_reg(R_ESP
);
2187 gen_op_addl_A0_seg(R_SS
);
2189 gen_op_andl_A0_ffff();
2190 gen_op_addl_A0_seg(R_SS
);
2192 gen_op_ld_T0_A0(s
->dflag
+ 1 + s
->mem_index
);
2196 static void gen_pop_update(DisasContext
*s
)
2198 #ifdef TARGET_X86_64
2199 if (CODE64(s
) && s
->dflag
) {
2200 gen_stack_update(s
, 8);
2204 gen_stack_update(s
, 2 << s
->dflag
);
2208 static void gen_stack_A0(DisasContext
*s
)
2210 gen_op_movl_A0_reg(R_ESP
);
2212 gen_op_andl_A0_ffff();
2213 gen_op_movl_T1_A0();
2215 gen_op_addl_A0_seg(R_SS
);
2218 /* NOTE: wrap around in 16 bit not fully handled */
2219 static void gen_pusha(DisasContext
*s
)
2222 gen_op_movl_A0_reg(R_ESP
);
2223 gen_op_addl_A0_im(-16 << s
->dflag
);
2225 gen_op_andl_A0_ffff();
2226 gen_op_movl_T1_A0();
2228 gen_op_addl_A0_seg(R_SS
);
2229 for(i
= 0;i
< 8; i
++) {
2230 gen_op_mov_TN_reg(OT_LONG
, 0, 7 - i
);
2231 gen_op_st_T0_A0(OT_WORD
+ s
->dflag
+ s
->mem_index
);
2232 gen_op_addl_A0_im(2 << s
->dflag
);
2234 gen_op_mov_reg_T1(OT_WORD
+ s
->ss32
, R_ESP
);
2237 /* NOTE: wrap around in 16 bit not fully handled */
2238 static void gen_popa(DisasContext
*s
)
2241 gen_op_movl_A0_reg(R_ESP
);
2243 gen_op_andl_A0_ffff();
2244 gen_op_movl_T1_A0();
2245 gen_op_addl_T1_im(16 << s
->dflag
);
2247 gen_op_addl_A0_seg(R_SS
);
2248 for(i
= 0;i
< 8; i
++) {
2249 /* ESP is not reloaded */
2251 gen_op_ld_T0_A0(OT_WORD
+ s
->dflag
+ s
->mem_index
);
2252 gen_op_mov_reg_T0(OT_WORD
+ s
->dflag
, 7 - i
);
2254 gen_op_addl_A0_im(2 << s
->dflag
);
2256 gen_op_mov_reg_T1(OT_WORD
+ s
->ss32
, R_ESP
);
2259 static void gen_enter(DisasContext
*s
, int esp_addend
, int level
)
2264 #ifdef TARGET_X86_64
2266 ot
= s
->dflag
? OT_QUAD
: OT_WORD
;
2269 gen_op_movl_A0_reg(R_ESP
);
2270 gen_op_addq_A0_im(-opsize
);
2271 gen_op_movl_T1_A0();
2274 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
2275 gen_op_st_T0_A0(ot
+ s
->mem_index
);
2277 gen_op_enter64_level(level
, (ot
== OT_QUAD
));
2279 gen_op_mov_reg_T1(ot
, R_EBP
);
2280 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2281 gen_op_mov_reg_T1(OT_QUAD
, R_ESP
);
2285 ot
= s
->dflag
+ OT_WORD
;
2286 opsize
= 2 << s
->dflag
;
2288 gen_op_movl_A0_reg(R_ESP
);
2289 gen_op_addl_A0_im(-opsize
);
2291 gen_op_andl_A0_ffff();
2292 gen_op_movl_T1_A0();
2294 gen_op_addl_A0_seg(R_SS
);
2296 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
2297 gen_op_st_T0_A0(ot
+ s
->mem_index
);
2299 gen_op_enter_level(level
, s
->dflag
);
2301 gen_op_mov_reg_T1(ot
, R_EBP
);
2302 gen_op_addl_T1_im( -esp_addend
+ (-opsize
* level
) );
2303 gen_op_mov_reg_T1(OT_WORD
+ s
->ss32
, R_ESP
);
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
}
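/*
 * Common pattern for anything that ends up in a helper that can change
 * control flow: flush the lazily tracked condition-code state (cc_op) and
 * write the current EIP back to CPUState before calling the op, so the
 * exception or interrupt sees a consistent architectural state.
 */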
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        /* ... normal exit from the translation block ... */
    }
}
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
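/*
 * gen_jmp_tb() is the direct-jump fast path: when block chaining is allowed
 * (s->jmp_opt) it uses gen_goto_tb() so the generated code can be patched
 * to branch straight into the target translation block; otherwise it falls
 * back to storing EIP and ending the block with gen_eob().
 */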
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
#define SSE_SPECIAL ((GenOpFunc2 *)1)

#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2416 static GenOpFunc2
*sse_op_table1
[256][4] = {
2417 /* pure SSE operations */
2418 [0x10] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2419 [0x11] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movups, movupd, movss, movsd */
2420 [0x12] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd, movsldup, movddup */
2421 [0x13] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movlps, movlpd */
2422 [0x14] = { gen_op_punpckldq_xmm
, gen_op_punpcklqdq_xmm
},
2423 [0x15] = { gen_op_punpckhdq_xmm
, gen_op_punpckhqdq_xmm
},
2424 [0x16] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd, movshdup */
2425 [0x17] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movhps, movhpd */
2427 [0x28] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2428 [0x29] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movaps, movapd */
2429 [0x2a] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2430 [0x2b] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntps, movntpd */
2431 [0x2c] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2432 [0x2d] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2433 [0x2e] = { gen_op_ucomiss
, gen_op_ucomisd
},
2434 [0x2f] = { gen_op_comiss
, gen_op_comisd
},
2435 [0x50] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movmskps, movmskpd */
2436 [0x51] = SSE_FOP(sqrt
),
2437 [0x52] = { gen_op_rsqrtps
, NULL
, gen_op_rsqrtss
, NULL
},
2438 [0x53] = { gen_op_rcpps
, NULL
, gen_op_rcpss
, NULL
},
2439 [0x54] = { gen_op_pand_xmm
, gen_op_pand_xmm
}, /* andps, andpd */
2440 [0x55] = { gen_op_pandn_xmm
, gen_op_pandn_xmm
}, /* andnps, andnpd */
2441 [0x56] = { gen_op_por_xmm
, gen_op_por_xmm
}, /* orps, orpd */
2442 [0x57] = { gen_op_pxor_xmm
, gen_op_pxor_xmm
}, /* xorps, xorpd */
2443 [0x58] = SSE_FOP(add
),
2444 [0x59] = SSE_FOP(mul
),
2445 [0x5a] = { gen_op_cvtps2pd
, gen_op_cvtpd2ps
,
2446 gen_op_cvtss2sd
, gen_op_cvtsd2ss
},
2447 [0x5b] = { gen_op_cvtdq2ps
, gen_op_cvtps2dq
, gen_op_cvttps2dq
},
2448 [0x5c] = SSE_FOP(sub
),
2449 [0x5d] = SSE_FOP(min
),
2450 [0x5e] = SSE_FOP(div
),
2451 [0x5f] = SSE_FOP(max
),
2453 [0xc2] = SSE_FOP(cmpeq
),
2454 [0xc6] = { (GenOpFunc2
*)gen_op_shufps
, (GenOpFunc2
*)gen_op_shufpd
},
2456 /* MMX ops and their SSE extensions */
2457 [0x60] = MMX_OP2(punpcklbw
),
2458 [0x61] = MMX_OP2(punpcklwd
),
2459 [0x62] = MMX_OP2(punpckldq
),
2460 [0x63] = MMX_OP2(packsswb
),
2461 [0x64] = MMX_OP2(pcmpgtb
),
2462 [0x65] = MMX_OP2(pcmpgtw
),
2463 [0x66] = MMX_OP2(pcmpgtl
),
2464 [0x67] = MMX_OP2(packuswb
),
2465 [0x68] = MMX_OP2(punpckhbw
),
2466 [0x69] = MMX_OP2(punpckhwd
),
2467 [0x6a] = MMX_OP2(punpckhdq
),
2468 [0x6b] = MMX_OP2(packssdw
),
2469 [0x6c] = { NULL
, gen_op_punpcklqdq_xmm
},
2470 [0x6d] = { NULL
, gen_op_punpckhqdq_xmm
},
2471 [0x6e] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movd mm, ea */
2472 [0x6f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, , movqdu */
2473 [0x70] = { (GenOpFunc2
*)gen_op_pshufw_mmx
,
2474 (GenOpFunc2
*)gen_op_pshufd_xmm
,
2475 (GenOpFunc2
*)gen_op_pshufhw_xmm
,
2476 (GenOpFunc2
*)gen_op_pshuflw_xmm
},
2477 [0x71] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftw */
2478 [0x72] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftd */
2479 [0x73] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* shiftq */
2480 [0x74] = MMX_OP2(pcmpeqb
),
2481 [0x75] = MMX_OP2(pcmpeqw
),
2482 [0x76] = MMX_OP2(pcmpeql
),
2483 [0x77] = { SSE_SPECIAL
}, /* emms */
2484 [0x7c] = { NULL
, gen_op_haddpd
, NULL
, gen_op_haddps
},
2485 [0x7d] = { NULL
, gen_op_hsubpd
, NULL
, gen_op_hsubps
},
2486 [0x7e] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movd, movd, , movq */
2487 [0x7f] = { SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
}, /* movq, movdqa, movdqu */
2488 [0xc4] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pinsrw */
2489 [0xc5] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pextrw */
2490 [0xd0] = { NULL
, gen_op_addsubpd
, NULL
, gen_op_addsubps
},
2491 [0xd1] = MMX_OP2(psrlw
),
2492 [0xd2] = MMX_OP2(psrld
),
2493 [0xd3] = MMX_OP2(psrlq
),
2494 [0xd4] = MMX_OP2(paddq
),
2495 [0xd5] = MMX_OP2(pmullw
),
2496 [0xd6] = { NULL
, SSE_SPECIAL
, SSE_SPECIAL
, SSE_SPECIAL
},
2497 [0xd7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* pmovmskb */
2498 [0xd8] = MMX_OP2(psubusb
),
2499 [0xd9] = MMX_OP2(psubusw
),
2500 [0xda] = MMX_OP2(pminub
),
2501 [0xdb] = MMX_OP2(pand
),
2502 [0xdc] = MMX_OP2(paddusb
),
2503 [0xdd] = MMX_OP2(paddusw
),
2504 [0xde] = MMX_OP2(pmaxub
),
2505 [0xdf] = MMX_OP2(pandn
),
2506 [0xe0] = MMX_OP2(pavgb
),
2507 [0xe1] = MMX_OP2(psraw
),
2508 [0xe2] = MMX_OP2(psrad
),
2509 [0xe3] = MMX_OP2(pavgw
),
2510 [0xe4] = MMX_OP2(pmulhuw
),
2511 [0xe5] = MMX_OP2(pmulhw
),
2512 [0xe6] = { NULL
, gen_op_cvttpd2dq
, gen_op_cvtdq2pd
, gen_op_cvtpd2dq
},
2513 [0xe7] = { SSE_SPECIAL
, SSE_SPECIAL
}, /* movntq, movntq */
2514 [0xe8] = MMX_OP2(psubsb
),
2515 [0xe9] = MMX_OP2(psubsw
),
2516 [0xea] = MMX_OP2(pminsw
),
2517 [0xeb] = MMX_OP2(por
),
2518 [0xec] = MMX_OP2(paddsb
),
2519 [0xed] = MMX_OP2(paddsw
),
2520 [0xee] = MMX_OP2(pmaxsw
),
2521 [0xef] = MMX_OP2(pxor
),
2522 [0xf0] = { NULL
, NULL
, NULL
, SSE_SPECIAL
}, /* lddqu */
2523 [0xf1] = MMX_OP2(psllw
),
2524 [0xf2] = MMX_OP2(pslld
),
2525 [0xf3] = MMX_OP2(psllq
),
2526 [0xf4] = MMX_OP2(pmuludq
),
2527 [0xf5] = MMX_OP2(pmaddwd
),
2528 [0xf6] = MMX_OP2(psadbw
),
2529 [0xf7] = MMX_OP2(maskmov
),
2530 [0xf8] = MMX_OP2(psubb
),
2531 [0xf9] = MMX_OP2(psubw
),
2532 [0xfa] = MMX_OP2(psubl
),
2533 [0xfb] = MMX_OP2(psubq
),
2534 [0xfc] = MMX_OP2(paddb
),
2535 [0xfd] = MMX_OP2(paddw
),
2536 [0xfe] = MMX_OP2(paddl
),
2539 static GenOpFunc2
*sse_op_table2
[3 * 8][2] = {
2540 [0 + 2] = MMX_OP2(psrlw
),
2541 [0 + 4] = MMX_OP2(psraw
),
2542 [0 + 6] = MMX_OP2(psllw
),
2543 [8 + 2] = MMX_OP2(psrld
),
2544 [8 + 4] = MMX_OP2(psrad
),
2545 [8 + 6] = MMX_OP2(pslld
),
2546 [16 + 2] = MMX_OP2(psrlq
),
2547 [16 + 3] = { NULL
, gen_op_psrldq_xmm
},
2548 [16 + 6] = MMX_OP2(psllq
),
2549 [16 + 7] = { NULL
, gen_op_pslldq_xmm
},
2552 static GenOpFunc1
*sse_op_table3
[4 * 3] = {
2555 X86_64_ONLY(gen_op_cvtsq2ss
),
2556 X86_64_ONLY(gen_op_cvtsq2sd
),
2560 X86_64_ONLY(gen_op_cvttss2sq
),
2561 X86_64_ONLY(gen_op_cvttsd2sq
),
2565 X86_64_ONLY(gen_op_cvtss2sq
),
2566 X86_64_ONLY(gen_op_cvtsd2sq
),
2569 static GenOpFunc2
*sse_op_table4
[8][4] = {
2580 static void gen_sse(DisasContext
*s
, int b
, target_ulong pc_start
, int rex_r
)
2582 int b1
, op1_offset
, op2_offset
, is_xmm
, val
, ot
;
2583 int modrm
, mod
, rm
, reg
, reg_addr
, offset_addr
;
2584 GenOpFunc2
*sse_op2
;
2585 GenOpFunc3
*sse_op3
;
2588 if (s
->prefix
& PREFIX_DATA
)
2590 else if (s
->prefix
& PREFIX_REPZ
)
2592 else if (s
->prefix
& PREFIX_REPNZ
)
2596 sse_op2
= sse_op_table1
[b
][b1
];
2599 if (b
<= 0x5f || b
== 0xc6 || b
== 0xc2) {
2609 /* simple MMX/SSE operation */
2610 if (s
->flags
& HF_TS_MASK
) {
2611 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
2614 if (s
->flags
& HF_EM_MASK
) {
2616 gen_exception(s
, EXCP06_ILLOP
, pc_start
- s
->cs_base
);
2619 if (is_xmm
&& !(s
->flags
& HF_OSFXSR_MASK
))
2626 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2627 the static cpu state) */
2632 modrm
= ldub_code(s
->pc
++);
2633 reg
= ((modrm
>> 3) & 7);
2636 mod
= (modrm
>> 6) & 3;
2637 if (sse_op2
== SSE_SPECIAL
) {
2640 case 0x0e7: /* movntq */
2643 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2644 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2646 case 0x1e7: /* movntdq */
2647 case 0x02b: /* movntps */
2648 case 0x12b: /* movntps */
2649 case 0x3f0: /* lddqu */
2652 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2653 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2655 case 0x6e: /* movd mm, ea */
2656 #ifdef TARGET_X86_64
2657 if (s
->dflag
== 2) {
2658 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
2659 gen_op_movq_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2663 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2664 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2667 case 0x16e: /* movd xmm, ea */
2668 #ifdef TARGET_X86_64
2669 if (s
->dflag
== 2) {
2670 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 0);
2671 gen_op_movq_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2675 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 0);
2676 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2679 case 0x6f: /* movq mm, ea */
2681 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2682 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2685 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
].mmx
),
2686 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
2689 case 0x010: /* movups */
2690 case 0x110: /* movupd */
2691 case 0x028: /* movaps */
2692 case 0x128: /* movapd */
2693 case 0x16f: /* movdqa xmm, ea */
2694 case 0x26f: /* movdqu xmm, ea */
2696 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2697 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2699 rm
= (modrm
& 7) | REX_B(s
);
2700 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[reg
]),
2701 offsetof(CPUX86State
,xmm_regs
[rm
]));
2704 case 0x210: /* movss xmm, ea */
2706 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2707 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
2708 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2710 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2711 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2712 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2714 rm
= (modrm
& 7) | REX_B(s
);
2715 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2716 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2719 case 0x310: /* movsd xmm, ea */
2721 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2722 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2724 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2725 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2727 rm
= (modrm
& 7) | REX_B(s
);
2728 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2729 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2732 case 0x012: /* movlps */
2733 case 0x112: /* movlpd */
2735 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2736 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2739 rm
= (modrm
& 7) | REX_B(s
);
2740 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2741 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
2744 case 0x212: /* movsldup */
2746 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2747 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2749 rm
= (modrm
& 7) | REX_B(s
);
2750 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2751 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)));
2752 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2753 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(2)));
2755 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2756 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2757 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2758 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)));
2760 case 0x312: /* movddup */
2762 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2763 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2765 rm
= (modrm
& 7) | REX_B(s
);
2766 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2767 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2769 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2770 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2772 case 0x016: /* movhps */
2773 case 0x116: /* movhpd */
2775 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2776 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2779 rm
= (modrm
& 7) | REX_B(s
);
2780 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)),
2781 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2784 case 0x216: /* movshdup */
2786 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2787 gen_ldo_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2789 rm
= (modrm
& 7) | REX_B(s
);
2790 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)),
2791 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(1)));
2792 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)),
2793 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(3)));
2795 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)),
2796 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(1)));
2797 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(2)),
2798 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(3)));
2800 case 0x7e: /* movd ea, mm */
2801 #ifdef TARGET_X86_64
2802 if (s
->dflag
== 2) {
2803 gen_op_movq_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2804 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 1);
2808 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2809 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2812 case 0x17e: /* movd ea, xmm */
2813 #ifdef TARGET_X86_64
2814 if (s
->dflag
== 2) {
2815 gen_op_movq_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2816 gen_ldst_modrm(s
, modrm
, OT_QUAD
, OR_TMP0
, 1);
2820 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]));
2821 gen_ldst_modrm(s
, modrm
, OT_LONG
, OR_TMP0
, 1);
2824 case 0x27e: /* movq xmm, ea */
2826 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2827 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2829 rm
= (modrm
& 7) | REX_B(s
);
2830 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
2831 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
2833 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2835 case 0x7f: /* movq ea, mm */
2837 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2838 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2841 gen_op_movq(offsetof(CPUX86State
,fpregs
[rm
].mmx
),
2842 offsetof(CPUX86State
,fpregs
[reg
].mmx
));
2845 case 0x011: /* movups */
2846 case 0x111: /* movupd */
2847 case 0x029: /* movaps */
2848 case 0x129: /* movapd */
2849 case 0x17f: /* movdqa ea, xmm */
2850 case 0x27f: /* movdqu ea, xmm */
2852 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2853 gen_sto_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
]));
2855 rm
= (modrm
& 7) | REX_B(s
);
2856 gen_op_movo(offsetof(CPUX86State
,xmm_regs
[rm
]),
2857 offsetof(CPUX86State
,xmm_regs
[reg
]));
2860 case 0x211: /* movss ea, xmm */
2862 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2863 gen_op_movl_T0_env(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2864 gen_op_st_T0_A0(OT_LONG
+ s
->mem_index
);
2866 rm
= (modrm
& 7) | REX_B(s
);
2867 gen_op_movl(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_L(0)),
2868 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_L(0)));
2871 case 0x311: /* movsd ea, xmm */
2873 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2874 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2876 rm
= (modrm
& 7) | REX_B(s
);
2877 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
2878 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2881 case 0x013: /* movlps */
2882 case 0x113: /* movlpd */
2884 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2885 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
2890 case 0x017: /* movhps */
2891 case 0x117: /* movhpd */
2893 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2894 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
2899 case 0x71: /* shift mm, im */
2902 case 0x171: /* shift xmm, im */
2905 val
= ldub_code(s
->pc
++);
2907 gen_op_movl_T0_im(val
);
2908 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
2910 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(1)));
2911 op1_offset
= offsetof(CPUX86State
,xmm_t0
);
2913 gen_op_movl_T0_im(val
);
2914 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(0)));
2916 gen_op_movl_env_T0(offsetof(CPUX86State
,mmx_t0
.MMX_L(1)));
2917 op1_offset
= offsetof(CPUX86State
,mmx_t0
);
2919 sse_op2
= sse_op_table2
[((b
- 1) & 3) * 8 + (((modrm
>> 3)) & 7)][b1
];
2923 rm
= (modrm
& 7) | REX_B(s
);
2924 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2927 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2929 sse_op2(op2_offset
, op1_offset
);
2931 case 0x050: /* movmskps */
2932 rm
= (modrm
& 7) | REX_B(s
);
2933 gen_op_movmskps(offsetof(CPUX86State
,xmm_regs
[rm
]));
2934 gen_op_mov_reg_T0(OT_LONG
, reg
);
2936 case 0x150: /* movmskpd */
2937 rm
= (modrm
& 7) | REX_B(s
);
2938 gen_op_movmskpd(offsetof(CPUX86State
,xmm_regs
[rm
]));
2939 gen_op_mov_reg_T0(OT_LONG
, reg
);
2941 case 0x02a: /* cvtpi2ps */
2942 case 0x12a: /* cvtpi2pd */
2945 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2946 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
2947 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
2950 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
2952 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2955 gen_op_cvtpi2ps(op1_offset
, op2_offset
);
2959 gen_op_cvtpi2pd(op1_offset
, op2_offset
);
2963 case 0x22a: /* cvtsi2ss */
2964 case 0x32a: /* cvtsi2sd */
2965 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
2966 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
2967 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
2968 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2)](op1_offset
);
2970 case 0x02c: /* cvttps2pi */
2971 case 0x12c: /* cvttpd2pi */
2972 case 0x02d: /* cvtps2pi */
2973 case 0x12d: /* cvtpd2pi */
2976 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
2977 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
2978 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
2980 rm
= (modrm
& 7) | REX_B(s
);
2981 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
2983 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
);
2986 gen_op_cvttps2pi(op1_offset
, op2_offset
);
2989 gen_op_cvttpd2pi(op1_offset
, op2_offset
);
2992 gen_op_cvtps2pi(op1_offset
, op2_offset
);
2995 gen_op_cvtpd2pi(op1_offset
, op2_offset
);
2999 case 0x22c: /* cvttss2si */
3000 case 0x32c: /* cvttsd2si */
3001 case 0x22d: /* cvtss2si */
3002 case 0x32d: /* cvtsd2si */
3003 ot
= (s
->dflag
== 2) ? OT_QUAD
: OT_LONG
;
3005 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3007 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_Q(0)));
3009 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3010 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3012 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3014 rm
= (modrm
& 7) | REX_B(s
);
3015 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3017 sse_op_table3
[(s
->dflag
== 2) * 2 + ((b
>> 8) - 2) + 4 +
3018 (b
& 1) * 4](op2_offset
);
3019 gen_op_mov_reg_T0(ot
, reg
);
3021 case 0xc4: /* pinsrw */
3024 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
3025 val
= ldub_code(s
->pc
++);
3028 gen_op_pinsrw_xmm(offsetof(CPUX86State
,xmm_regs
[reg
]), val
);
3031 gen_op_pinsrw_mmx(offsetof(CPUX86State
,fpregs
[reg
].mmx
), val
);
3034 case 0xc5: /* pextrw */
3038 val
= ldub_code(s
->pc
++);
3041 rm
= (modrm
& 7) | REX_B(s
);
3042 gen_op_pextrw_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]), val
);
3046 gen_op_pextrw_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
), val
);
3048 reg
= ((modrm
>> 3) & 7) | rex_r
;
3049 gen_op_mov_reg_T0(OT_LONG
, reg
);
3051 case 0x1d6: /* movq ea, xmm */
3053 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3054 gen_stq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3056 rm
= (modrm
& 7) | REX_B(s
);
3057 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)),
3058 offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)));
3059 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(1)));
3062 case 0x2d6: /* movq2dq */
3065 gen_op_movq(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(0)),
3066 offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3067 gen_op_movq_env_0(offsetof(CPUX86State
,xmm_regs
[reg
].XMM_Q(1)));
3069 case 0x3d6: /* movdq2q */
3071 rm
= (modrm
& 7) | REX_B(s
);
3072 gen_op_movq(offsetof(CPUX86State
,fpregs
[reg
& 7].mmx
),
3073 offsetof(CPUX86State
,xmm_regs
[rm
].XMM_Q(0)));
3075 case 0xd7: /* pmovmskb */
3080 rm
= (modrm
& 7) | REX_B(s
);
3081 gen_op_pmovmskb_xmm(offsetof(CPUX86State
,xmm_regs
[rm
]));
3084 gen_op_pmovmskb_mmx(offsetof(CPUX86State
,fpregs
[rm
].mmx
));
3086 reg
= ((modrm
>> 3) & 7) | rex_r
;
3087 gen_op_mov_reg_T0(OT_LONG
, reg
);
3093 /* generic MMX or SSE operation */
3096 /* maskmov : we must prepare A0 */
3099 #ifdef TARGET_X86_64
3100 if (s
->aflag
== 2) {
3101 gen_op_movq_A0_reg(R_EDI
);
3105 gen_op_movl_A0_reg(R_EDI
);
3107 gen_op_andl_A0_ffff();
3109 gen_add_A0_ds_seg(s
);
3111 case 0x70: /* pshufx insn */
3112 case 0xc6: /* pshufx insn */
3113 case 0xc2: /* compare insns */
3120 op1_offset
= offsetof(CPUX86State
,xmm_regs
[reg
]);
3122 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3123 op2_offset
= offsetof(CPUX86State
,xmm_t0
);
3124 if (b1
>= 2 && ((b
>= 0x50 && b
<= 0x5f && b
!= 0x5b) ||
3126 /* specific case for SSE single instructions */
3129 gen_op_ld_T0_A0(OT_LONG
+ s
->mem_index
);
3130 gen_op_movl_env_T0(offsetof(CPUX86State
,xmm_t0
.XMM_L(0)));
3133 gen_ldq_env_A0
[s
->mem_index
>> 2](offsetof(CPUX86State
,xmm_t0
.XMM_D(0)));
3136 gen_ldo_env_A0
[s
->mem_index
>> 2](op2_offset
);
3139 rm
= (modrm
& 7) | REX_B(s
);
3140 op2_offset
= offsetof(CPUX86State
,xmm_regs
[rm
]);
3143 op1_offset
= offsetof(CPUX86State
,fpregs
[reg
].mmx
);
3145 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3146 op2_offset
= offsetof(CPUX86State
,mmx_t0
);
3147 gen_ldq_env_A0
[s
->mem_index
>> 2](op2_offset
);
3150 op2_offset
= offsetof(CPUX86State
,fpregs
[rm
].mmx
);
3154 case 0x70: /* pshufx insn */
3155 case 0xc6: /* pshufx insn */
3156 val
= ldub_code(s
->pc
++);
3157 sse_op3
= (GenOpFunc3
*)sse_op2
;
3158 sse_op3(op1_offset
, op2_offset
, val
);
3162 val
= ldub_code(s
->pc
++);
3165 sse_op2
= sse_op_table4
[val
][b1
];
3166 sse_op2(op1_offset
, op2_offset
);
3169 sse_op2(op1_offset
, op2_offset
);
3172 if (b
== 0x2e || b
== 0x2f) {
3173 s
->cc_op
= CC_OP_EFLAGS
;
3179 /* convert one instruction. s->is_jmp is set if the translation must
3180 be stopped. Return the next pc value */
3181 static target_ulong
disas_insn(DisasContext
*s
, target_ulong pc_start
)
3183 int b
, prefixes
, aflag
, dflag
;
3185 int modrm
, reg
, rm
, mod
, reg_addr
, op
, opreg
, offset_addr
, val
;
3186 target_ulong next_eip
, tval
;
3196 #ifdef TARGET_X86_64
3201 s
->rip_offset
= 0; /* for relative ip address */
3203 b
= ldub_code(s
->pc
);
3205 /* check prefixes */
3206 #ifdef TARGET_X86_64
3210 prefixes
|= PREFIX_REPZ
;
3213 prefixes
|= PREFIX_REPNZ
;
3216 prefixes
|= PREFIX_LOCK
;
3237 prefixes
|= PREFIX_DATA
;
3240 prefixes
|= PREFIX_ADR
;
3244 rex_w
= (b
>> 3) & 1;
3245 rex_r
= (b
& 0x4) << 1;
3246 s
->rex_x
= (b
& 0x2) << 2;
3247 REX_B(s
) = (b
& 0x1) << 3;
3248 x86_64_hregs
= 1; /* select uniform byte register addressing */
3252 /* 0x66 is ignored if rex.w is set */
3255 if (prefixes
& PREFIX_DATA
)
3258 if (!(prefixes
& PREFIX_ADR
))
3265 prefixes
|= PREFIX_REPZ
;
3268 prefixes
|= PREFIX_REPNZ
;
3271 prefixes
|= PREFIX_LOCK
;
3292 prefixes
|= PREFIX_DATA
;
3295 prefixes
|= PREFIX_ADR
;
3298 if (prefixes
& PREFIX_DATA
)
3300 if (prefixes
& PREFIX_ADR
)
3304 s
->prefix
= prefixes
;
3308 /* lock generation */
3309 if (prefixes
& PREFIX_LOCK
)
3312 /* now check op code */
3316 /**************************/
3317 /* extended op code */
3318 b
= ldub_code(s
->pc
++) | 0x100;
3321 /**************************/
3339 ot
= dflag
+ OT_WORD
;
3342 case 0: /* OP Ev, Gv */
3343 modrm
= ldub_code(s
->pc
++);
3344 reg
= ((modrm
>> 3) & 7) | rex_r
;
3345 mod
= (modrm
>> 6) & 3;
3346 rm
= (modrm
& 7) | REX_B(s
);
3348 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3350 } else if (op
== OP_XORL
&& rm
== reg
) {
3352 /* xor reg, reg optimisation */
3354 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3355 gen_op_mov_reg_T0(ot
, reg
);
3356 gen_op_update1_cc();
3361 gen_op_mov_TN_reg(ot
, 1, reg
);
3362 gen_op(s
, op
, ot
, opreg
);
3364 case 1: /* OP Gv, Ev */
3365 modrm
= ldub_code(s
->pc
++);
3366 mod
= (modrm
>> 6) & 3;
3367 reg
= ((modrm
>> 3) & 7) | rex_r
;
3368 rm
= (modrm
& 7) | REX_B(s
);
3370 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3371 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3372 } else if (op
== OP_XORL
&& rm
== reg
) {
3375 gen_op_mov_TN_reg(ot
, 1, rm
);
3377 gen_op(s
, op
, ot
, reg
);
3379 case 2: /* OP A, Iv */
3380 val
= insn_get(s
, ot
);
3381 gen_op_movl_T1_im(val
);
3382 gen_op(s
, op
, ot
, OR_EAX
);
3388 case 0x80: /* GRP1 */
3398 ot
= dflag
+ OT_WORD
;
3400 modrm
= ldub_code(s
->pc
++);
3401 mod
= (modrm
>> 6) & 3;
3402 rm
= (modrm
& 7) | REX_B(s
);
3403 op
= (modrm
>> 3) & 7;
3409 s
->rip_offset
= insn_const_size(ot
);
3410 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3421 val
= insn_get(s
, ot
);
3424 val
= (int8_t)insn_get(s
, OT_BYTE
);
3427 gen_op_movl_T1_im(val
);
3428 gen_op(s
, op
, ot
, opreg
);
3432 /**************************/
3433 /* inc, dec, and other misc arith */
3434 case 0x40 ... 0x47: /* inc Gv */
3435 ot
= dflag
? OT_LONG
: OT_WORD
;
3436 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), 1);
3438 case 0x48 ... 0x4f: /* dec Gv */
3439 ot
= dflag
? OT_LONG
: OT_WORD
;
3440 gen_inc(s
, ot
, OR_EAX
+ (b
& 7), -1);
3442 case 0xf6: /* GRP3 */
3447 ot
= dflag
+ OT_WORD
;
3449 modrm
= ldub_code(s
->pc
++);
3450 mod
= (modrm
>> 6) & 3;
3451 rm
= (modrm
& 7) | REX_B(s
);
3452 op
= (modrm
>> 3) & 7;
3455 s
->rip_offset
= insn_const_size(ot
);
3456 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3457 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3459 gen_op_mov_TN_reg(ot
, 0, rm
);
3464 val
= insn_get(s
, ot
);
3465 gen_op_movl_T1_im(val
);
3466 gen_op_testl_T0_T1_cc();
3467 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3472 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3474 gen_op_mov_reg_T0(ot
, rm
);
3480 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3482 gen_op_mov_reg_T0(ot
, rm
);
3484 gen_op_update_neg_cc();
3485 s
->cc_op
= CC_OP_SUBB
+ ot
;
3490 gen_op_mulb_AL_T0();
3491 s
->cc_op
= CC_OP_MULB
;
3494 gen_op_mulw_AX_T0();
3495 s
->cc_op
= CC_OP_MULW
;
3499 gen_op_mull_EAX_T0();
3500 s
->cc_op
= CC_OP_MULL
;
3502 #ifdef TARGET_X86_64
3504 gen_op_mulq_EAX_T0();
3505 s
->cc_op
= CC_OP_MULQ
;
3513 gen_op_imulb_AL_T0();
3514 s
->cc_op
= CC_OP_MULB
;
3517 gen_op_imulw_AX_T0();
3518 s
->cc_op
= CC_OP_MULW
;
3522 gen_op_imull_EAX_T0();
3523 s
->cc_op
= CC_OP_MULL
;
3525 #ifdef TARGET_X86_64
3527 gen_op_imulq_EAX_T0();
3528 s
->cc_op
= CC_OP_MULQ
;
3536 gen_jmp_im(pc_start
- s
->cs_base
);
3537 gen_op_divb_AL_T0();
3540 gen_jmp_im(pc_start
- s
->cs_base
);
3541 gen_op_divw_AX_T0();
3545 gen_jmp_im(pc_start
- s
->cs_base
);
3547 /* XXX: this is just a test */
3548 tcg_gen_macro_2(cpu_T
[0], cpu_T
[0], MACRO_TEST
);
3550 tcg_gen_helper_0_1(helper_divl_EAX_T0
, cpu_T
[0]);
3553 #ifdef TARGET_X86_64
3555 gen_jmp_im(pc_start
- s
->cs_base
);
3556 gen_op_divq_EAX_T0();
3564 gen_jmp_im(pc_start
- s
->cs_base
);
3565 gen_op_idivb_AL_T0();
3568 gen_jmp_im(pc_start
- s
->cs_base
);
3569 gen_op_idivw_AX_T0();
3573 gen_jmp_im(pc_start
- s
->cs_base
);
3574 tcg_gen_helper_0_1(helper_idivl_EAX_T0
, cpu_T
[0]);
3576 #ifdef TARGET_X86_64
3578 gen_jmp_im(pc_start
- s
->cs_base
);
3579 gen_op_idivq_EAX_T0();
3589 case 0xfe: /* GRP4 */
3590 case 0xff: /* GRP5 */
3594 ot
= dflag
+ OT_WORD
;
3596 modrm
= ldub_code(s
->pc
++);
3597 mod
= (modrm
>> 6) & 3;
3598 rm
= (modrm
& 7) | REX_B(s
);
3599 op
= (modrm
>> 3) & 7;
3600 if (op
>= 2 && b
== 0xfe) {
3604 if (op
== 2 || op
== 4) {
3605 /* operand size for jumps is 64 bit */
3607 } else if (op
== 3 || op
== 5) {
3608 /* for call calls, the operand is 16 or 32 bit, even
3610 ot
= dflag
? OT_LONG
: OT_WORD
;
3611 } else if (op
== 6) {
3612 /* default push size is 64 bit */
3613 ot
= dflag
? OT_QUAD
: OT_WORD
;
3617 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3618 if (op
>= 2 && op
!= 3 && op
!= 5)
3619 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3621 gen_op_mov_TN_reg(ot
, 0, rm
);
3625 case 0: /* inc Ev */
3630 gen_inc(s
, ot
, opreg
, 1);
3632 case 1: /* dec Ev */
3637 gen_inc(s
, ot
, opreg
, -1);
3639 case 2: /* call Ev */
3640 /* XXX: optimize if memory (no 'and' is necessary) */
3642 gen_op_andl_T0_ffff();
3643 next_eip
= s
->pc
- s
->cs_base
;
3644 gen_movtl_T1_im(next_eip
);
3649 case 3: /* lcall Ev */
3650 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3651 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3652 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3654 if (s
->pe
&& !s
->vm86
) {
3655 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3656 gen_op_set_cc_op(s
->cc_op
);
3657 gen_jmp_im(pc_start
- s
->cs_base
);
3658 gen_op_lcall_protected_T0_T1(dflag
, s
->pc
- pc_start
);
3660 gen_op_lcall_real_T0_T1(dflag
, s
->pc
- s
->cs_base
);
3664 case 4: /* jmp Ev */
3666 gen_op_andl_T0_ffff();
3670 case 5: /* ljmp Ev */
3671 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3672 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
3673 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
3675 if (s
->pe
&& !s
->vm86
) {
3676 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3677 gen_op_set_cc_op(s
->cc_op
);
3678 gen_jmp_im(pc_start
- s
->cs_base
);
3679 gen_op_ljmp_protected_T0_T1(s
->pc
- pc_start
);
3681 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
3682 gen_op_movl_T0_T1();
3687 case 6: /* push Ev */
3695 case 0x84: /* test Ev, Gv */
3700 ot
= dflag
+ OT_WORD
;
3702 modrm
= ldub_code(s
->pc
++);
3703 mod
= (modrm
>> 6) & 3;
3704 rm
= (modrm
& 7) | REX_B(s
);
3705 reg
= ((modrm
>> 3) & 7) | rex_r
;
3707 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3708 gen_op_mov_TN_reg(ot
, 1, reg
);
3709 gen_op_testl_T0_T1_cc();
3710 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3713 case 0xa8: /* test eAX, Iv */
3718 ot
= dflag
+ OT_WORD
;
3719 val
= insn_get(s
, ot
);
3721 gen_op_mov_TN_reg(ot
, 0, OR_EAX
);
3722 gen_op_movl_T1_im(val
);
3723 gen_op_testl_T0_T1_cc();
3724 s
->cc_op
= CC_OP_LOGICB
+ ot
;
3727 case 0x98: /* CWDE/CBW */
3728 #ifdef TARGET_X86_64
3730 gen_op_movslq_RAX_EAX();
3734 gen_op_movswl_EAX_AX();
3736 gen_op_movsbw_AX_AL();
3738 case 0x99: /* CDQ/CWD */
3739 #ifdef TARGET_X86_64
3741 gen_op_movsqo_RDX_RAX();
3745 gen_op_movslq_EDX_EAX();
3747 gen_op_movswl_DX_AX();
3749 case 0x1af: /* imul Gv, Ev */
3750 case 0x69: /* imul Gv, Ev, I */
3752 ot
= dflag
+ OT_WORD
;
3753 modrm
= ldub_code(s
->pc
++);
3754 reg
= ((modrm
>> 3) & 7) | rex_r
;
3756 s
->rip_offset
= insn_const_size(ot
);
3759 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
3761 val
= insn_get(s
, ot
);
3762 gen_op_movl_T1_im(val
);
3763 } else if (b
== 0x6b) {
3764 val
= (int8_t)insn_get(s
, OT_BYTE
);
3765 gen_op_movl_T1_im(val
);
3767 gen_op_mov_TN_reg(ot
, 1, reg
);
3770 #ifdef TARGET_X86_64
3771 if (ot
== OT_QUAD
) {
3772 gen_op_imulq_T0_T1();
3775 if (ot
== OT_LONG
) {
3776 gen_op_imull_T0_T1();
3778 gen_op_imulw_T0_T1();
3780 gen_op_mov_reg_T0(ot
, reg
);
3781 s
->cc_op
= CC_OP_MULB
+ ot
;
3784 case 0x1c1: /* xadd Ev, Gv */
3788 ot
= dflag
+ OT_WORD
;
3789 modrm
= ldub_code(s
->pc
++);
3790 reg
= ((modrm
>> 3) & 7) | rex_r
;
3791 mod
= (modrm
>> 6) & 3;
3793 rm
= (modrm
& 7) | REX_B(s
);
3794 gen_op_mov_TN_reg(ot
, 0, reg
);
3795 gen_op_mov_TN_reg(ot
, 1, rm
);
3796 gen_op_addl_T0_T1();
3797 gen_op_mov_reg_T1(ot
, reg
);
3798 gen_op_mov_reg_T0(ot
, rm
);
3800 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3801 gen_op_mov_TN_reg(ot
, 0, reg
);
3802 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
3803 gen_op_addl_T0_T1();
3804 gen_op_st_T0_A0(ot
+ s
->mem_index
);
3805 gen_op_mov_reg_T1(ot
, reg
);
3807 gen_op_update2_cc();
3808 s
->cc_op
= CC_OP_ADDB
+ ot
;
3811 case 0x1b1: /* cmpxchg Ev, Gv */
3815 ot
= dflag
+ OT_WORD
;
3816 modrm
= ldub_code(s
->pc
++);
3817 reg
= ((modrm
>> 3) & 7) | rex_r
;
3818 mod
= (modrm
>> 6) & 3;
3819 gen_op_mov_TN_reg(ot
, 1, reg
);
3821 rm
= (modrm
& 7) | REX_B(s
);
3822 gen_op_mov_TN_reg(ot
, 0, rm
);
3823 gen_op_cmpxchg_T0_T1_EAX_cc
[ot
]();
3824 gen_op_mov_reg_T0(ot
, rm
);
3826 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3827 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
3828 gen_op_cmpxchg_mem_T0_T1_EAX_cc
[ot
+ s
->mem_index
]();
3830 s
->cc_op
= CC_OP_SUBB
+ ot
;
3832 case 0x1c7: /* cmpxchg8b */
3833 modrm
= ldub_code(s
->pc
++);
3834 mod
= (modrm
>> 6) & 3;
3835 if ((mod
== 3) || ((modrm
& 0x38) != 0x8))
3837 gen_jmp_im(pc_start
- s
->cs_base
);
3838 if (s
->cc_op
!= CC_OP_DYNAMIC
)
3839 gen_op_set_cc_op(s
->cc_op
);
3840 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
3842 s
->cc_op
= CC_OP_EFLAGS
;
3845 /**************************/
3847 case 0x50 ... 0x57: /* push */
3848 gen_op_mov_TN_reg(OT_LONG
, 0, (b
& 7) | REX_B(s
));
3851 case 0x58 ... 0x5f: /* pop */
3853 ot
= dflag
? OT_QUAD
: OT_WORD
;
3855 ot
= dflag
+ OT_WORD
;
3858 /* NOTE: order is important for pop %sp */
3860 gen_op_mov_reg_T0(ot
, (b
& 7) | REX_B(s
));
3862 case 0x60: /* pusha */
3867 case 0x61: /* popa */
3872 case 0x68: /* push Iv */
3875 ot
= dflag
? OT_QUAD
: OT_WORD
;
3877 ot
= dflag
+ OT_WORD
;
3880 val
= insn_get(s
, ot
);
3882 val
= (int8_t)insn_get(s
, OT_BYTE
);
3883 gen_op_movl_T0_im(val
);
3886 case 0x8f: /* pop Ev */
3888 ot
= dflag
? OT_QUAD
: OT_WORD
;
3890 ot
= dflag
+ OT_WORD
;
3892 modrm
= ldub_code(s
->pc
++);
3893 mod
= (modrm
>> 6) & 3;
3896 /* NOTE: order is important for pop %sp */
3898 rm
= (modrm
& 7) | REX_B(s
);
3899 gen_op_mov_reg_T0(ot
, rm
);
3901 /* NOTE: order is important too for MMU exceptions */
3902 s
->popl_esp_hack
= 1 << ot
;
3903 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
3904 s
->popl_esp_hack
= 0;
3908 case 0xc8: /* enter */
3911 val
= lduw_code(s
->pc
);
3913 level
= ldub_code(s
->pc
++);
3914 gen_enter(s
, val
, level
);
3917 case 0xc9: /* leave */
3918 /* XXX: exception not precise (ESP is updated before potential exception) */
3920 gen_op_mov_TN_reg(OT_QUAD
, 0, R_EBP
);
3921 gen_op_mov_reg_T0(OT_QUAD
, R_ESP
);
3922 } else if (s
->ss32
) {
3923 gen_op_mov_TN_reg(OT_LONG
, 0, R_EBP
);
3924 gen_op_mov_reg_T0(OT_LONG
, R_ESP
);
3926 gen_op_mov_TN_reg(OT_WORD
, 0, R_EBP
);
3927 gen_op_mov_reg_T0(OT_WORD
, R_ESP
);
3931 ot
= dflag
? OT_QUAD
: OT_WORD
;
3933 ot
= dflag
+ OT_WORD
;
3935 gen_op_mov_reg_T0(ot
, R_EBP
);
3938 case 0x06: /* push es */
3939 case 0x0e: /* push cs */
3940 case 0x16: /* push ss */
3941 case 0x1e: /* push ds */
3944 gen_op_movl_T0_seg(b
>> 3);
3947 case 0x1a0: /* push fs */
3948 case 0x1a8: /* push gs */
3949 gen_op_movl_T0_seg((b
>> 3) & 7);
3952 case 0x07: /* pop es */
3953 case 0x17: /* pop ss */
3954 case 0x1f: /* pop ds */
3959 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
3962 /* if reg == SS, inhibit interrupts/trace. */
3963 /* If several instructions disable interrupts, only the
3965 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
3966 gen_op_set_inhibit_irq();
3970 gen_jmp_im(s
->pc
- s
->cs_base
);
3974 case 0x1a1: /* pop fs */
3975 case 0x1a9: /* pop gs */
3977 gen_movl_seg_T0(s
, (b
>> 3) & 7, pc_start
- s
->cs_base
);
3980 gen_jmp_im(s
->pc
- s
->cs_base
);
3985 /**************************/
3988 case 0x89: /* mov Gv, Ev */
3992 ot
= dflag
+ OT_WORD
;
3993 modrm
= ldub_code(s
->pc
++);
3994 reg
= ((modrm
>> 3) & 7) | rex_r
;
3996 /* generate a generic store */
3997 gen_ldst_modrm(s
, modrm
, ot
, reg
, 1);
4000 case 0xc7: /* mov Ev, Iv */
4004 ot
= dflag
+ OT_WORD
;
4005 modrm
= ldub_code(s
->pc
++);
4006 mod
= (modrm
>> 6) & 3;
4008 s
->rip_offset
= insn_const_size(ot
);
4009 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4011 val
= insn_get(s
, ot
);
4012 gen_op_movl_T0_im(val
);
4014 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4016 gen_op_mov_reg_T0(ot
, (modrm
& 7) | REX_B(s
));
4019 case 0x8b: /* mov Ev, Gv */
4023 ot
= OT_WORD
+ dflag
;
4024 modrm
= ldub_code(s
->pc
++);
4025 reg
= ((modrm
>> 3) & 7) | rex_r
;
4027 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
4028 gen_op_mov_reg_T0(ot
, reg
);
4030 case 0x8e: /* mov seg, Gv */
4031 modrm
= ldub_code(s
->pc
++);
4032 reg
= (modrm
>> 3) & 7;
4033 if (reg
>= 6 || reg
== R_CS
)
4035 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
4036 gen_movl_seg_T0(s
, reg
, pc_start
- s
->cs_base
);
4038 /* if reg == SS, inhibit interrupts/trace */
4039 /* If several instructions disable interrupts, only the
4041 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
4042 gen_op_set_inhibit_irq();
4046 gen_jmp_im(s
->pc
- s
->cs_base
);
4050 case 0x8c: /* mov Gv, seg */
4051 modrm
= ldub_code(s
->pc
++);
4052 reg
= (modrm
>> 3) & 7;
4053 mod
= (modrm
>> 6) & 3;
4056 gen_op_movl_T0_seg(reg
);
4058 ot
= OT_WORD
+ dflag
;
4061 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
4064 case 0x1b6: /* movzbS Gv, Eb */
4065 case 0x1b7: /* movzwS Gv, Eb */
4066 case 0x1be: /* movsbS Gv, Eb */
4067 case 0x1bf: /* movswS Gv, Eb */
4070 /* d_ot is the size of destination */
4071 d_ot
= dflag
+ OT_WORD
;
4072 /* ot is the size of source */
4073 ot
= (b
& 1) + OT_BYTE
;
4074 modrm
= ldub_code(s
->pc
++);
4075 reg
= ((modrm
>> 3) & 7) | rex_r
;
4076 mod
= (modrm
>> 6) & 3;
4077 rm
= (modrm
& 7) | REX_B(s
);
4080 gen_op_mov_TN_reg(ot
, 0, rm
);
4081 switch(ot
| (b
& 8)) {
4083 gen_op_movzbl_T0_T0();
4086 gen_op_movsbl_T0_T0();
4089 gen_op_movzwl_T0_T0();
4093 gen_op_movswl_T0_T0();
4096 gen_op_mov_reg_T0(d_ot
, reg
);
4098 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4100 gen_op_lds_T0_A0(ot
+ s
->mem_index
);
4102 gen_op_ldu_T0_A0(ot
+ s
->mem_index
);
4104 gen_op_mov_reg_T0(d_ot
, reg
);
4109 case 0x8d: /* lea */
4110 ot
= dflag
+ OT_WORD
;
4111 modrm
= ldub_code(s
->pc
++);
4112 mod
= (modrm
>> 6) & 3;
4115 reg
= ((modrm
>> 3) & 7) | rex_r
;
4116 /* we must ensure that no segment is added */
4120 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4122 gen_op_mov_reg_A0(ot
- OT_WORD
, reg
);
4125 case 0xa0: /* mov EAX, Ov */
4127 case 0xa2: /* mov Ov, EAX */
4130 target_ulong offset_addr
;
4135 ot
= dflag
+ OT_WORD
;
4136 #ifdef TARGET_X86_64
4137 if (s
->aflag
== 2) {
4138 offset_addr
= ldq_code(s
->pc
);
4140 gen_op_movq_A0_im(offset_addr
);
4145 offset_addr
= insn_get(s
, OT_LONG
);
4147 offset_addr
= insn_get(s
, OT_WORD
);
4149 gen_op_movl_A0_im(offset_addr
);
4151 gen_add_A0_ds_seg(s
);
4153 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4154 gen_op_mov_reg_T0(ot
, R_EAX
);
4156 gen_op_mov_TN_reg(ot
, 0, R_EAX
);
4157 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4161 case 0xd7: /* xlat */
4162 #ifdef TARGET_X86_64
4163 if (s
->aflag
== 2) {
4164 gen_op_movq_A0_reg(R_EBX
);
4165 gen_op_addq_A0_AL();
4169 gen_op_movl_A0_reg(R_EBX
);
4170 gen_op_addl_A0_AL();
4172 gen_op_andl_A0_ffff();
4174 gen_add_A0_ds_seg(s
);
4175 gen_op_ldu_T0_A0(OT_BYTE
+ s
->mem_index
);
4176 gen_op_mov_reg_T0(OT_BYTE
, R_EAX
);
4178 case 0xb0 ... 0xb7: /* mov R, Ib */
4179 val
= insn_get(s
, OT_BYTE
);
4180 gen_op_movl_T0_im(val
);
4181 gen_op_mov_reg_T0(OT_BYTE
, (b
& 7) | REX_B(s
));
4183 case 0xb8 ... 0xbf: /* mov R, Iv */
4184 #ifdef TARGET_X86_64
4188 tmp
= ldq_code(s
->pc
);
4190 reg
= (b
& 7) | REX_B(s
);
4191 gen_movtl_T0_im(tmp
);
4192 gen_op_mov_reg_T0(OT_QUAD
, reg
);
4196 ot
= dflag
? OT_LONG
: OT_WORD
;
4197 val
= insn_get(s
, ot
);
4198 reg
= (b
& 7) | REX_B(s
);
4199 gen_op_movl_T0_im(val
);
4200 gen_op_mov_reg_T0(ot
, reg
);
4204 case 0x91 ... 0x97: /* xchg R, EAX */
4205 ot
= dflag
+ OT_WORD
;
4206 reg
= (b
& 7) | REX_B(s
);
4210 case 0x87: /* xchg Ev, Gv */
4214 ot
= dflag
+ OT_WORD
;
4215 modrm
= ldub_code(s
->pc
++);
4216 reg
= ((modrm
>> 3) & 7) | rex_r
;
4217 mod
= (modrm
>> 6) & 3;
4219 rm
= (modrm
& 7) | REX_B(s
);
4221 gen_op_mov_TN_reg(ot
, 0, reg
);
4222 gen_op_mov_TN_reg(ot
, 1, rm
);
4223 gen_op_mov_reg_T0(ot
, rm
);
4224 gen_op_mov_reg_T1(ot
, reg
);
4226 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4227 gen_op_mov_TN_reg(ot
, 0, reg
);
4228 /* for xchg, lock is implicit */
4229 if (!(prefixes
& PREFIX_LOCK
))
4231 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4232 gen_op_st_T0_A0(ot
+ s
->mem_index
);
4233 if (!(prefixes
& PREFIX_LOCK
))
4235 gen_op_mov_reg_T1(ot
, reg
);
4238 case 0xc4: /* les Gv */
4243 case 0xc5: /* lds Gv */
4248 case 0x1b2: /* lss Gv */
4251 case 0x1b4: /* lfs Gv */
4254 case 0x1b5: /* lgs Gv */
4257 ot
= dflag
? OT_LONG
: OT_WORD
;
4258 modrm
= ldub_code(s
->pc
++);
4259 reg
= ((modrm
>> 3) & 7) | rex_r
;
4260 mod
= (modrm
>> 6) & 3;
4263 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4264 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
4265 gen_add_A0_im(s
, 1 << (ot
- OT_WORD
+ 1));
4266 /* load the segment first to handle exceptions properly */
4267 gen_op_ldu_T0_A0(OT_WORD
+ s
->mem_index
);
4268 gen_movl_seg_T0(s
, op
, pc_start
- s
->cs_base
);
4269 /* then put the data */
4270 gen_op_mov_reg_T1(ot
, reg
);
4272 gen_jmp_im(s
->pc
- s
->cs_base
);
4277 /************************/
4288 ot
= dflag
+ OT_WORD
;
4290 modrm
= ldub_code(s
->pc
++);
4291 mod
= (modrm
>> 6) & 3;
4292 op
= (modrm
>> 3) & 7;
4298 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4301 opreg
= (modrm
& 7) | REX_B(s
);
4306 gen_shift(s
, op
, ot
, opreg
, OR_ECX
);
4309 shift
= ldub_code(s
->pc
++);
4311 gen_shifti(s
, op
, ot
, opreg
, shift
);
4326 case 0x1a4: /* shld imm */
4330 case 0x1a5: /* shld cl */
4334 case 0x1ac: /* shrd imm */
4338 case 0x1ad: /* shrd cl */
4342 ot
= dflag
+ OT_WORD
;
4343 modrm
= ldub_code(s
->pc
++);
4344 mod
= (modrm
>> 6) & 3;
4345 rm
= (modrm
& 7) | REX_B(s
);
4346 reg
= ((modrm
>> 3) & 7) | rex_r
;
4349 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4350 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
4352 gen_op_mov_TN_reg(ot
, 0, rm
);
4354 gen_op_mov_TN_reg(ot
, 1, reg
);
4357 val
= ldub_code(s
->pc
++);
4364 gen_op_shiftd_T0_T1_im_cc
[ot
][op
](val
);
4366 gen_op_shiftd_mem_T0_T1_im_cc
[ot
+ s
->mem_index
][op
](val
);
4367 if (op
== 0 && ot
!= OT_WORD
)
4368 s
->cc_op
= CC_OP_SHLB
+ ot
;
4370 s
->cc_op
= CC_OP_SARB
+ ot
;
4373 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4374 gen_op_set_cc_op(s
->cc_op
);
4376 gen_op_shiftd_T0_T1_ECX_cc
[ot
][op
]();
4378 gen_op_shiftd_mem_T0_T1_ECX_cc
[ot
+ s
->mem_index
][op
]();
4379 s
->cc_op
= CC_OP_DYNAMIC
; /* cannot predict flags after */
4382 gen_op_mov_reg_T0(ot
, rm
);
4386 /************************/
4389 if (s
->flags
& (HF_EM_MASK
| HF_TS_MASK
)) {
4390 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4391 /* XXX: what to do if illegal op ? */
4392 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
4395 modrm
= ldub_code(s
->pc
++);
4396 mod
= (modrm
>> 6) & 3;
4398 op
= ((b
& 7) << 3) | ((modrm
>> 3) & 7);
4401 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
4403 case 0x00 ... 0x07: /* fxxxs */
4404 case 0x10 ... 0x17: /* fixxxl */
4405 case 0x20 ... 0x27: /* fxxxl */
4406 case 0x30 ... 0x37: /* fixxx */
4413 gen_op_flds_FT0_A0();
4416 gen_op_fildl_FT0_A0();
4419 gen_op_fldl_FT0_A0();
4423 gen_op_fild_FT0_A0();
4427 gen_op_fp_arith_ST0_FT0
[op1
]();
4429 /* fcomp needs pop */
4434 case 0x08: /* flds */
4435 case 0x0a: /* fsts */
4436 case 0x0b: /* fstps */
4437 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4438 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4439 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4444 gen_op_flds_ST0_A0();
4447 gen_op_fildl_ST0_A0();
4450 gen_op_fldl_ST0_A0();
4454 gen_op_fild_ST0_A0();
4461 gen_op_fisttl_ST0_A0();
4464 gen_op_fisttll_ST0_A0();
4468 gen_op_fistt_ST0_A0();
4475 gen_op_fsts_ST0_A0();
4478 gen_op_fistl_ST0_A0();
4481 gen_op_fstl_ST0_A0();
4485 gen_op_fist_ST0_A0();
4493 case 0x0c: /* fldenv mem */
4494 gen_op_fldenv_A0(s
->dflag
);
4496 case 0x0d: /* fldcw mem */
4499 case 0x0e: /* fnstenv mem */
4500 gen_op_fnstenv_A0(s
->dflag
);
4502 case 0x0f: /* fnstcw mem */
4505 case 0x1d: /* fldt mem */
4506 gen_op_fldt_ST0_A0();
4508 case 0x1f: /* fstpt mem */
4509 gen_op_fstt_ST0_A0();
4512 case 0x2c: /* frstor mem */
4513 gen_op_frstor_A0(s
->dflag
);
4515 case 0x2e: /* fnsave mem */
4516 gen_op_fnsave_A0(s
->dflag
);
4518 case 0x2f: /* fnstsw mem */
4521 case 0x3c: /* fbld */
4522 gen_op_fbld_ST0_A0();
4524 case 0x3e: /* fbstp */
4525 gen_op_fbst_ST0_A0();
4528 case 0x3d: /* fildll */
4529 gen_op_fildll_ST0_A0();
4531 case 0x3f: /* fistpll */
4532 gen_op_fistll_ST0_A0();
4539 /* register float ops */
4543 case 0x08: /* fld sti */
4545 gen_op_fmov_ST0_STN((opreg
+ 1) & 7);
4547 case 0x09: /* fxchg sti */
4548 case 0x29: /* fxchg4 sti, undocumented op */
4549 case 0x39: /* fxchg7 sti, undocumented op */
4550 gen_op_fxchg_ST0_STN(opreg
);
4552 case 0x0a: /* grp d9/2 */
4555 /* check exceptions (FreeBSD FPU probe) */
4556 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4557 gen_op_set_cc_op(s
->cc_op
);
4558 gen_jmp_im(pc_start
- s
->cs_base
);
4565 case 0x0c: /* grp d9/4 */
4575 gen_op_fcom_ST0_FT0();
4584 case 0x0d: /* grp d9/5 */
4593 gen_op_fldl2t_ST0();
4597 gen_op_fldl2e_ST0();
4605 gen_op_fldlg2_ST0();
4609 gen_op_fldln2_ST0();
4620 case 0x0e: /* grp d9/6 */
4631 case 3: /* fpatan */
4634 case 4: /* fxtract */
4637 case 5: /* fprem1 */
4640 case 6: /* fdecstp */
4644 case 7: /* fincstp */
4649 case 0x0f: /* grp d9/7 */
4654 case 1: /* fyl2xp1 */
4660 case 3: /* fsincos */
4663 case 5: /* fscale */
4666 case 4: /* frndint */
4678 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4679 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4680 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4686 gen_op_fp_arith_STN_ST0
[op1
](opreg
);
4690 gen_op_fmov_FT0_STN(opreg
);
4691 gen_op_fp_arith_ST0_FT0
[op1
]();
4695 case 0x02: /* fcom */
4696 case 0x22: /* fcom2, undocumented op */
4697 gen_op_fmov_FT0_STN(opreg
);
4698 gen_op_fcom_ST0_FT0();
4700 case 0x03: /* fcomp */
4701 case 0x23: /* fcomp3, undocumented op */
4702 case 0x32: /* fcomp5, undocumented op */
4703 gen_op_fmov_FT0_STN(opreg
);
4704 gen_op_fcom_ST0_FT0();
4707 case 0x15: /* da/5 */
4709 case 1: /* fucompp */
4710 gen_op_fmov_FT0_STN(1);
4711 gen_op_fucom_ST0_FT0();
4721 case 0: /* feni (287 only, just do nop here) */
4723 case 1: /* fdisi (287 only, just do nop here) */
4728 case 3: /* fninit */
4731 case 4: /* fsetpm (287 only, just do nop here) */
4737 case 0x1d: /* fucomi */
4738 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4739 gen_op_set_cc_op(s
->cc_op
);
4740 gen_op_fmov_FT0_STN(opreg
);
4741 gen_op_fucomi_ST0_FT0();
4742 s
->cc_op
= CC_OP_EFLAGS
;
4744 case 0x1e: /* fcomi */
4745 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4746 gen_op_set_cc_op(s
->cc_op
);
4747 gen_op_fmov_FT0_STN(opreg
);
4748 gen_op_fcomi_ST0_FT0();
4749 s
->cc_op
= CC_OP_EFLAGS
;
4751 case 0x28: /* ffree sti */
4752 gen_op_ffree_STN(opreg
);
4754 case 0x2a: /* fst sti */
4755 gen_op_fmov_STN_ST0(opreg
);
4757 case 0x2b: /* fstp sti */
4758 case 0x0b: /* fstp1 sti, undocumented op */
4759 case 0x3a: /* fstp8 sti, undocumented op */
4760 case 0x3b: /* fstp9 sti, undocumented op */
4761 gen_op_fmov_STN_ST0(opreg
);
4764 case 0x2c: /* fucom st(i) */
4765 gen_op_fmov_FT0_STN(opreg
);
4766 gen_op_fucom_ST0_FT0();
4768 case 0x2d: /* fucomp st(i) */
4769 gen_op_fmov_FT0_STN(opreg
);
4770 gen_op_fucom_ST0_FT0();
4773 case 0x33: /* de/3 */
4775 case 1: /* fcompp */
4776 gen_op_fmov_FT0_STN(1);
4777 gen_op_fcom_ST0_FT0();
4785 case 0x38: /* ffreep sti, undocumented op */
4786 gen_op_ffree_STN(opreg
);
4789 case 0x3c: /* df/4 */
4792 gen_op_fnstsw_EAX();
4798 case 0x3d: /* fucomip */
4799 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4800 gen_op_set_cc_op(s
->cc_op
);
4801 gen_op_fmov_FT0_STN(opreg
);
4802 gen_op_fucomi_ST0_FT0();
4804 s
->cc_op
= CC_OP_EFLAGS
;
4806 case 0x3e: /* fcomip */
4807 if (s
->cc_op
!= CC_OP_DYNAMIC
)
4808 gen_op_set_cc_op(s
->cc_op
);
4809 gen_op_fmov_FT0_STN(opreg
);
4810 gen_op_fcomi_ST0_FT0();
4812 s
->cc_op
= CC_OP_EFLAGS
;
4814 case 0x10 ... 0x13: /* fcmovxx */
4818 const static uint8_t fcmov_cc
[8] = {
4824 op1
= fcmov_cc
[op
& 3] | ((op
>> 3) & 1);
4826 gen_op_fcmov_ST0_STN_T0(opreg
);
4834 /************************/
4837 case 0xa4: /* movsS */
4842 ot
= dflag
+ OT_WORD
;
4844 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4845 gen_repz_movs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4851 case 0xaa: /* stosS */
4856 ot
= dflag
+ OT_WORD
;
4858 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4859 gen_repz_stos(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4864 case 0xac: /* lodsS */
4869 ot
= dflag
+ OT_WORD
;
4870 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4871 gen_repz_lods(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4876 case 0xae: /* scasS */
4881 ot
= dflag
+ OT_WORD
;
4882 if (prefixes
& PREFIX_REPNZ
) {
4883 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4884 } else if (prefixes
& PREFIX_REPZ
) {
4885 gen_repz_scas(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4888 s
->cc_op
= CC_OP_SUBB
+ ot
;
4892 case 0xa6: /* cmpsS */
4897 ot
= dflag
+ OT_WORD
;
4898 if (prefixes
& PREFIX_REPNZ
) {
4899 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 1);
4900 } else if (prefixes
& PREFIX_REPZ
) {
4901 gen_repz_cmps(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
, 0);
4904 s
->cc_op
= CC_OP_SUBB
+ ot
;
4907 case 0x6c: /* insS */
4912 ot
= dflag
? OT_LONG
: OT_WORD
;
4913 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4914 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
4915 gen_op_andl_T0_ffff();
4916 if (gen_svm_check_io(s
, pc_start
,
4917 SVM_IOIO_TYPE_MASK
| (1 << (4+ot
)) |
4918 svm_is_rep(prefixes
) | 4 | (1 << (7+s
->aflag
))))
4920 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4921 gen_repz_ins(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4926 case 0x6e: /* outsS */
4931 ot
= dflag
? OT_LONG
: OT_WORD
;
4932 gen_check_io(s
, ot
, 1, pc_start
- s
->cs_base
);
4933 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
4934 gen_op_andl_T0_ffff();
4935 if (gen_svm_check_io(s
, pc_start
,
4936 (1 << (4+ot
)) | svm_is_rep(prefixes
) |
4937 4 | (1 << (7+s
->aflag
))))
4939 if (prefixes
& (PREFIX_REPZ
| PREFIX_REPNZ
)) {
4940 gen_repz_outs(s
, ot
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
4946 /************************/
4954 ot
= dflag
? OT_LONG
: OT_WORD
;
4955 val
= ldub_code(s
->pc
++);
4956 gen_op_movl_T0_im(val
);
4957 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4958 if (gen_svm_check_io(s
, pc_start
,
4959 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
) |
4963 gen_op_mov_reg_T1(ot
, R_EAX
);
4970 ot
= dflag
? OT_LONG
: OT_WORD
;
4971 val
= ldub_code(s
->pc
++);
4972 gen_op_movl_T0_im(val
);
4973 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4974 if (gen_svm_check_io(s
, pc_start
, svm_is_rep(prefixes
) |
4977 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
4985 ot
= dflag
? OT_LONG
: OT_WORD
;
4986 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
4987 gen_op_andl_T0_ffff();
4988 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
4989 if (gen_svm_check_io(s
, pc_start
,
4990 SVM_IOIO_TYPE_MASK
| svm_is_rep(prefixes
) |
4994 gen_op_mov_reg_T1(ot
, R_EAX
);
5001 ot
= dflag
? OT_LONG
: OT_WORD
;
5002 gen_op_mov_TN_reg(OT_WORD
, 0, R_EDX
);
5003 gen_op_andl_T0_ffff();
5004 gen_check_io(s
, ot
, 0, pc_start
- s
->cs_base
);
5005 if (gen_svm_check_io(s
, pc_start
,
5006 svm_is_rep(prefixes
) | (1 << (4+ot
))))
5008 gen_op_mov_TN_reg(ot
, 1, R_EAX
);
5012 /************************/
5014 case 0xc2: /* ret im */
5015 val
= ldsw_code(s
->pc
);
5018 if (CODE64(s
) && s
->dflag
)
5020 gen_stack_update(s
, val
+ (2 << s
->dflag
));
5022 gen_op_andl_T0_ffff();
5026 case 0xc3: /* ret */
5030 gen_op_andl_T0_ffff();
5034 case 0xca: /* lret im */
5035 val
= ldsw_code(s
->pc
);
5038 if (s
->pe
&& !s
->vm86
) {
5039 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5040 gen_op_set_cc_op(s
->cc_op
);
5041 gen_jmp_im(pc_start
- s
->cs_base
);
5042 gen_op_lret_protected(s
->dflag
, val
);
5046 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
5048 gen_op_andl_T0_ffff();
5049 /* NOTE: keeping EIP updated is not a problem in case of
5053 gen_op_addl_A0_im(2 << s
->dflag
);
5054 gen_op_ld_T0_A0(1 + s
->dflag
+ s
->mem_index
);
5055 gen_op_movl_seg_T0_vm(offsetof(CPUX86State
,segs
[R_CS
]));
5056 /* add stack offset */
5057 gen_stack_update(s
, val
+ (4 << s
->dflag
));
5061 case 0xcb: /* lret */
5064 case 0xcf: /* iret */
5065 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IRET
))
5069 gen_op_iret_real(s
->dflag
);
5070 s
->cc_op
= CC_OP_EFLAGS
;
5071 } else if (s
->vm86
) {
5073 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5075 gen_op_iret_real(s
->dflag
);
5076 s
->cc_op
= CC_OP_EFLAGS
;
5079 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5080 gen_op_set_cc_op(s
->cc_op
);
5081 gen_jmp_im(pc_start
- s
->cs_base
);
5082 gen_op_iret_protected(s
->dflag
, s
->pc
- s
->cs_base
);
5083 s
->cc_op
= CC_OP_EFLAGS
;
5087 case 0xe8: /* call im */
5090 tval
= (int32_t)insn_get(s
, OT_LONG
);
5092 tval
= (int16_t)insn_get(s
, OT_WORD
);
5093 next_eip
= s
->pc
- s
->cs_base
;
5097 gen_movtl_T0_im(next_eip
);
5102 case 0x9a: /* lcall im */
5104 unsigned int selector
, offset
;
5108 ot
= dflag
? OT_LONG
: OT_WORD
;
5109 offset
= insn_get(s
, ot
);
5110 selector
= insn_get(s
, OT_WORD
);
5112 gen_op_movl_T0_im(selector
);
5113 gen_op_movl_T1_imu(offset
);
5116 case 0xe9: /* jmp im */
5118 tval
= (int32_t)insn_get(s
, OT_LONG
);
5120 tval
= (int16_t)insn_get(s
, OT_WORD
);
5121 tval
+= s
->pc
- s
->cs_base
;
5126 case 0xea: /* ljmp im */
5128 unsigned int selector
, offset
;
5132 ot
= dflag
? OT_LONG
: OT_WORD
;
5133 offset
= insn_get(s
, ot
);
5134 selector
= insn_get(s
, OT_WORD
);
5136 gen_op_movl_T0_im(selector
);
5137 gen_op_movl_T1_imu(offset
);
5140 case 0xeb: /* jmp Jb */
5141 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5142 tval
+= s
->pc
- s
->cs_base
;
5147 case 0x70 ... 0x7f: /* jcc Jb */
5148 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5150 case 0x180 ... 0x18f: /* jcc Jv */
5152 tval
= (int32_t)insn_get(s
, OT_LONG
);
5154 tval
= (int16_t)insn_get(s
, OT_WORD
);
5157 next_eip
= s
->pc
- s
->cs_base
;
5161 gen_jcc(s
, b
, tval
, next_eip
);
5164 case 0x190 ... 0x19f: /* setcc Gv */
5165 modrm
= ldub_code(s
->pc
++);
5167 gen_ldst_modrm(s
, modrm
, OT_BYTE
, OR_TMP0
, 1);
5169 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5170 ot
= dflag
+ OT_WORD
;
5171 modrm
= ldub_code(s
->pc
++);
5172 reg
= ((modrm
>> 3) & 7) | rex_r
;
5173 mod
= (modrm
>> 6) & 3;
5176 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5177 gen_op_ld_T1_A0(ot
+ s
->mem_index
);
5179 rm
= (modrm
& 7) | REX_B(s
);
5180 gen_op_mov_TN_reg(ot
, 1, rm
);
5182 gen_op_cmov_reg_T1_T0
[ot
- OT_WORD
][reg
]();
5185 /************************/
5187 case 0x9c: /* pushf */
5188 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PUSHF
))
5190 if (s
->vm86
&& s
->iopl
!= 3) {
5191 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5193 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5194 gen_op_set_cc_op(s
->cc_op
);
5195 gen_op_movl_T0_eflags();
5199 case 0x9d: /* popf */
5200 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_POPF
))
5202 if (s
->vm86
&& s
->iopl
!= 3) {
5203 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5208 gen_op_movl_eflags_T0_cpl0();
5210 gen_op_movw_eflags_T0_cpl0();
5213 if (s
->cpl
<= s
->iopl
) {
5215 gen_op_movl_eflags_T0_io();
5217 gen_op_movw_eflags_T0_io();
5221 gen_op_movl_eflags_T0();
5223 gen_op_movw_eflags_T0();
5228 s
->cc_op
= CC_OP_EFLAGS
;
5229 /* abort translation because TF flag may change */
5230 gen_jmp_im(s
->pc
- s
->cs_base
);
5234 case 0x9e: /* sahf */
5237 gen_op_mov_TN_reg(OT_BYTE
, 0, R_AH
);
5238 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5239 gen_op_set_cc_op(s
->cc_op
);
5240 gen_op_movb_eflags_T0();
5241 s
->cc_op
= CC_OP_EFLAGS
;
5243 case 0x9f: /* lahf */
5246 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5247 gen_op_set_cc_op(s
->cc_op
);
5248 gen_op_movl_T0_eflags();
5249 gen_op_mov_reg_T0(OT_BYTE
, R_AH
);
5251 case 0xf5: /* cmc */
5252 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5253 gen_op_set_cc_op(s
->cc_op
);
5255 s
->cc_op
= CC_OP_EFLAGS
;
5257 case 0xf8: /* clc */
5258 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5259 gen_op_set_cc_op(s
->cc_op
);
5261 s
->cc_op
= CC_OP_EFLAGS
;
5263 case 0xf9: /* stc */
5264 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5265 gen_op_set_cc_op(s
->cc_op
);
5267 s
->cc_op
= CC_OP_EFLAGS
;
5269 case 0xfc: /* cld */
5272 case 0xfd: /* std */
5276 /************************/
5277 /* bit operations */
5278 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5279 ot
= dflag
+ OT_WORD
;
5280 modrm
= ldub_code(s
->pc
++);
5281 op
= (modrm
>> 3) & 7;
5282 mod
= (modrm
>> 6) & 3;
5283 rm
= (modrm
& 7) | REX_B(s
);
5286 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5287 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
5289 gen_op_mov_TN_reg(ot
, 0, rm
);
5292 val
= ldub_code(s
->pc
++);
5293 gen_op_movl_T1_im(val
);
5297 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5298 s
->cc_op
= CC_OP_SARB
+ ot
;
5301 gen_op_st_T0_A0(ot
+ s
->mem_index
);
5303 gen_op_mov_reg_T0(ot
, rm
);
5304 gen_op_update_bt_cc();
5307 case 0x1a3: /* bt Gv, Ev */
5310 case 0x1ab: /* bts */
5313 case 0x1b3: /* btr */
5316 case 0x1bb: /* btc */
5319 ot
= dflag
+ OT_WORD
;
5320 modrm
= ldub_code(s
->pc
++);
5321 reg
= ((modrm
>> 3) & 7) | rex_r
;
5322 mod
= (modrm
>> 6) & 3;
5323 rm
= (modrm
& 7) | REX_B(s
);
5324 gen_op_mov_TN_reg(OT_LONG
, 1, reg
);
5326 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5327 /* specific case: we need to add a displacement */
5328 gen_op_add_bit_A0_T1
[ot
- OT_WORD
]();
5329 gen_op_ld_T0_A0(ot
+ s
->mem_index
);
5331 gen_op_mov_TN_reg(ot
, 0, rm
);
5333 gen_op_btx_T0_T1_cc
[ot
- OT_WORD
][op
]();
5334 s
->cc_op
= CC_OP_SARB
+ ot
;
5337 gen_op_st_T0_A0(ot
+ s
->mem_index
);
5339 gen_op_mov_reg_T0(ot
, rm
);
5340 gen_op_update_bt_cc();
5343 case 0x1bc: /* bsf */
5344 case 0x1bd: /* bsr */
5345 ot
= dflag
+ OT_WORD
;
5346 modrm
= ldub_code(s
->pc
++);
5347 reg
= ((modrm
>> 3) & 7) | rex_r
;
5348 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 0);
5349 /* NOTE: in order to handle the 0 case, we must load the
5350 result. It could be optimized with a generated jump */
5351 gen_op_mov_TN_reg(ot
, 1, reg
);
5352 gen_op_bsx_T0_cc
[ot
- OT_WORD
][b
& 1]();
5353 gen_op_mov_reg_T1(ot
, reg
);
5354 s
->cc_op
= CC_OP_LOGICB
+ ot
;
5356 /************************/
5358 case 0x27: /* daa */
5361 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5362 gen_op_set_cc_op(s
->cc_op
);
5364 s
->cc_op
= CC_OP_EFLAGS
;
5366 case 0x2f: /* das */
5369 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5370 gen_op_set_cc_op(s
->cc_op
);
5372 s
->cc_op
= CC_OP_EFLAGS
;
5374 case 0x37: /* aaa */
5377 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5378 gen_op_set_cc_op(s
->cc_op
);
5380 s
->cc_op
= CC_OP_EFLAGS
;
5382 case 0x3f: /* aas */
5385 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5386 gen_op_set_cc_op(s
->cc_op
);
5388 s
->cc_op
= CC_OP_EFLAGS
;
5390 case 0xd4: /* aam */
5393 val
= ldub_code(s
->pc
++);
5395 gen_exception(s
, EXCP00_DIVZ
, pc_start
- s
->cs_base
);
5398 s
->cc_op
= CC_OP_LOGICB
;
5401 case 0xd5: /* aad */
5404 val
= ldub_code(s
->pc
++);
5406 s
->cc_op
= CC_OP_LOGICB
;
5408 /************************/
5410 case 0x90: /* nop */
5411 /* XXX: xchg + rex handling */
5412 /* XXX: correct lock test for all insn */
5413 if (prefixes
& PREFIX_LOCK
)
5415 if (prefixes
& PREFIX_REPZ
) {
5416 gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_PAUSE
);
5419 case 0x9b: /* fwait */
5420 if ((s
->flags
& (HF_MP_MASK
| HF_TS_MASK
)) ==
5421 (HF_MP_MASK
| HF_TS_MASK
)) {
5422 gen_exception(s
, EXCP07_PREX
, pc_start
- s
->cs_base
);
5424 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5425 gen_op_set_cc_op(s
->cc_op
);
5426 gen_jmp_im(pc_start
- s
->cs_base
);
5430 case 0xcc: /* int3 */
5431 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5433 gen_interrupt(s
, EXCP03_INT3
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5435 case 0xcd: /* int N */
5436 val
= ldub_code(s
->pc
++);
5437 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5439 if (s
->vm86
&& s
->iopl
!= 3) {
5440 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5442 gen_interrupt(s
, val
, pc_start
- s
->cs_base
, s
->pc
- s
->cs_base
);
5445 case 0xce: /* into */
5448 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_SWINT
))
5450 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5451 gen_op_set_cc_op(s
->cc_op
);
5452 gen_jmp_im(pc_start
- s
->cs_base
);
5453 gen_op_into(s
->pc
- pc_start
);
5455 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5456 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_ICEBP
))
5459 gen_debug(s
, pc_start
- s
->cs_base
);
5462 tb_flush(cpu_single_env
);
5463 cpu_set_log(CPU_LOG_INT
| CPU_LOG_TB_IN_ASM
);
5466 case 0xfa: /* cli */
5468 if (s
->cpl
<= s
->iopl
) {
5471 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5477 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5481 case 0xfb: /* sti */
5483 if (s
->cpl
<= s
->iopl
) {
5486 /* interruptions are enabled only the first insn after sti */
5487 /* If several instructions disable interrupts, only the
5489 if (!(s
->tb
->flags
& HF_INHIBIT_IRQ_MASK
))
5490 gen_op_set_inhibit_irq();
5491 /* give a chance to handle pending irqs */
5492 gen_jmp_im(s
->pc
- s
->cs_base
);
5495 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5501 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5505 case 0x62: /* bound */
5508 ot
= dflag
? OT_LONG
: OT_WORD
;
5509 modrm
= ldub_code(s
->pc
++);
5510 reg
= (modrm
>> 3) & 7;
5511 mod
= (modrm
>> 6) & 3;
5514 gen_op_mov_TN_reg(ot
, 0, reg
);
5515 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5516 gen_jmp_im(pc_start
- s
->cs_base
);
5522 case 0x1c8 ... 0x1cf: /* bswap reg */
5523 reg
= (b
& 7) | REX_B(s
);
5524 #ifdef TARGET_X86_64
5526 gen_op_mov_TN_reg(OT_QUAD
, 0, reg
);
5527 tcg_gen_bswap_i64(cpu_T
[0], cpu_T
[0]);
5528 gen_op_mov_reg_T0(OT_QUAD
, reg
);
5532 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
5534 tmp0
= tcg_temp_new(TCG_TYPE_I32
);
5535 tcg_gen_trunc_i64_i32(tmp0
, cpu_T
[0]);
5536 tcg_gen_bswap_i32(tmp0
, tmp0
);
5537 tcg_gen_extu_i32_i64(cpu_T
[0], tmp0
);
5538 gen_op_mov_reg_T0(OT_LONG
, reg
);
5542 gen_op_mov_TN_reg(OT_LONG
, 0, reg
);
5543 tcg_gen_bswap_i32(cpu_T
[0], cpu_T
[0]);
5544 gen_op_mov_reg_T0(OT_LONG
, reg
);
5548 case 0xd6: /* salc */
5551 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5552 gen_op_set_cc_op(s
->cc_op
);
5555 case 0xe0: /* loopnz */
5556 case 0xe1: /* loopz */
5557 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5558 gen_op_set_cc_op(s
->cc_op
);
5560 case 0xe2: /* loop */
5561 case 0xe3: /* jecxz */
5565 tval
= (int8_t)insn_get(s
, OT_BYTE
);
5566 next_eip
= s
->pc
- s
->cs_base
;
5571 l1
= gen_new_label();
5572 l2
= gen_new_label();
5575 gen_op_jz_ecx
[s
->aflag
](l1
);
5577 gen_op_dec_ECX
[s
->aflag
]();
5580 gen_op_loop
[s
->aflag
][b
](l1
);
5583 gen_jmp_im(next_eip
);
5584 gen_op_jmp_label(l2
);
5591 case 0x130: /* wrmsr */
5592 case 0x132: /* rdmsr */
5594 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5598 retval
= gen_svm_check_intercept_param(s
, pc_start
, SVM_EXIT_MSR
, 0);
5601 retval
= gen_svm_check_intercept_param(s
, pc_start
, SVM_EXIT_MSR
, 1);
5608 case 0x131: /* rdtsc */
5609 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_RDTSC
))
5611 gen_jmp_im(pc_start
- s
->cs_base
);
5614 case 0x133: /* rdpmc */
5615 gen_jmp_im(pc_start
- s
->cs_base
);
5618 case 0x134: /* sysenter */
5622 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5624 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5625 gen_op_set_cc_op(s
->cc_op
);
5626 s
->cc_op
= CC_OP_DYNAMIC
;
5628 gen_jmp_im(pc_start
- s
->cs_base
);
5633 case 0x135: /* sysexit */
5637 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5639 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5640 gen_op_set_cc_op(s
->cc_op
);
5641 s
->cc_op
= CC_OP_DYNAMIC
;
5643 gen_jmp_im(pc_start
- s
->cs_base
);
5648 #ifdef TARGET_X86_64
5649 case 0x105: /* syscall */
5650 /* XXX: is it usable in real mode ? */
5651 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5652 gen_op_set_cc_op(s
->cc_op
);
5653 s
->cc_op
= CC_OP_DYNAMIC
;
5655 gen_jmp_im(pc_start
- s
->cs_base
);
5656 gen_op_syscall(s
->pc
- pc_start
);
5659 case 0x107: /* sysret */
5661 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5663 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5664 gen_op_set_cc_op(s
->cc_op
);
5665 s
->cc_op
= CC_OP_DYNAMIC
;
5667 gen_jmp_im(pc_start
- s
->cs_base
);
5668 gen_op_sysret(s
->dflag
);
5669 /* condition codes are modified only in long mode */
5671 s
->cc_op
= CC_OP_EFLAGS
;
5676 case 0x1a2: /* cpuid */
5677 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_CPUID
))
5681 case 0xf4: /* hlt */
5683 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5685 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_HLT
))
5687 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5688 gen_op_set_cc_op(s
->cc_op
);
5689 gen_jmp_im(s
->pc
- s
->cs_base
);
5695 modrm
= ldub_code(s
->pc
++);
5696 mod
= (modrm
>> 6) & 3;
5697 op
= (modrm
>> 3) & 7;
5700 if (!s
->pe
|| s
->vm86
)
5702 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_READ
))
5704 gen_op_movl_T0_env(offsetof(CPUX86State
,ldt
.selector
));
5708 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5711 if (!s
->pe
|| s
->vm86
)
5714 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5716 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_LDTR_WRITE
))
5718 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5719 gen_jmp_im(pc_start
- s
->cs_base
);
5724 if (!s
->pe
|| s
->vm86
)
5726 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_READ
))
5728 gen_op_movl_T0_env(offsetof(CPUX86State
,tr
.selector
));
5732 gen_ldst_modrm(s
, modrm
, ot
, OR_TMP0
, 1);
5735 if (!s
->pe
|| s
->vm86
)
5738 gen_exception(s
, EXCP0D_GPF
, pc_start
- s
->cs_base
);
5740 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_TR_WRITE
))
5742 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5743 gen_jmp_im(pc_start
- s
->cs_base
);
5749 if (!s
->pe
|| s
->vm86
)
5751 gen_ldst_modrm(s
, modrm
, OT_WORD
, OR_TMP0
, 0);
5752 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5753 gen_op_set_cc_op(s
->cc_op
);
5758 s
->cc_op
= CC_OP_EFLAGS
;
5765 modrm
= ldub_code(s
->pc
++);
5766 mod
= (modrm
>> 6) & 3;
5767 op
= (modrm
>> 3) & 7;
5773 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_GDTR_READ
))
5775 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5776 gen_op_movl_T0_env(offsetof(CPUX86State
, gdt
.limit
));
5777 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5778 gen_add_A0_im(s
, 2);
5779 gen_op_movtl_T0_env(offsetof(CPUX86State
, gdt
.base
));
5781 gen_op_andl_T0_im(0xffffff);
5782 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
5787 case 0: /* monitor */
5788 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5791 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_MONITOR
))
5793 gen_jmp_im(pc_start
- s
->cs_base
);
5794 #ifdef TARGET_X86_64
5795 if (s
->aflag
== 2) {
5796 gen_op_movq_A0_reg(R_EBX
);
5797 gen_op_addq_A0_AL();
5801 gen_op_movl_A0_reg(R_EBX
);
5802 gen_op_addl_A0_AL();
5804 gen_op_andl_A0_ffff();
5806 gen_add_A0_ds_seg(s
);
5810 if (!(s
->cpuid_ext_features
& CPUID_EXT_MONITOR
) ||
5813 if (s
->cc_op
!= CC_OP_DYNAMIC
) {
5814 gen_op_set_cc_op(s
->cc_op
);
5815 s
->cc_op
= CC_OP_DYNAMIC
;
5817 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_MWAIT
))
5819 gen_jmp_im(s
->pc
- s
->cs_base
);
5827 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_IDTR_READ
))
5829 gen_lea_modrm(s
, modrm
, ®_addr
, &offset_addr
);
5830 gen_op_movl_T0_env(offsetof(CPUX86State
, idt
.limit
));
5831 gen_op_st_T0_A0(OT_WORD
+ s
->mem_index
);
5832 gen_add_A0_im(s
, 2);
5833 gen_op_movtl_T0_env(offsetof(CPUX86State
, idt
.base
));
5835 gen_op_andl_T0_im(0xffffff);
5836 gen_op_st_T0_A0(CODE64(s
) + OT_LONG
+ s
->mem_index
);
5844 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMRUN
))
5846 if (s
->cc_op
!= CC_OP_DYNAMIC
)
5847 gen_op_set_cc_op(s
->cc_op
);
5848 gen_jmp_im(s
->pc
- s
->cs_base
);
5850 s
->cc_op
= CC_OP_EFLAGS
;
5853 case 1: /* VMMCALL */
5854 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMMCALL
))
5856 /* FIXME: cause #UD if hflags & SVM */
5859 case 2: /* VMLOAD */
5860 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMLOAD
))
5864 case 3: /* VMSAVE */
5865 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_VMSAVE
))
5870 if (gen_svm_check_intercept(s
, pc_start
, SVM_EXIT_STGI))
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
        case 6: /* SKINIT */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
        case 7: /* INVLPGA */
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
        } else if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start,
                op == 2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            gen_op_andl_T0_im(0xffffff);
            gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
            gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
            gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
            gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            gen_jmp_im(s->pc - s->cs_base);
        case 7: /* invlpg */
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
#ifdef TARGET_X86_64
            if (CODE64(s) && rm == 0) {
                gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_jmp_im(s->pc - s->cs_base);
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        /* d_ot is the size of destination */
        d_ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 0, rm);
        if (d_ot == OT_QUAD)
            gen_op_movslq_T0_T0();
        gen_op_mov_reg_T0(d_ot, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        if (d_ot == OT_QUAD) {
            gen_op_lds_T0_A0(OT_LONG + s->mem_index);
            gen_op_ld_T0_A0(OT_LONG + s->mem_index);
        gen_op_mov_reg_T0(d_ot, reg);
        if (!s->pe || s->vm86)
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T0_A0(ot + s->mem_index);
        gen_op_mov_TN_reg(ot, 0, rm);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
        gen_op_st_T0_A0(ot + s->mem_index);
        gen_op_mov_reg_T0(ot, rm);
        gen_op_arpl_update();
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg(ot, 1, reg);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1(ot, reg);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
        default: /* nop (multi byte) */
            gen_nop_modrm(s, modrm);
    case 0x119 ... 0x11f: /* nop (multi byte) */
        modrm = ldub_code(s->pc++);
        gen_nop_modrm(s, modrm);
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        modrm = ldub_code(s->pc++);
        if ((modrm & 0xc0) != 0xc0)
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
        gen_op_mov_TN_reg(ot, 0, rm);
        gen_op_movl_crN_T0(reg);
        gen_jmp_im(s->pc - s->cs_base);
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
#if !defined(CONFIG_USER_ONLY)
        gen_op_movtl_T0_cr8();
        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
        gen_op_mov_reg_T0(ot, rm);
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        modrm = ldub_code(s->pc++);
        if ((modrm & 0xc0) != 0xc0)
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* XXX: do it dynamically with CR4.DE bit */
        if (reg == 4 || reg == 5 || reg >= 8)
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
        gen_op_mov_TN_reg(ot, 0, rm);
        gen_op_movl_drN_T0(reg);
        gen_jmp_im(s->pc - s->cs_base);
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
        gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
        gen_op_mov_reg_T0(ot, rm);
    case 0x106: /* clts */
        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
        /* abort block because static cpu state changed */
        gen_jmp_im(s->pc - s->cs_base);
    /* MMX/SSE/SSE2/PNI support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxsave_A0((s->dflag == 2));
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxrstor_A0((s->dflag == 2));
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(OT_LONG + s->mem_index);
            gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
            gen_op_st_T0_A0(OT_LONG + s->mem_index);
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                if (!(s->cpuid_features & CPUID_SSE))
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
    case 0x10d: /* prefetch */
        modrm = ldub_code(s->pc++);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
    case 0x1aa: /* rsm */
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
        if (!(s->flags & HF_SMM_MASK))
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        gen_jmp_im(s->pc - s->cs_base);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
    if (s->prefix & PREFIX_LOCK)
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);

#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
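
/* Added note: CC_OSZAPC and CC_OSZAP group the individual condition-code
   bits (CC_O, CC_S, CC_Z, CC_A, CC_P, CC_C are the overflow, sign, zero,
   adjust, parity and carry bits of EFLAGS).  The tables below record, for
   each micro-op, which of these bits it reads and which it writes. */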
/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,
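    /* (x86 INC/DEC leave the carry flag unchanged, so the flag-update op
       for inc/dec must read the existing C value in order to preserve it.) */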
    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,

#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

#ifndef CONFIG_USER_ONLY

/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)

#ifndef CONFIG_USER_ONLY

/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

#ifndef CONFIG_USER_ONLY
DEF_SIMPLER(_kernel)

static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);

void optimize_flags_init(void)
    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)

    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");

    /* the helpers are only registered to print debug info */
    TCG_HELPER(helper_divl_EAX_T0);
    TCG_HELPER(helper_idivl_EAX_T0);

/* CPU flags computation optimization: we move backward thru the
   generated code to see which flags are needed. The operation is
   modified if suitable */
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags must be modified */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
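
/* A minimal usage sketch of the liveness pass above (illustration only, not
   part of the translator; the function name and buffer contents are
   hypothetical and assume optimize_flags_init() has already filled the
   opc_simpler[] defaults): */
#if 0
static void example_optimize_flags(void)
{
    uint16_t buf[3];

    buf[0] = INDEX_op_update1_cc;   /* writes OSZAPC, overwritten below */
    buf[1] = INDEX_op_update2_cc;   /* writes OSZAPC, Z consumed by the jump */
    buf[2] = INDEX_op_jz_subl;      /* reads CC_Z */

    optimize_flags(buf, 3);
    /* Walking backward from the end (where all flags are assumed live):
       buf[1] stays because its flags are still live, but nothing reads the
       flags of buf[0] before buf[1] overwrites them, so buf[0] is rewritten
       to its simpler form, INDEX_op_nop. */
}
#endif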
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    cs_base = tb->cs_base;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    if (flags & HF_SOFTMMU_MASK) {
        dc->mem_index = 2 * 4;
        dc->mem_index = 1 * 4;
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;

        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
            j = gen_opc_ptr - gen_opc_buf;
                gen_opc_instr_start[lj++] = 0;
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
        j = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[lj++] = 0;

    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    tb->size = pc_ptr - pc_start;

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
    return gen_intermediate_code_internal(env, tb, 0);

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
    return gen_intermediate_code_internal(env, tb, 1);