/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>

#include "cpu.h"
#include "disas.h"
#include "tcg-op.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"
#define PREFIX_REPZ  0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK  0x04
#define PREFIX_DATA  0x08
#define PREFIX_ADR   0x10
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(...) __VA_ARGS__
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
//#define MACRO_TEST   1

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv_i32 cpu_cc_op;
static TCGv cpu_regs[CPU_NB_REGS];
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp4;
static TCGv_ptr cpu_ptr0, cpu_ptr1;
static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
static TCGv_i64 cpu_tmp1_i64;
static TCGv cpu_tmp5;

static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];

#include "gen-icount.h"
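/* x86_64_hregs is set when the current instruction carries a REX prefix:
   with REX, byte-operand register encodings 4-7 select SPL/BPL/SIL/DIL
   rather than the legacy high-byte registers AH/CH/DH/BH. */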
#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* nonzero if any of DS/ES/SS has a nonzero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
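/* Condition codes are evaluated lazily: arithmetic ops leave their
   operands/result in cpu_cc_src/cpu_cc_dst and record which operation
   produced them in cc_op; the eflags bits are only materialised (via
   the cc_compute helpers) when something actually consumes them. */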
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(HOST_WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
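/* Write t0 to register 'reg' with operand size 'ot'. For OT_BYTE,
   encodings 4-7 normally address the high byte of registers 0-3
   (AH/CH/DH/BH); under a REX prefix (x86_64_hregs) they address the
   low byte of SPL/BPL/SIL/DIL instead, hence the deposit at bit
   offset 8 in the legacy case only. */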
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 8);
        } else {
            tcg_gen_deposit_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], t0, 8, 8);
        }
        break;
    case OT_WORD:
        tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], t0, 0, 16);
        break;
    default: /* XXX this shouldn't be reached; abort? */
    case OT_LONG:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], t0);
        break;
#ifdef TARGET_X86_64
    case OT_QUAD:
        tcg_gen_mov_tl(cpu_regs[reg], t0);
        break;
#endif
    }
}
301 static inline void gen_op_mov_reg_T0(int ot, int reg)
303 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
306 static inline void gen_op_mov_reg_T1(int ot, int reg)
308 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
311 static inline void gen_op_mov_reg_A0(int size, int reg)
313 switch(size) {
314 case 0:
315 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_A0, 0, 16);
316 break;
317 default: /* XXX this shouldn't be reached; abort? */
318 case 1:
319 /* For x86_64, this sets the higher half of register to zero.
320 For i386, this is equivalent to a mov. */
321 tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
322 break;
323 #ifdef TARGET_X86_64
324 case 2:
325 tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
326 break;
327 #endif
331 static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
333 switch(ot) {
334 case OT_BYTE:
335 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
336 goto std_case;
337 } else {
338 tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
339 tcg_gen_ext8u_tl(t0, t0);
341 break;
342 default:
343 std_case:
344 tcg_gen_mov_tl(t0, cpu_regs[reg]);
345 break;
349 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
351 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
354 static inline void gen_op_movl_A0_reg(int reg)
356 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
359 static inline void gen_op_addl_A0_im(int32_t val)
361 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
362 #ifdef TARGET_X86_64
363 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
364 #endif
367 #ifdef TARGET_X86_64
368 static inline void gen_op_addq_A0_im(int64_t val)
370 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
372 #endif
374 static void gen_add_A0_im(DisasContext *s, int val)
376 #ifdef TARGET_X86_64
377 if (CODE64(s))
378 gen_op_addq_A0_im(val);
379 else
380 #endif
381 gen_op_addl_A0_im(val);
384 static inline void gen_op_addl_T0_T1(void)
386 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
389 static inline void gen_op_jmp_T0(void)
391 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
394 static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
396 switch(size) {
397 case 0:
398 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
399 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
400 break;
401 case 1:
402 tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
403 /* For x86_64, this sets the higher half of register to zero.
404 For i386, this is equivalent to a nop. */
405 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
406 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
407 break;
408 #ifdef TARGET_X86_64
409 case 2:
410 tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
411 break;
412 #endif
416 static inline void gen_op_add_reg_T0(int size, int reg)
418 switch(size) {
419 case 0:
420 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
421 tcg_gen_deposit_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0, 0, 16);
422 break;
423 case 1:
424 tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
425 /* For x86_64, this sets the higher half of register to zero.
426 For i386, this is equivalent to a nop. */
427 tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
428 tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
429 break;
430 #ifdef TARGET_X86_64
431 case 2:
432 tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
433 break;
434 #endif
438 static inline void gen_op_set_cc_op(int32_t val)
440 tcg_gen_movi_i32(cpu_cc_op, val);
443 static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
445 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
446 if (shift != 0)
447 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
448 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
449 /* For x86_64, this sets the higher half of register to zero.
450 For i386, this is equivalent to a nop. */
451 tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
454 static inline void gen_op_movl_A0_seg(int reg)
456 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
459 static inline void gen_op_addl_A0_seg(int reg)
461 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
462 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
463 #ifdef TARGET_X86_64
464 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
465 #endif
468 #ifdef TARGET_X86_64
469 static inline void gen_op_movq_A0_seg(int reg)
471 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
474 static inline void gen_op_addq_A0_seg(int reg)
476 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
477 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
480 static inline void gen_op_movq_A0_reg(int reg)
482 tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
485 static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
487 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
488 if (shift != 0)
489 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
490 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
492 #endif
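/* Guest memory helpers: 'idx' packs the operand size in its low two
   bits (0=byte, 1=word, 2=long, 3=quad) and s->mem_index, biased by
   one, in the bits above; hence the "(idx >> 2) - 1" decode below. */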
494 static inline void gen_op_lds_T0_A0(int idx)
496 int mem_index = (idx >> 2) - 1;
497 switch(idx & 3) {
498 case 0:
499 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
500 break;
501 case 1:
502 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
503 break;
504 default:
505 case 2:
506 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
507 break;
511 static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
513 int mem_index = (idx >> 2) - 1;
514 switch(idx & 3) {
515 case 0:
516 tcg_gen_qemu_ld8u(t0, a0, mem_index);
517 break;
518 case 1:
519 tcg_gen_qemu_ld16u(t0, a0, mem_index);
520 break;
521 case 2:
522 tcg_gen_qemu_ld32u(t0, a0, mem_index);
523 break;
524 default:
525 case 3:
526 /* Should never happen on 32-bit targets. */
527 #ifdef TARGET_X86_64
528 tcg_gen_qemu_ld64(t0, a0, mem_index);
529 #endif
530 break;
534 /* XXX: always use ldu or lds */
535 static inline void gen_op_ld_T0_A0(int idx)
537 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
540 static inline void gen_op_ldu_T0_A0(int idx)
542 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
545 static inline void gen_op_ld_T1_A0(int idx)
547 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
550 static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
552 int mem_index = (idx >> 2) - 1;
553 switch(idx & 3) {
554 case 0:
555 tcg_gen_qemu_st8(t0, a0, mem_index);
556 break;
557 case 1:
558 tcg_gen_qemu_st16(t0, a0, mem_index);
559 break;
560 case 2:
561 tcg_gen_qemu_st32(t0, a0, mem_index);
562 break;
563 default:
564 case 3:
565 /* Should never happen on 32-bit targets. */
566 #ifdef TARGET_X86_64
567 tcg_gen_qemu_st64(t0, a0, mem_index);
568 #endif
569 break;
573 static inline void gen_op_st_T0_A0(int idx)
575 gen_op_st_v(idx, cpu_T[0], cpu_A0);
578 static inline void gen_op_st_T1_A0(int idx)
580 gen_op_st_v(idx, cpu_T[1], cpu_A0);
583 static inline void gen_jmp_im(target_ulong pc)
585 tcg_gen_movi_tl(cpu_tmp0, pc);
586 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
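/* Load the per-iteration string increment into T0: env->df holds +1 or
   -1 depending on EFLAGS.DF, and shifting it left by the operand size
   log2 yields +/-1, 2, 4 or 8. */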
645 static inline void gen_op_movl_T0_Dshift(int ot)
647 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
648 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
651 static void gen_extu(int ot, TCGv reg)
653 switch(ot) {
654 case OT_BYTE:
655 tcg_gen_ext8u_tl(reg, reg);
656 break;
657 case OT_WORD:
658 tcg_gen_ext16u_tl(reg, reg);
659 break;
660 case OT_LONG:
661 tcg_gen_ext32u_tl(reg, reg);
662 break;
663 default:
664 break;
668 static void gen_exts(int ot, TCGv reg)
670 switch(ot) {
671 case OT_BYTE:
672 tcg_gen_ext8s_tl(reg, reg);
673 break;
674 case OT_WORD:
675 tcg_gen_ext16s_tl(reg, reg);
676 break;
677 case OT_LONG:
678 tcg_gen_ext32s_tl(reg, reg);
679 break;
680 default:
681 break;
685 static inline void gen_op_jnz_ecx(int size, int label1)
687 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
688 gen_extu(size + 1, cpu_tmp0);
689 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
692 static inline void gen_op_jz_ecx(int size, int label1)
694 tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
695 gen_extu(size + 1, cpu_tmp0);
696 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_inb(v, n); break;
    case 1: gen_helper_inw(v, n); break;
    case 2: gen_helper_inl(v, n); break;
    }
}

static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_outb(v, n); break;
    case 1: gen_helper_outw(v, n); break;
    case 2: gen_helper_outl(v, n); break;
    }
}
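/* Check I/O permissions before an IN/OUT instruction: when CPL > IOPL
   in protected mode (or in vm86 mode) the helpers consult the TSS I/O
   permission bitmap, and SVM I/O intercepts are checked when active. */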
719 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
720 uint32_t svm_flags)
722 int state_saved;
723 target_ulong next_eip;
725 state_saved = 0;
726 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
727 if (s->cc_op != CC_OP_DYNAMIC)
728 gen_op_set_cc_op(s->cc_op);
729 gen_jmp_im(cur_eip);
730 state_saved = 1;
731 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
732 switch (ot) {
733 case 0: gen_helper_check_iob(cpu_tmp2_i32); break;
734 case 1: gen_helper_check_iow(cpu_tmp2_i32); break;
735 case 2: gen_helper_check_iol(cpu_tmp2_i32); break;
738 if(s->flags & HF_SVMI_MASK) {
739 if (!state_saved) {
740 if (s->cc_op != CC_OP_DYNAMIC)
741 gen_op_set_cc_op(s->cc_op);
742 gen_jmp_im(cur_eip);
744 svm_flags |= (1 << (4 + ot));
745 next_eip = s->pc - s->cs_base;
746 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
747 gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags),
748 tcg_const_i32(next_eip - cur_eip));
752 static inline void gen_movs(DisasContext *s, int ot)
754 gen_string_movl_A0_ESI(s);
755 gen_op_ld_T0_A0(ot + s->mem_index);
756 gen_string_movl_A0_EDI(s);
757 gen_op_st_T0_A0(ot + s->mem_index);
758 gen_op_movl_T0_Dshift(ot);
759 gen_op_add_reg_T0(s->aflag, R_ESI);
760 gen_op_add_reg_T0(s->aflag, R_EDI);
763 static inline void gen_update_cc_op(DisasContext *s)
765 if (s->cc_op != CC_OP_DYNAMIC) {
766 gen_op_set_cc_op(s->cc_op);
767 s->cc_op = CC_OP_DYNAMIC;
771 static void gen_op_update1_cc(void)
773 tcg_gen_discard_tl(cpu_cc_src);
774 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
777 static void gen_op_update2_cc(void)
779 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
780 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
783 static inline void gen_op_cmpl_T0_T1_cc(void)
785 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
786 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
789 static inline void gen_op_testl_T0_T1_cc(void)
791 tcg_gen_discard_tl(cpu_cc_src);
792 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
795 static void gen_op_update_neg_cc(void)
797 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
798 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
801 /* compute eflags.C to reg */
802 static void gen_compute_eflags_c(TCGv reg)
804 gen_helper_cc_compute_c(cpu_tmp2_i32, cpu_cc_op);
805 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
808 /* compute all eflags to cc_src */
809 static void gen_compute_eflags(TCGv reg)
811 gen_helper_cc_compute_all(cpu_tmp2_i32, cpu_cc_op);
812 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
815 static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
817 if (s->cc_op != CC_OP_DYNAMIC)
818 gen_op_set_cc_op(s->cc_op);
819 switch(jcc_op) {
820 case JCC_O:
821 gen_compute_eflags(cpu_T[0]);
822 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
823 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
824 break;
825 case JCC_B:
826 gen_compute_eflags_c(cpu_T[0]);
827 break;
828 case JCC_Z:
829 gen_compute_eflags(cpu_T[0]);
830 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
831 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
832 break;
833 case JCC_BE:
834 gen_compute_eflags(cpu_tmp0);
835 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
836 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
837 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
838 break;
839 case JCC_S:
840 gen_compute_eflags(cpu_T[0]);
841 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
842 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
843 break;
844 case JCC_P:
845 gen_compute_eflags(cpu_T[0]);
846 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
847 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
848 break;
849 case JCC_L:
850 gen_compute_eflags(cpu_tmp0);
851 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
852 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
853 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
854 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
855 break;
856 default:
857 case JCC_LE:
858 gen_compute_eflags(cpu_tmp0);
859 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
860 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
861 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
862 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
863 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
864 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
865 break;
869 /* return true if setcc_slow is not needed (WARNING: must be kept in
870 sync with gen_jcc1) */
871 static int is_fast_jcc_case(DisasContext *s, int b)
873 int jcc_op;
874 jcc_op = (b >> 1) & 7;
875 switch(s->cc_op) {
876 /* we optimize the cmp/jcc case */
877 case CC_OP_SUBB:
878 case CC_OP_SUBW:
879 case CC_OP_SUBL:
880 case CC_OP_SUBQ:
881 if (jcc_op == JCC_O || jcc_op == JCC_P)
882 goto slow_jcc;
883 break;
885 /* some jumps are easy to compute */
886 case CC_OP_ADDB:
887 case CC_OP_ADDW:
888 case CC_OP_ADDL:
889 case CC_OP_ADDQ:
891 case CC_OP_LOGICB:
892 case CC_OP_LOGICW:
893 case CC_OP_LOGICL:
894 case CC_OP_LOGICQ:
896 case CC_OP_INCB:
897 case CC_OP_INCW:
898 case CC_OP_INCL:
899 case CC_OP_INCQ:
901 case CC_OP_DECB:
902 case CC_OP_DECW:
903 case CC_OP_DECL:
904 case CC_OP_DECQ:
906 case CC_OP_SHLB:
907 case CC_OP_SHLW:
908 case CC_OP_SHLL:
909 case CC_OP_SHLQ:
910 if (jcc_op != JCC_Z && jcc_op != JCC_S)
911 goto slow_jcc;
912 break;
913 default:
914 slow_jcc:
915 return 0;
917 return 1;
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
922 static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
924 int inv, jcc_op, size, cond;
925 TCGv t0;
927 inv = b & 1;
928 jcc_op = (b >> 1) & 7;
930 switch(cc_op) {
931 /* we optimize the cmp/jcc case */
932 case CC_OP_SUBB:
933 case CC_OP_SUBW:
934 case CC_OP_SUBL:
935 case CC_OP_SUBQ:
937 size = cc_op - CC_OP_SUBB;
938 switch(jcc_op) {
939 case JCC_Z:
940 fast_jcc_z:
941 switch(size) {
942 case 0:
943 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
944 t0 = cpu_tmp0;
945 break;
946 case 1:
947 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
948 t0 = cpu_tmp0;
949 break;
950 #ifdef TARGET_X86_64
951 case 2:
952 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
953 t0 = cpu_tmp0;
954 break;
955 #endif
956 default:
957 t0 = cpu_cc_dst;
958 break;
960 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
961 break;
962 case JCC_S:
963 fast_jcc_s:
964 switch(size) {
965 case 0:
966 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
967 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
968 0, l1);
969 break;
970 case 1:
971 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
972 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
973 0, l1);
974 break;
975 #ifdef TARGET_X86_64
976 case 2:
977 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
978 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
979 0, l1);
980 break;
981 #endif
982 default:
983 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
984 0, l1);
985 break;
987 break;
989 case JCC_B:
990 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
991 goto fast_jcc_b;
992 case JCC_BE:
993 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
994 fast_jcc_b:
995 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
996 switch(size) {
997 case 0:
998 t0 = cpu_tmp0;
999 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1000 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1001 break;
1002 case 1:
1003 t0 = cpu_tmp0;
1004 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1005 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1006 break;
1007 #ifdef TARGET_X86_64
1008 case 2:
1009 t0 = cpu_tmp0;
1010 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1011 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1012 break;
1013 #endif
1014 default:
1015 t0 = cpu_cc_src;
1016 break;
1018 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1019 break;
1021 case JCC_L:
1022 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1023 goto fast_jcc_l;
1024 case JCC_LE:
1025 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1026 fast_jcc_l:
1027 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1028 switch(size) {
1029 case 0:
1030 t0 = cpu_tmp0;
1031 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1032 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1033 break;
1034 case 1:
1035 t0 = cpu_tmp0;
1036 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1037 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1038 break;
1039 #ifdef TARGET_X86_64
1040 case 2:
1041 t0 = cpu_tmp0;
1042 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1043 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1044 break;
1045 #endif
1046 default:
1047 t0 = cpu_cc_src;
1048 break;
1050 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1051 break;
1053 default:
1054 goto slow_jcc;
1056 break;
1058 /* some jumps are easy to compute */
1059 case CC_OP_ADDB:
1060 case CC_OP_ADDW:
1061 case CC_OP_ADDL:
1062 case CC_OP_ADDQ:
1064 case CC_OP_ADCB:
1065 case CC_OP_ADCW:
1066 case CC_OP_ADCL:
1067 case CC_OP_ADCQ:
1069 case CC_OP_SBBB:
1070 case CC_OP_SBBW:
1071 case CC_OP_SBBL:
1072 case CC_OP_SBBQ:
1074 case CC_OP_LOGICB:
1075 case CC_OP_LOGICW:
1076 case CC_OP_LOGICL:
1077 case CC_OP_LOGICQ:
1079 case CC_OP_INCB:
1080 case CC_OP_INCW:
1081 case CC_OP_INCL:
1082 case CC_OP_INCQ:
1084 case CC_OP_DECB:
1085 case CC_OP_DECW:
1086 case CC_OP_DECL:
1087 case CC_OP_DECQ:
1089 case CC_OP_SHLB:
1090 case CC_OP_SHLW:
1091 case CC_OP_SHLL:
1092 case CC_OP_SHLQ:
1094 case CC_OP_SARB:
1095 case CC_OP_SARW:
1096 case CC_OP_SARL:
1097 case CC_OP_SARQ:
1098 switch(jcc_op) {
1099 case JCC_Z:
1100 size = (cc_op - CC_OP_ADDB) & 3;
1101 goto fast_jcc_z;
1102 case JCC_S:
1103 size = (cc_op - CC_OP_ADDB) & 3;
1104 goto fast_jcc_s;
1105 default:
1106 goto slow_jcc;
1108 break;
1109 default:
1110 slow_jcc:
1111 gen_setcc_slow_T0(s, jcc_op);
1112 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1113 cpu_T[0], 0, l1);
1114 break;
1118 /* XXX: does not work with gdbstub "ice" single step - not a
1119 serious problem */
1120 static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1122 int l1, l2;
1124 l1 = gen_new_label();
1125 l2 = gen_new_label();
1126 gen_op_jnz_ecx(s->aflag, l1);
1127 gen_set_label(l2);
1128 gen_jmp_tb(s, next_eip, 1);
1129 gen_set_label(l1);
1130 return l2;
1133 static inline void gen_stos(DisasContext *s, int ot)
1135 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1136 gen_string_movl_A0_EDI(s);
1137 gen_op_st_T0_A0(ot + s->mem_index);
1138 gen_op_movl_T0_Dshift(ot);
1139 gen_op_add_reg_T0(s->aflag, R_EDI);
1142 static inline void gen_lods(DisasContext *s, int ot)
1144 gen_string_movl_A0_ESI(s);
1145 gen_op_ld_T0_A0(ot + s->mem_index);
1146 gen_op_mov_reg_T0(ot, R_EAX);
1147 gen_op_movl_T0_Dshift(ot);
1148 gen_op_add_reg_T0(s->aflag, R_ESI);
1151 static inline void gen_scas(DisasContext *s, int ot)
1153 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1154 gen_string_movl_A0_EDI(s);
1155 gen_op_ld_T1_A0(ot + s->mem_index);
1156 gen_op_cmpl_T0_T1_cc();
1157 gen_op_movl_T0_Dshift(ot);
1158 gen_op_add_reg_T0(s->aflag, R_EDI);
1161 static inline void gen_cmps(DisasContext *s, int ot)
1163 gen_string_movl_A0_ESI(s);
1164 gen_op_ld_T0_A0(ot + s->mem_index);
1165 gen_string_movl_A0_EDI(s);
1166 gen_op_ld_T1_A0(ot + s->mem_index);
1167 gen_op_cmpl_T0_T1_cc();
1168 gen_op_movl_T0_Dshift(ot);
1169 gen_op_add_reg_T0(s->aflag, R_ESI);
1170 gen_op_add_reg_T0(s->aflag, R_EDI);
1173 static inline void gen_ins(DisasContext *s, int ot)
1175 if (use_icount)
1176 gen_io_start();
1177 gen_string_movl_A0_EDI(s);
1178 /* Note: we must do this dummy write first to be restartable in
1179 case of page fault. */
1180 gen_op_movl_T0_0();
1181 gen_op_st_T0_A0(ot + s->mem_index);
1182 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1183 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1184 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1185 gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
1186 gen_op_st_T0_A0(ot + s->mem_index);
1187 gen_op_movl_T0_Dshift(ot);
1188 gen_op_add_reg_T0(s->aflag, R_EDI);
1189 if (use_icount)
1190 gen_io_end();
1193 static inline void gen_outs(DisasContext *s, int ot)
1195 if (use_icount)
1196 gen_io_start();
1197 gen_string_movl_A0_ESI(s);
1198 gen_op_ld_T0_A0(ot + s->mem_index);
1200 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1201 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1202 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1203 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1204 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
1206 gen_op_movl_T0_Dshift(ot);
1207 gen_op_add_reg_T0(s->aflag, R_ESI);
1208 if (use_icount)
1209 gen_io_end();
/* same method as Valgrind: we generate jumps to the current or next
   instruction */
1214 #define GEN_REPZ(op) \
1215 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1216 target_ulong cur_eip, target_ulong next_eip) \
1218 int l2;\
1219 gen_update_cc_op(s); \
1220 l2 = gen_jz_ecx_string(s, next_eip); \
1221 gen_ ## op(s, ot); \
1222 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1223 /* a loop would cause two single step exceptions if ECX = 1 \
1224 before rep string_insn */ \
1225 if (!s->jmp_opt) \
1226 gen_op_jz_ecx(s->aflag, l2); \
1227 gen_jmp(s, cur_eip); \
1230 #define GEN_REPZ2(op) \
1231 static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1232 target_ulong cur_eip, \
1233 target_ulong next_eip, \
1234 int nz) \
1236 int l2;\
1237 gen_update_cc_op(s); \
1238 l2 = gen_jz_ecx_string(s, next_eip); \
1239 gen_ ## op(s, ot); \
1240 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1241 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1242 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1243 if (!s->jmp_opt) \
1244 gen_op_jz_ecx(s->aflag, l2); \
1245 gen_jmp(s, cur_eip); \
1248 GEN_REPZ(movs)
1249 GEN_REPZ(stos)
1250 GEN_REPZ(lods)
1251 GEN_REPZ(ins)
1252 GEN_REPZ(outs)
1253 GEN_REPZ2(scas)
1254 GEN_REPZ2(cmps)
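/* Each REP expansion above emits: exit if ECX is zero, one string
   iteration, an ECX decrement, then a jump back to the current
   instruction, so each iteration re-executes the whole REP insn and
   the loop remains interruptible between iterations. */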
1256 static void gen_helper_fp_arith_ST0_FT0(int op)
1258 switch (op) {
1259 case 0: gen_helper_fadd_ST0_FT0(); break;
1260 case 1: gen_helper_fmul_ST0_FT0(); break;
1261 case 2: gen_helper_fcom_ST0_FT0(); break;
1262 case 3: gen_helper_fcom_ST0_FT0(); break;
1263 case 4: gen_helper_fsub_ST0_FT0(); break;
1264 case 5: gen_helper_fsubr_ST0_FT0(); break;
1265 case 6: gen_helper_fdiv_ST0_FT0(); break;
1266 case 7: gen_helper_fdivr_ST0_FT0(); break;
1270 /* NOTE the exception in "r" op ordering */
1271 static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
1273 TCGv_i32 tmp = tcg_const_i32(opreg);
1274 switch (op) {
1275 case 0: gen_helper_fadd_STN_ST0(tmp); break;
1276 case 1: gen_helper_fmul_STN_ST0(tmp); break;
1277 case 4: gen_helper_fsubr_STN_ST0(tmp); break;
1278 case 5: gen_helper_fsub_STN_ST0(tmp); break;
1279 case 6: gen_helper_fdivr_STN_ST0(tmp); break;
1280 case 7: gen_helper_fdiv_STN_ST0(tmp); break;
1284 /* if d == OR_TMP0, it means memory operand (address in A0) */
1285 static void gen_op(DisasContext *s1, int op, int ot, int d)
1287 if (d != OR_TMP0) {
1288 gen_op_mov_TN_reg(ot, 0, d);
1289 } else {
1290 gen_op_ld_T0_A0(ot + s1->mem_index);
1292 switch(op) {
1293 case OP_ADCL:
1294 if (s1->cc_op != CC_OP_DYNAMIC)
1295 gen_op_set_cc_op(s1->cc_op);
1296 gen_compute_eflags_c(cpu_tmp4);
1297 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1298 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1299 if (d != OR_TMP0)
1300 gen_op_mov_reg_T0(ot, d);
1301 else
1302 gen_op_st_T0_A0(ot + s1->mem_index);
1303 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1304 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1305 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1306 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1307 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1308 s1->cc_op = CC_OP_DYNAMIC;
1309 break;
1310 case OP_SBBL:
1311 if (s1->cc_op != CC_OP_DYNAMIC)
1312 gen_op_set_cc_op(s1->cc_op);
1313 gen_compute_eflags_c(cpu_tmp4);
1314 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1315 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1316 if (d != OR_TMP0)
1317 gen_op_mov_reg_T0(ot, d);
1318 else
1319 gen_op_st_T0_A0(ot + s1->mem_index);
1320 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1321 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1322 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1323 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1324 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1325 s1->cc_op = CC_OP_DYNAMIC;
1326 break;
1327 case OP_ADDL:
1328 gen_op_addl_T0_T1();
1329 if (d != OR_TMP0)
1330 gen_op_mov_reg_T0(ot, d);
1331 else
1332 gen_op_st_T0_A0(ot + s1->mem_index);
1333 gen_op_update2_cc();
1334 s1->cc_op = CC_OP_ADDB + ot;
1335 break;
1336 case OP_SUBL:
1337 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1338 if (d != OR_TMP0)
1339 gen_op_mov_reg_T0(ot, d);
1340 else
1341 gen_op_st_T0_A0(ot + s1->mem_index);
1342 gen_op_update2_cc();
1343 s1->cc_op = CC_OP_SUBB + ot;
1344 break;
1345 default:
1346 case OP_ANDL:
1347 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1348 if (d != OR_TMP0)
1349 gen_op_mov_reg_T0(ot, d);
1350 else
1351 gen_op_st_T0_A0(ot + s1->mem_index);
1352 gen_op_update1_cc();
1353 s1->cc_op = CC_OP_LOGICB + ot;
1354 break;
1355 case OP_ORL:
1356 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1357 if (d != OR_TMP0)
1358 gen_op_mov_reg_T0(ot, d);
1359 else
1360 gen_op_st_T0_A0(ot + s1->mem_index);
1361 gen_op_update1_cc();
1362 s1->cc_op = CC_OP_LOGICB + ot;
1363 break;
1364 case OP_XORL:
1365 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1366 if (d != OR_TMP0)
1367 gen_op_mov_reg_T0(ot, d);
1368 else
1369 gen_op_st_T0_A0(ot + s1->mem_index);
1370 gen_op_update1_cc();
1371 s1->cc_op = CC_OP_LOGICB + ot;
1372 break;
1373 case OP_CMPL:
1374 gen_op_cmpl_T0_T1_cc();
1375 s1->cc_op = CC_OP_SUBB + ot;
1376 break;
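/* Note: for ADC/SBB above, cc_op is selected at run time: the computed
   carry-in is shifted left by two and added to CC_OP_ADDB/CC_OP_SUBB +
   ot, yielding the plain ADD/SUB op when the carry was 0 and the
   corresponding ADC/SBB variant (four entries further in the CC_OP
   enum) when it was 1. */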
1380 /* if d == OR_TMP0, it means memory operand (address in A0) */
1381 static void gen_inc(DisasContext *s1, int ot, int d, int c)
1383 if (d != OR_TMP0)
1384 gen_op_mov_TN_reg(ot, 0, d);
1385 else
1386 gen_op_ld_T0_A0(ot + s1->mem_index);
1387 if (s1->cc_op != CC_OP_DYNAMIC)
1388 gen_op_set_cc_op(s1->cc_op);
1389 if (c > 0) {
1390 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1391 s1->cc_op = CC_OP_INCB + ot;
1392 } else {
1393 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1394 s1->cc_op = CC_OP_DECB + ot;
1396 if (d != OR_TMP0)
1397 gen_op_mov_reg_T0(ot, d);
1398 else
1399 gen_op_st_T0_A0(ot + s1->mem_index);
1400 gen_compute_eflags_c(cpu_cc_src);
1401 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1404 static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1405 int is_right, int is_arith)
1407 target_ulong mask;
1408 int shift_label;
1409 TCGv t0, t1, t2;
1411 if (ot == OT_QUAD) {
1412 mask = 0x3f;
1413 } else {
1414 mask = 0x1f;
1417 /* load */
1418 if (op1 == OR_TMP0) {
1419 gen_op_ld_T0_A0(ot + s->mem_index);
1420 } else {
1421 gen_op_mov_TN_reg(ot, 0, op1);
1424 t0 = tcg_temp_local_new();
1425 t1 = tcg_temp_local_new();
1426 t2 = tcg_temp_local_new();
1428 tcg_gen_andi_tl(t2, cpu_T[1], mask);
1430 if (is_right) {
1431 if (is_arith) {
1432 gen_exts(ot, cpu_T[0]);
1433 tcg_gen_mov_tl(t0, cpu_T[0]);
1434 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], t2);
1435 } else {
1436 gen_extu(ot, cpu_T[0]);
1437 tcg_gen_mov_tl(t0, cpu_T[0]);
1438 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], t2);
1440 } else {
1441 tcg_gen_mov_tl(t0, cpu_T[0]);
1442 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], t2);
1445 /* store */
1446 if (op1 == OR_TMP0) {
1447 gen_op_st_T0_A0(ot + s->mem_index);
1448 } else {
1449 gen_op_mov_reg_T0(ot, op1);
1452 /* update eflags if non zero shift */
1453 if (s->cc_op != CC_OP_DYNAMIC) {
1454 gen_op_set_cc_op(s->cc_op);
1457 tcg_gen_mov_tl(t1, cpu_T[0]);
1459 shift_label = gen_new_label();
1460 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, shift_label);
1462 tcg_gen_addi_tl(t2, t2, -1);
1463 tcg_gen_mov_tl(cpu_cc_dst, t1);
1465 if (is_right) {
1466 if (is_arith) {
1467 tcg_gen_sar_tl(cpu_cc_src, t0, t2);
1468 } else {
1469 tcg_gen_shr_tl(cpu_cc_src, t0, t2);
1471 } else {
1472 tcg_gen_shl_tl(cpu_cc_src, t0, t2);
1475 if (is_right) {
1476 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1477 } else {
1478 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1481 gen_set_label(shift_label);
1482 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1484 tcg_temp_free(t0);
1485 tcg_temp_free(t1);
1486 tcg_temp_free(t2);
1489 static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1490 int is_right, int is_arith)
1492 int mask;
1494 if (ot == OT_QUAD)
1495 mask = 0x3f;
1496 else
1497 mask = 0x1f;
1499 /* load */
1500 if (op1 == OR_TMP0)
1501 gen_op_ld_T0_A0(ot + s->mem_index);
1502 else
1503 gen_op_mov_TN_reg(ot, 0, op1);
1505 op2 &= mask;
1506 if (op2 != 0) {
1507 if (is_right) {
1508 if (is_arith) {
1509 gen_exts(ot, cpu_T[0]);
1510 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1511 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1512 } else {
1513 gen_extu(ot, cpu_T[0]);
1514 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1515 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1517 } else {
1518 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1519 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1523 /* store */
1524 if (op1 == OR_TMP0)
1525 gen_op_st_T0_A0(ot + s->mem_index);
1526 else
1527 gen_op_mov_reg_T0(ot, op1);
1529 /* update eflags if non zero shift */
1530 if (op2 != 0) {
1531 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
1532 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1533 if (is_right)
1534 s->cc_op = CC_OP_SARB + ot;
1535 else
1536 s->cc_op = CC_OP_SHLB + ot;
1540 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1542 if (arg2 >= 0)
1543 tcg_gen_shli_tl(ret, arg1, arg2);
1544 else
1545 tcg_gen_shri_tl(ret, arg1, -arg2);
1548 static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1549 int is_right)
1551 target_ulong mask;
1552 int label1, label2, data_bits;
1553 TCGv t0, t1, t2, a0;
1555 /* XXX: inefficient, but we must use local temps */
1556 t0 = tcg_temp_local_new();
1557 t1 = tcg_temp_local_new();
1558 t2 = tcg_temp_local_new();
1559 a0 = tcg_temp_local_new();
1561 if (ot == OT_QUAD)
1562 mask = 0x3f;
1563 else
1564 mask = 0x1f;
1566 /* load */
1567 if (op1 == OR_TMP0) {
1568 tcg_gen_mov_tl(a0, cpu_A0);
1569 gen_op_ld_v(ot + s->mem_index, t0, a0);
1570 } else {
1571 gen_op_mov_v_reg(ot, t0, op1);
1574 tcg_gen_mov_tl(t1, cpu_T[1]);
1576 tcg_gen_andi_tl(t1, t1, mask);
1578 /* Must test zero case to avoid using undefined behaviour in TCG
1579 shifts. */
1580 label1 = gen_new_label();
1581 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
1583 if (ot <= OT_WORD)
1584 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
1585 else
1586 tcg_gen_mov_tl(cpu_tmp0, t1);
1588 gen_extu(ot, t0);
1589 tcg_gen_mov_tl(t2, t0);
1591 data_bits = 8 << ot;
1592 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1593 fix TCG definition) */
1594 if (is_right) {
1595 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
1596 tcg_gen_subfi_tl(cpu_tmp0, data_bits, cpu_tmp0);
1597 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
1598 } else {
1599 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
1600 tcg_gen_subfi_tl(cpu_tmp0, data_bits, cpu_tmp0);
1601 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
1603 tcg_gen_or_tl(t0, t0, cpu_tmp4);
1605 gen_set_label(label1);
1606 /* store */
1607 if (op1 == OR_TMP0) {
1608 gen_op_st_v(ot + s->mem_index, t0, a0);
1609 } else {
1610 gen_op_mov_reg_v(ot, op1, t0);
1613 /* update eflags */
1614 if (s->cc_op != CC_OP_DYNAMIC)
1615 gen_op_set_cc_op(s->cc_op);
1617 label2 = gen_new_label();
1618 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
1620 gen_compute_eflags(cpu_cc_src);
1621 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1622 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
1623 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1624 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1625 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1626 if (is_right) {
1627 tcg_gen_shri_tl(t0, t0, data_bits - 1);
1629 tcg_gen_andi_tl(t0, t0, CC_C);
1630 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
1632 tcg_gen_discard_tl(cpu_cc_dst);
1633 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1635 gen_set_label(label2);
1636 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1638 tcg_temp_free(t0);
1639 tcg_temp_free(t1);
1640 tcg_temp_free(t2);
1641 tcg_temp_free(a0);
1644 static void gen_rot_rm_im(DisasContext *s, int ot, int op1, int op2,
1645 int is_right)
1647 int mask;
1648 int data_bits;
1649 TCGv t0, t1, a0;
1651 /* XXX: inefficient, but we must use local temps */
1652 t0 = tcg_temp_local_new();
1653 t1 = tcg_temp_local_new();
1654 a0 = tcg_temp_local_new();
1656 if (ot == OT_QUAD)
1657 mask = 0x3f;
1658 else
1659 mask = 0x1f;
1661 /* load */
1662 if (op1 == OR_TMP0) {
1663 tcg_gen_mov_tl(a0, cpu_A0);
1664 gen_op_ld_v(ot + s->mem_index, t0, a0);
1665 } else {
1666 gen_op_mov_v_reg(ot, t0, op1);
1669 gen_extu(ot, t0);
1670 tcg_gen_mov_tl(t1, t0);
1672 op2 &= mask;
1673 data_bits = 8 << ot;
1674 if (op2 != 0) {
1675 int shift = op2 & ((1 << (3 + ot)) - 1);
1676 if (is_right) {
1677 tcg_gen_shri_tl(cpu_tmp4, t0, shift);
1678 tcg_gen_shli_tl(t0, t0, data_bits - shift);
1680 else {
1681 tcg_gen_shli_tl(cpu_tmp4, t0, shift);
1682 tcg_gen_shri_tl(t0, t0, data_bits - shift);
1684 tcg_gen_or_tl(t0, t0, cpu_tmp4);
1687 /* store */
1688 if (op1 == OR_TMP0) {
1689 gen_op_st_v(ot + s->mem_index, t0, a0);
1690 } else {
1691 gen_op_mov_reg_v(ot, op1, t0);
1694 if (op2 != 0) {
1695 /* update eflags */
1696 if (s->cc_op != CC_OP_DYNAMIC)
1697 gen_op_set_cc_op(s->cc_op);
1699 gen_compute_eflags(cpu_cc_src);
1700 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1701 tcg_gen_xor_tl(cpu_tmp0, t1, t0);
1702 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1703 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1704 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1705 if (is_right) {
1706 tcg_gen_shri_tl(t0, t0, data_bits - 1);
1708 tcg_gen_andi_tl(t0, t0, CC_C);
1709 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
1711 tcg_gen_discard_tl(cpu_cc_dst);
1712 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1713 s->cc_op = CC_OP_EFLAGS;
1716 tcg_temp_free(t0);
1717 tcg_temp_free(t1);
1718 tcg_temp_free(a0);
1721 /* XXX: add faster immediate = 1 case */
1722 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
1723 int is_right)
1725 int label1;
1727 if (s->cc_op != CC_OP_DYNAMIC)
1728 gen_op_set_cc_op(s->cc_op);
1730 /* load */
1731 if (op1 == OR_TMP0)
1732 gen_op_ld_T0_A0(ot + s->mem_index);
1733 else
1734 gen_op_mov_TN_reg(ot, 0, op1);
1736 if (is_right) {
1737 switch (ot) {
1738 case 0: gen_helper_rcrb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1739 case 1: gen_helper_rcrw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1740 case 2: gen_helper_rcrl(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1741 #ifdef TARGET_X86_64
1742 case 3: gen_helper_rcrq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1743 #endif
1745 } else {
1746 switch (ot) {
1747 case 0: gen_helper_rclb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1748 case 1: gen_helper_rclw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1749 case 2: gen_helper_rcll(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1750 #ifdef TARGET_X86_64
1751 case 3: gen_helper_rclq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
1752 #endif
1755 /* store */
1756 if (op1 == OR_TMP0)
1757 gen_op_st_T0_A0(ot + s->mem_index);
1758 else
1759 gen_op_mov_reg_T0(ot, op1);
1761 /* update eflags */
1762 label1 = gen_new_label();
1763 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
1765 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
1766 tcg_gen_discard_tl(cpu_cc_dst);
1767 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1769 gen_set_label(label1);
1770 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
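/* Double-width shifts (SHLD/SHRD): T1 supplies the bits shifted in and
   cpu_T3 the shift count; op1 selects a register destination or
   OR_TMP0 for a memory operand at A0. */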
1773 /* XXX: add faster immediate case */
1774 static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
1775 int is_right)
1777 int label1, label2, data_bits;
1778 target_ulong mask;
1779 TCGv t0, t1, t2, a0;
1781 t0 = tcg_temp_local_new();
1782 t1 = tcg_temp_local_new();
1783 t2 = tcg_temp_local_new();
1784 a0 = tcg_temp_local_new();
1786 if (ot == OT_QUAD)
1787 mask = 0x3f;
1788 else
1789 mask = 0x1f;
1791 /* load */
1792 if (op1 == OR_TMP0) {
1793 tcg_gen_mov_tl(a0, cpu_A0);
1794 gen_op_ld_v(ot + s->mem_index, t0, a0);
1795 } else {
1796 gen_op_mov_v_reg(ot, t0, op1);
1799 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1801 tcg_gen_mov_tl(t1, cpu_T[1]);
1802 tcg_gen_mov_tl(t2, cpu_T3);
1804 /* Must test zero case to avoid using undefined behaviour in TCG
1805 shifts. */
1806 label1 = gen_new_label();
1807 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
1809 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
1810 if (ot == OT_WORD) {
1811 /* Note: we implement the Intel behaviour for shift count > 16 */
1812 if (is_right) {
1813 tcg_gen_andi_tl(t0, t0, 0xffff);
1814 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
1815 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1816 tcg_gen_ext32u_tl(t0, t0);
1818 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
            /* only needed if count > 16, but a test would complicate things */
1821 tcg_gen_subfi_tl(cpu_tmp5, 32, t2);
1822 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
1824 tcg_gen_shr_tl(t0, t0, t2);
1826 tcg_gen_or_tl(t0, t0, cpu_tmp0);
1827 } else {
1828 /* XXX: not optimal */
1829 tcg_gen_andi_tl(t0, t0, 0xffff);
1830 tcg_gen_shli_tl(t1, t1, 16);
1831 tcg_gen_or_tl(t1, t1, t0);
1832 tcg_gen_ext32u_tl(t1, t1);
1834 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1835 tcg_gen_subfi_tl(cpu_tmp0, 32, cpu_tmp5);
1836 tcg_gen_shr_tl(cpu_tmp5, t1, cpu_tmp0);
1837 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp5);
1839 tcg_gen_shl_tl(t0, t0, t2);
1840 tcg_gen_subfi_tl(cpu_tmp5, 32, t2);
1841 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1842 tcg_gen_or_tl(t0, t0, t1);
1844 } else {
1845 data_bits = 8 << ot;
1846 if (is_right) {
1847 if (ot == OT_LONG)
1848 tcg_gen_ext32u_tl(t0, t0);
1850 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1852 tcg_gen_shr_tl(t0, t0, t2);
1853 tcg_gen_subfi_tl(cpu_tmp5, data_bits, t2);
1854 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
1855 tcg_gen_or_tl(t0, t0, t1);
1857 } else {
1858 if (ot == OT_LONG)
1859 tcg_gen_ext32u_tl(t1, t1);
1861 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1863 tcg_gen_shl_tl(t0, t0, t2);
1864 tcg_gen_subfi_tl(cpu_tmp5, data_bits, t2);
1865 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1866 tcg_gen_or_tl(t0, t0, t1);
1869 tcg_gen_mov_tl(t1, cpu_tmp4);
1871 gen_set_label(label1);
1872 /* store */
1873 if (op1 == OR_TMP0) {
1874 gen_op_st_v(ot + s->mem_index, t0, a0);
1875 } else {
1876 gen_op_mov_reg_v(ot, op1, t0);
1879 /* update eflags */
1880 if (s->cc_op != CC_OP_DYNAMIC)
1881 gen_op_set_cc_op(s->cc_op);
1883 label2 = gen_new_label();
1884 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
1886 tcg_gen_mov_tl(cpu_cc_src, t1);
1887 tcg_gen_mov_tl(cpu_cc_dst, t0);
1888 if (is_right) {
1889 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1890 } else {
1891 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1893 gen_set_label(label2);
1894 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1896 tcg_temp_free(t0);
1897 tcg_temp_free(t1);
1898 tcg_temp_free(t2);
1899 tcg_temp_free(a0);
1902 static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1904 if (s != OR_TMP1)
1905 gen_op_mov_TN_reg(ot, 1, s);
1906 switch(op) {
1907 case OP_ROL:
1908 gen_rot_rm_T1(s1, ot, d, 0);
1909 break;
1910 case OP_ROR:
1911 gen_rot_rm_T1(s1, ot, d, 1);
1912 break;
1913 case OP_SHL:
1914 case OP_SHL1:
1915 gen_shift_rm_T1(s1, ot, d, 0, 0);
1916 break;
1917 case OP_SHR:
1918 gen_shift_rm_T1(s1, ot, d, 1, 0);
1919 break;
1920 case OP_SAR:
1921 gen_shift_rm_T1(s1, ot, d, 1, 1);
1922 break;
1923 case OP_RCL:
1924 gen_rotc_rm_T1(s1, ot, d, 0);
1925 break;
1926 case OP_RCR:
1927 gen_rotc_rm_T1(s1, ot, d, 1);
1928 break;
1932 static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1934 switch(op) {
1935 case OP_ROL:
1936 gen_rot_rm_im(s1, ot, d, c, 0);
1937 break;
1938 case OP_ROR:
1939 gen_rot_rm_im(s1, ot, d, c, 1);
1940 break;
1941 case OP_SHL:
1942 case OP_SHL1:
1943 gen_shift_rm_im(s1, ot, d, c, 0, 0);
1944 break;
1945 case OP_SHR:
1946 gen_shift_rm_im(s1, ot, d, c, 1, 0);
1947 break;
1948 case OP_SAR:
1949 gen_shift_rm_im(s1, ot, d, c, 1, 1);
1950 break;
1951 default:
1952 /* currently not optimized */
1953 gen_op_movl_T1_im(c);
1954 gen_shift(s1, op, ot, d, OR_TMP1);
1955 break;
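/* Decode the ModRM byte plus any SIB byte and displacement of the
   current instruction and leave the computed effective address in A0,
   adding the segment base where required. */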
1959 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1961 target_long disp;
1962 int havesib;
1963 int base;
1964 int index;
1965 int scale;
1966 int opreg;
1967 int mod, rm, code, override, must_add_seg;
1969 override = s->override;
1970 must_add_seg = s->addseg;
1971 if (override >= 0)
1972 must_add_seg = 1;
1973 mod = (modrm >> 6) & 3;
1974 rm = modrm & 7;
1976 if (s->aflag) {
1978 havesib = 0;
1979 base = rm;
1980 index = 0;
1981 scale = 0;
1983 if (base == 4) {
1984 havesib = 1;
1985 code = ldub_code(s->pc++);
1986 scale = (code >> 6) & 3;
1987 index = ((code >> 3) & 7) | REX_X(s);
1988 base = (code & 7);
1990 base |= REX_B(s);
1992 switch (mod) {
1993 case 0:
1994 if ((base & 7) == 5) {
1995 base = -1;
1996 disp = (int32_t)ldl_code(s->pc);
1997 s->pc += 4;
1998 if (CODE64(s) && !havesib) {
1999 disp += s->pc + s->rip_offset;
2001 } else {
2002 disp = 0;
2004 break;
2005 case 1:
2006 disp = (int8_t)ldub_code(s->pc++);
2007 break;
2008 default:
2009 case 2:
2010 disp = (int32_t)ldl_code(s->pc);
2011 s->pc += 4;
2012 break;
2015 if (base >= 0) {
2016 /* for correct popl handling with esp */
2017 if (base == 4 && s->popl_esp_hack)
2018 disp += s->popl_esp_hack;
2019 #ifdef TARGET_X86_64
2020 if (s->aflag == 2) {
2021 gen_op_movq_A0_reg(base);
2022 if (disp != 0) {
2023 gen_op_addq_A0_im(disp);
2025 } else
2026 #endif
2028 gen_op_movl_A0_reg(base);
2029 if (disp != 0)
2030 gen_op_addl_A0_im(disp);
2032 } else {
2033 #ifdef TARGET_X86_64
2034 if (s->aflag == 2) {
2035 gen_op_movq_A0_im(disp);
2036 } else
2037 #endif
2039 gen_op_movl_A0_im(disp);
2042 /* index == 4 means no index */
2043 if (havesib && (index != 4)) {
2044 #ifdef TARGET_X86_64
2045 if (s->aflag == 2) {
2046 gen_op_addq_A0_reg_sN(scale, index);
2047 } else
2048 #endif
2050 gen_op_addl_A0_reg_sN(scale, index);
2053 if (must_add_seg) {
2054 if (override < 0) {
2055 if (base == R_EBP || base == R_ESP)
2056 override = R_SS;
2057 else
2058 override = R_DS;
2060 #ifdef TARGET_X86_64
2061 if (s->aflag == 2) {
2062 gen_op_addq_A0_seg(override);
2063 } else
2064 #endif
2066 gen_op_addl_A0_seg(override);
2069 } else {
2070 switch (mod) {
2071 case 0:
2072 if (rm == 6) {
2073 disp = lduw_code(s->pc);
2074 s->pc += 2;
2075 gen_op_movl_A0_im(disp);
2076 rm = 0; /* avoid SS override */
2077 goto no_rm;
2078 } else {
2079 disp = 0;
2081 break;
2082 case 1:
2083 disp = (int8_t)ldub_code(s->pc++);
2084 break;
2085 default:
2086 case 2:
2087 disp = lduw_code(s->pc);
2088 s->pc += 2;
2089 break;
2091 switch(rm) {
2092 case 0:
2093 gen_op_movl_A0_reg(R_EBX);
2094 gen_op_addl_A0_reg_sN(0, R_ESI);
2095 break;
2096 case 1:
2097 gen_op_movl_A0_reg(R_EBX);
2098 gen_op_addl_A0_reg_sN(0, R_EDI);
2099 break;
2100 case 2:
2101 gen_op_movl_A0_reg(R_EBP);
2102 gen_op_addl_A0_reg_sN(0, R_ESI);
2103 break;
2104 case 3:
2105 gen_op_movl_A0_reg(R_EBP);
2106 gen_op_addl_A0_reg_sN(0, R_EDI);
2107 break;
2108 case 4:
2109 gen_op_movl_A0_reg(R_ESI);
2110 break;
2111 case 5:
2112 gen_op_movl_A0_reg(R_EDI);
2113 break;
2114 case 6:
2115 gen_op_movl_A0_reg(R_EBP);
2116 break;
2117 default:
2118 case 7:
2119 gen_op_movl_A0_reg(R_EBX);
2120 break;
2122 if (disp != 0)
2123 gen_op_addl_A0_im(disp);
2124 gen_op_andl_A0_ffff();
2125 no_rm:
2126 if (must_add_seg) {
2127 if (override < 0) {
2128 if (rm == 2 || rm == 3 || rm == 6)
2129 override = R_SS;
2130 else
2131 override = R_DS;
2133 gen_op_addl_A0_seg(override);
2137 opreg = OR_A0;
2138 disp = 0;
2139 *reg_ptr = opreg;
2140 *offset_ptr = disp;
2143 static void gen_nop_modrm(DisasContext *s, int modrm)
2145 int mod, rm, base, code;
2147 mod = (modrm >> 6) & 3;
2148 if (mod == 3)
2149 return;
2150 rm = modrm & 7;
2152 if (s->aflag) {
2154 base = rm;
2156 if (base == 4) {
2157 code = ldub_code(s->pc++);
2158 base = (code & 7);
2161 switch (mod) {
2162 case 0:
2163 if (base == 5) {
2164 s->pc += 4;
2166 break;
2167 case 1:
2168 s->pc++;
2169 break;
2170 default:
2171 case 2:
2172 s->pc += 4;
2173 break;
2175 } else {
2176 switch (mod) {
2177 case 0:
2178 if (rm == 6) {
2179 s->pc += 2;
2181 break;
2182 case 1:
2183 s->pc++;
2184 break;
2185 default:
2186 case 2:
2187 s->pc += 2;
2188 break;
2193 /* used for LEA and MOV AX, mem */
2194 static void gen_add_A0_ds_seg(DisasContext *s)
2196 int override, must_add_seg;
2197 must_add_seg = s->addseg;
2198 override = R_DS;
2199 if (s->override >= 0) {
2200 override = s->override;
2201 must_add_seg = 1;
2203 if (must_add_seg) {
2204 #ifdef TARGET_X86_64
2205 if (CODE64(s)) {
2206 gen_op_addq_A0_seg(override);
2207 } else
2208 #endif
2210 gen_op_addl_A0_seg(override);
2215 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2216 OR_TMP0 */
2217 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2219 int mod, rm, opreg, disp;
2221 mod = (modrm >> 6) & 3;
2222 rm = (modrm & 7) | REX_B(s);
2223 if (mod == 3) {
2224 if (is_store) {
2225 if (reg != OR_TMP0)
2226 gen_op_mov_TN_reg(ot, 0, reg);
2227 gen_op_mov_reg_T0(ot, rm);
2228 } else {
2229 gen_op_mov_TN_reg(ot, 0, rm);
2230 if (reg != OR_TMP0)
2231 gen_op_mov_reg_T0(ot, reg);
2233 } else {
2234 gen_lea_modrm(s, modrm, &opreg, &disp);
2235 if (is_store) {
2236 if (reg != OR_TMP0)
2237 gen_op_mov_TN_reg(ot, 0, reg);
2238 gen_op_st_T0_A0(ot + s->mem_index);
2239 } else {
2240 gen_op_ld_T0_A0(ot + s->mem_index);
2241 if (reg != OR_TMP0)
2242 gen_op_mov_reg_T0(ot, reg);
2247 static inline uint32_t insn_get(DisasContext *s, int ot)
2249 uint32_t ret;
2251 switch(ot) {
2252 case OT_BYTE:
2253 ret = ldub_code(s->pc);
2254 s->pc++;
2255 break;
2256 case OT_WORD:
2257 ret = lduw_code(s->pc);
2258 s->pc += 2;
2259 break;
2260 default:
2261 case OT_LONG:
2262 ret = ldl_code(s->pc);
2263 s->pc += 4;
2264 break;
2266 return ret;
2269 static inline int insn_const_size(unsigned int ot)
2271 if (ot <= OT_LONG)
2272 return 1 << ot;
2273 else
2274 return 4;
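/* End a translation block: when the jump target is on the same page as
   the current TB, chain directly to the next TB; otherwise store EIP
   and exit to the main loop so the target can be looked up again. */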
2277 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2279 TranslationBlock *tb;
2280 target_ulong pc;
2282 pc = s->cs_base + eip;
2283 tb = s->tb;
2284 /* NOTE: we handle the case where the TB spans two pages here */
2285 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2286 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2287 /* jump to same page: we can use a direct jump */
2288 tcg_gen_goto_tb(tb_num);
2289 gen_jmp_im(eip);
2290 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
2291 } else {
2292 /* jump to another page: currently not optimized */
2293 gen_jmp_im(eip);
2294 gen_eob(s);
2298 static inline void gen_jcc(DisasContext *s, int b,
2299 target_ulong val, target_ulong next_eip)
2301 int l1, l2, cc_op;
2303 cc_op = s->cc_op;
2304 gen_update_cc_op(s);
2305 if (s->jmp_opt) {
2306 l1 = gen_new_label();
2307 gen_jcc1(s, cc_op, b, l1);
2309 gen_goto_tb(s, 0, next_eip);
2311 gen_set_label(l1);
2312 gen_goto_tb(s, 1, val);
2313 s->is_jmp = DISAS_TB_JUMP;
2314 } else {
2316 l1 = gen_new_label();
2317 l2 = gen_new_label();
2318 gen_jcc1(s, cc_op, b, l1);
2320 gen_jmp_im(next_eip);
2321 tcg_gen_br(l2);
2323 gen_set_label(l1);
2324 gen_jmp_im(val);
2325 gen_set_label(l2);
2326 gen_eob(s);
2330 static void gen_setcc(DisasContext *s, int b)
2332 int inv, jcc_op, l1;
2333 TCGv t0;
2335 if (is_fast_jcc_case(s, b)) {
2336 /* nominal case: we use a jump */
2337 /* XXX: make it faster by adding new instructions in TCG */
2338 t0 = tcg_temp_local_new();
2339 tcg_gen_movi_tl(t0, 0);
2340 l1 = gen_new_label();
2341 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2342 tcg_gen_movi_tl(t0, 1);
2343 gen_set_label(l1);
2344 tcg_gen_mov_tl(cpu_T[0], t0);
2345 tcg_temp_free(t0);
2346 } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
2350 inv = b & 1;
2351 jcc_op = (b >> 1) & 7;
2352 gen_setcc_slow_T0(s, jcc_op);
2353 if (inv) {
2354 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2359 static inline void gen_op_movl_T0_seg(int seg_reg)
2361 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2362 offsetof(CPUX86State,segs[seg_reg].selector));
2365 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2367 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2368 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2369 offsetof(CPUX86State,segs[seg_reg].selector));
2370 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2371 tcg_gen_st_tl(cpu_T[0], cpu_env,
2372 offsetof(CPUX86State,segs[seg_reg].base));
2375 /* move T0 to seg_reg and compute if the CPU state may change. Never
2376 call this function with seg_reg == R_CS */
2377 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2379 if (s->pe && !s->vm86) {
2380 /* XXX: optimize by finding processor state dynamically */
2381 if (s->cc_op != CC_OP_DYNAMIC)
2382 gen_op_set_cc_op(s->cc_op);
2383 gen_jmp_im(cur_eip);
2384 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2385 gen_helper_load_seg(tcg_const_i32(seg_reg), cpu_tmp2_i32);
2386 /* abort translation because the addseg value may change or
2387 because ss32 may change. For R_SS, translation must always
2388 stop as a special handling must be done to disable hardware
2389 interrupts for the next instruction */
2390 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2391 s->is_jmp = DISAS_TB_JUMP;
2392 } else {
2393 gen_op_movl_seg_T0_vm(seg_reg);
2394 if (seg_reg == R_SS)
2395 s->is_jmp = DISAS_TB_JUMP;
2399 static inline int svm_is_rep(int prefixes)
2401 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2404 static inline void
2405 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2406 uint32_t type, uint64_t param)
2408 /* no SVM activated; fast case */
2409 if (likely(!(s->flags & HF_SVMI_MASK)))
2410 return;
2411 if (s->cc_op != CC_OP_DYNAMIC)
2412 gen_op_set_cc_op(s->cc_op);
2413 gen_jmp_im(pc_start - s->cs_base);
2414 gen_helper_svm_check_intercept_param(tcg_const_i32(type),
2415 tcg_const_i64(param));
2418 static inline void
2419 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2421 gen_svm_check_intercept_param(s, pc_start, type, 0);
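/* adjust ESP by a constant; the first argument of gen_op_add_reg_im()
   selects the width: 0 = 16 bit SP, 1 = 32 bit ESP, 2 = 64 bit RSP */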
2424 static inline void gen_stack_update(DisasContext *s, int addend)
2426 #ifdef TARGET_X86_64
2427 if (CODE64(s)) {
2428 gen_op_add_reg_im(2, R_ESP, addend);
2429 } else
2430 #endif
2431 if (s->ss32) {
2432 gen_op_add_reg_im(1, R_ESP, addend);
2433 } else {
2434 gen_op_add_reg_im(0, R_ESP, addend);
2438 /* generate a push. It depends on ss32, addseg and dflag */
2439 static void gen_push_T0(DisasContext *s)
2441 #ifdef TARGET_X86_64
2442 if (CODE64(s)) {
2443 gen_op_movq_A0_reg(R_ESP);
2444 if (s->dflag) {
2445 gen_op_addq_A0_im(-8);
2446 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2447 } else {
2448 gen_op_addq_A0_im(-2);
2449 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2451 gen_op_mov_reg_A0(2, R_ESP);
2452 } else
2453 #endif
2455 gen_op_movl_A0_reg(R_ESP);
2456 if (!s->dflag)
2457 gen_op_addl_A0_im(-2);
2458 else
2459 gen_op_addl_A0_im(-4);
2460 if (s->ss32) {
2461 if (s->addseg) {
2462 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2463 gen_op_addl_A0_seg(R_SS);
2465 } else {
2466 gen_op_andl_A0_ffff();
2467 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2468 gen_op_addl_A0_seg(R_SS);
2470 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2471 if (s->ss32 && !s->addseg)
2472 gen_op_mov_reg_A0(1, R_ESP);
2473 else
2474 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
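/* note: on all paths above ESP is only written back after the store has
   succeeded, so a faulting push leaves ESP unchanged (precise
   exceptions); in the addseg cases cpu_T[1] keeps the pre-segment-base
   value for that final update */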
2478 /* generate a push. It depends on ss32, addseg and dflag */
2479 /* slower version for T1, only used for call Ev */
2480 static void gen_push_T1(DisasContext *s)
2482 #ifdef TARGET_X86_64
2483 if (CODE64(s)) {
2484 gen_op_movq_A0_reg(R_ESP);
2485 if (s->dflag) {
2486 gen_op_addq_A0_im(-8);
2487 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2488 } else {
2489 gen_op_addq_A0_im(-2);
2490             gen_op_st_T1_A0(OT_WORD + s->mem_index);
2492 gen_op_mov_reg_A0(2, R_ESP);
2493 } else
2494 #endif
2496 gen_op_movl_A0_reg(R_ESP);
2497 if (!s->dflag)
2498 gen_op_addl_A0_im(-2);
2499 else
2500 gen_op_addl_A0_im(-4);
2501 if (s->ss32) {
2502 if (s->addseg) {
2503 gen_op_addl_A0_seg(R_SS);
2505 } else {
2506 gen_op_andl_A0_ffff();
2507 gen_op_addl_A0_seg(R_SS);
2509 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2511 if (s->ss32 && !s->addseg)
2512 gen_op_mov_reg_A0(1, R_ESP);
2513 else
2514 gen_stack_update(s, (-2) << s->dflag);
2518 /* two step pop is necessary for precise exceptions */
2519 static void gen_pop_T0(DisasContext *s)
2521 #ifdef TARGET_X86_64
2522 if (CODE64(s)) {
2523 gen_op_movq_A0_reg(R_ESP);
2524 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2525 } else
2526 #endif
2528 gen_op_movl_A0_reg(R_ESP);
2529 if (s->ss32) {
2530 if (s->addseg)
2531 gen_op_addl_A0_seg(R_SS);
2532 } else {
2533 gen_op_andl_A0_ffff();
2534 gen_op_addl_A0_seg(R_SS);
2536 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2540 static void gen_pop_update(DisasContext *s)
2542 #ifdef TARGET_X86_64
2543 if (CODE64(s) && s->dflag) {
2544 gen_stack_update(s, 8);
2545 } else
2546 #endif
2548 gen_stack_update(s, 2 << s->dflag);
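/* splitting pop into gen_pop_T0() (the load) and gen_pop_update() (the
   ESP adjustment) means a faulting load leaves ESP untouched and the
   instruction can be restarted */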
2552 static void gen_stack_A0(DisasContext *s)
2554 gen_op_movl_A0_reg(R_ESP);
2555 if (!s->ss32)
2556 gen_op_andl_A0_ffff();
2557 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2558 if (s->addseg)
2559 gen_op_addl_A0_seg(R_SS);
2562 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2563 static void gen_pusha(DisasContext *s)
2565 int i;
2566 gen_op_movl_A0_reg(R_ESP);
2567 gen_op_addl_A0_im(-16 << s->dflag);
2568 if (!s->ss32)
2569 gen_op_andl_A0_ffff();
2570 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2571 if (s->addseg)
2572 gen_op_addl_A0_seg(R_SS);
2573 for(i = 0;i < 8; i++) {
2574 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2575 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2576 gen_op_addl_A0_im(2 << s->dflag);
2578 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2581 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2582 static void gen_popa(DisasContext *s)
2584 int i;
2585 gen_op_movl_A0_reg(R_ESP);
2586 if (!s->ss32)
2587 gen_op_andl_A0_ffff();
2588 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2589 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2590 if (s->addseg)
2591 gen_op_addl_A0_seg(R_SS);
2592 for(i = 0;i < 8; i++) {
2593 /* ESP is not reloaded */
2594 if (i != 3) {
2595 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2596 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2598 gen_op_addl_A0_im(2 << s->dflag);
2600 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2603 static void gen_enter(DisasContext *s, int esp_addend, int level)
2605 int ot, opsize;
2607 level &= 0x1f;
2608 #ifdef TARGET_X86_64
2609 if (CODE64(s)) {
2610 ot = s->dflag ? OT_QUAD : OT_WORD;
2611 opsize = 1 << ot;
2613 gen_op_movl_A0_reg(R_ESP);
2614 gen_op_addq_A0_im(-opsize);
2615 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2617 /* push bp */
2618 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2619 gen_op_st_T0_A0(ot + s->mem_index);
2620 if (level) {
2621 /* XXX: must save state */
2622 gen_helper_enter64_level(tcg_const_i32(level),
2623 tcg_const_i32((ot == OT_QUAD)),
2624 cpu_T[1]);
2626 gen_op_mov_reg_T1(ot, R_EBP);
2627 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2628 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2629 } else
2630 #endif
2632 ot = s->dflag + OT_WORD;
2633 opsize = 2 << s->dflag;
2635 gen_op_movl_A0_reg(R_ESP);
2636 gen_op_addl_A0_im(-opsize);
2637 if (!s->ss32)
2638 gen_op_andl_A0_ffff();
2639 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2640 if (s->addseg)
2641 gen_op_addl_A0_seg(R_SS);
2642 /* push bp */
2643 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2644 gen_op_st_T0_A0(ot + s->mem_index);
2645 if (level) {
2646 /* XXX: must save state */
2647 gen_helper_enter_level(tcg_const_i32(level),
2648 tcg_const_i32(s->dflag),
2649 cpu_T[1]);
2651 gen_op_mov_reg_T1(ot, R_EBP);
2652 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2653 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
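/* ENTER: the old EBP is pushed inline and becomes the new frame base;
   copying the nested frame pointers for level > 0 is delegated to the
   enter_level helpers, and ESP finally drops below the locals plus the
   copied pointers */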
2657 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2659 if (s->cc_op != CC_OP_DYNAMIC)
2660 gen_op_set_cc_op(s->cc_op);
2661 gen_jmp_im(cur_eip);
2662 gen_helper_raise_exception(tcg_const_i32(trapno));
2663 s->is_jmp = DISAS_TB_JUMP;
2666 /* an interrupt is different from an exception because of the
2667 privilege checks */
2668 static void gen_interrupt(DisasContext *s, int intno,
2669 target_ulong cur_eip, target_ulong next_eip)
2671 if (s->cc_op != CC_OP_DYNAMIC)
2672 gen_op_set_cc_op(s->cc_op);
2673 gen_jmp_im(cur_eip);
2674 gen_helper_raise_interrupt(tcg_const_i32(intno),
2675 tcg_const_i32(next_eip - cur_eip));
2676 s->is_jmp = DISAS_TB_JUMP;
2679 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2681 if (s->cc_op != CC_OP_DYNAMIC)
2682 gen_op_set_cc_op(s->cc_op);
2683 gen_jmp_im(cur_eip);
2684 gen_helper_debug();
2685 s->is_jmp = DISAS_TB_JUMP;
2688 /* generate a generic end of block. A trace exception is also
2689    generated if needed */
2690 static void gen_eob(DisasContext *s)
2692 if (s->cc_op != CC_OP_DYNAMIC)
2693 gen_op_set_cc_op(s->cc_op);
2694 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2695 gen_helper_reset_inhibit_irq();
2697 if (s->tb->flags & HF_RF_MASK) {
2698 gen_helper_reset_rf();
2700 if (s->singlestep_enabled) {
2701 gen_helper_debug();
2702 } else if (s->tf) {
2703 gen_helper_single_step();
2704 } else {
2705 tcg_gen_exit_tb(0);
2707 s->is_jmp = DISAS_TB_JUMP;
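/* note: gen_eob() first clears any pending inhibit-IRQ and RF state,
   then picks the exit path: a debug trap for "hardware" single step,
   the TF single step helper, or a plain tcg_gen_exit_tb(0) */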
2710 /* generate a jump to eip. No segment change must happen before this,
2711    as a direct jump to the next block may occur */
2712 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2714 if (s->jmp_opt) {
2715 gen_update_cc_op(s);
2716 gen_goto_tb(s, tb_num, eip);
2717 s->is_jmp = DISAS_TB_JUMP;
2718 } else {
2719 gen_jmp_im(eip);
2720 gen_eob(s);
2724 static void gen_jmp(DisasContext *s, target_ulong eip)
2726 gen_jmp_tb(s, eip, 0);
2729 static inline void gen_ldq_env_A0(int idx, int offset)
2731 int mem_index = (idx >> 2) - 1;
2732 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2733 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2736 static inline void gen_stq_env_A0(int idx, int offset)
2738 int mem_index = (idx >> 2) - 1;
2739 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2740 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2743 static inline void gen_ldo_env_A0(int idx, int offset)
2745 int mem_index = (idx >> 2) - 1;
2746 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2747 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2748 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2749 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2750 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2753 static inline void gen_sto_env_A0(int idx, int offset)
2755 int mem_index = (idx >> 2) - 1;
2756 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2757 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2758 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2759 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2760 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
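/* the 128 bit ldo/sto variants above move an XMM register as two 64 bit
   halves through cpu_tmp1_i64, since TCG has no 128 bit type */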
2763 static inline void gen_op_movo(int d_offset, int s_offset)
2765 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2766 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2767 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2768 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2771 static inline void gen_op_movq(int d_offset, int s_offset)
2773 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2774 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2777 static inline void gen_op_movl(int d_offset, int s_offset)
2779 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2780 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2783 static inline void gen_op_movq_env_0(int d_offset)
2785 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2786 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2789 #define SSE_SPECIAL ((void *)1)
2790 #define SSE_DUMMY ((void *)2)
2792 #define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
2793 #define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
2794 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
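/* sse_op_table1 is indexed by the second opcode byte; the inner index b1
   selects the mandatory-prefix variant as computed in gen_sse():
   0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. For example 66 0F 58 (addpd)
   resolves to entry [0x58][1], i.e. gen_helper_addpd via SSE_FOP(add).
   SSE_SPECIAL entries are decoded by hand in gen_sse(). */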
2796 static void *sse_op_table1[256][4] = {
2797 /* 3DNow! extensions */
2798 [0x0e] = { SSE_DUMMY }, /* femms */
2799 [0x0f] = { SSE_DUMMY }, /* pf... */
2800 /* pure SSE operations */
2801 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2802 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2803 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2804 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2805 [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
2806 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
2807 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2808 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2810 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2811 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2812 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2813 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd, movntss, movntsd */
2814 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2815 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2816 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
2817 [0x2f] = { gen_helper_comiss, gen_helper_comisd },
2818 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2819 [0x51] = SSE_FOP(sqrt),
2820 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
2821 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
2822 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
2823 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
2824 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
2825 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
2826 [0x58] = SSE_FOP(add),
2827 [0x59] = SSE_FOP(mul),
2828 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
2829 gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
2830 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
2831 [0x5c] = SSE_FOP(sub),
2832 [0x5d] = SSE_FOP(min),
2833 [0x5e] = SSE_FOP(div),
2834 [0x5f] = SSE_FOP(max),
2836 [0xc2] = SSE_FOP(cmpeq),
2837 [0xc6] = { gen_helper_shufps, gen_helper_shufpd },
2839 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2840 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2842 /* MMX ops and their SSE extensions */
2843 [0x60] = MMX_OP2(punpcklbw),
2844 [0x61] = MMX_OP2(punpcklwd),
2845 [0x62] = MMX_OP2(punpckldq),
2846 [0x63] = MMX_OP2(packsswb),
2847 [0x64] = MMX_OP2(pcmpgtb),
2848 [0x65] = MMX_OP2(pcmpgtw),
2849 [0x66] = MMX_OP2(pcmpgtl),
2850 [0x67] = MMX_OP2(packuswb),
2851 [0x68] = MMX_OP2(punpckhbw),
2852 [0x69] = MMX_OP2(punpckhwd),
2853 [0x6a] = MMX_OP2(punpckhdq),
2854 [0x6b] = MMX_OP2(packssdw),
2855 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
2856 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
2857 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2858     [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2859 [0x70] = { gen_helper_pshufw_mmx,
2860 gen_helper_pshufd_xmm,
2861 gen_helper_pshufhw_xmm,
2862 gen_helper_pshuflw_xmm },
2863 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2864 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2865 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2866 [0x74] = MMX_OP2(pcmpeqb),
2867 [0x75] = MMX_OP2(pcmpeqw),
2868 [0x76] = MMX_OP2(pcmpeql),
2869 [0x77] = { SSE_DUMMY }, /* emms */
2870 [0x78] = { NULL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* extrq_i, insertq_i */
2871 [0x79] = { NULL, gen_helper_extrq_r, NULL, gen_helper_insertq_r },
2872 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
2873 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
2874     [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2875 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2876 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2877 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2878 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
2879 [0xd1] = MMX_OP2(psrlw),
2880 [0xd2] = MMX_OP2(psrld),
2881 [0xd3] = MMX_OP2(psrlq),
2882 [0xd4] = MMX_OP2(paddq),
2883 [0xd5] = MMX_OP2(pmullw),
2884 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2885 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2886 [0xd8] = MMX_OP2(psubusb),
2887 [0xd9] = MMX_OP2(psubusw),
2888 [0xda] = MMX_OP2(pminub),
2889 [0xdb] = MMX_OP2(pand),
2890 [0xdc] = MMX_OP2(paddusb),
2891 [0xdd] = MMX_OP2(paddusw),
2892 [0xde] = MMX_OP2(pmaxub),
2893 [0xdf] = MMX_OP2(pandn),
2894 [0xe0] = MMX_OP2(pavgb),
2895 [0xe1] = MMX_OP2(psraw),
2896 [0xe2] = MMX_OP2(psrad),
2897 [0xe3] = MMX_OP2(pavgw),
2898 [0xe4] = MMX_OP2(pmulhuw),
2899 [0xe5] = MMX_OP2(pmulhw),
2900 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
2901     [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2902 [0xe8] = MMX_OP2(psubsb),
2903 [0xe9] = MMX_OP2(psubsw),
2904 [0xea] = MMX_OP2(pminsw),
2905 [0xeb] = MMX_OP2(por),
2906 [0xec] = MMX_OP2(paddsb),
2907 [0xed] = MMX_OP2(paddsw),
2908 [0xee] = MMX_OP2(pmaxsw),
2909 [0xef] = MMX_OP2(pxor),
2910 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2911 [0xf1] = MMX_OP2(psllw),
2912 [0xf2] = MMX_OP2(pslld),
2913 [0xf3] = MMX_OP2(psllq),
2914 [0xf4] = MMX_OP2(pmuludq),
2915 [0xf5] = MMX_OP2(pmaddwd),
2916 [0xf6] = MMX_OP2(psadbw),
2917 [0xf7] = MMX_OP2(maskmov),
2918 [0xf8] = MMX_OP2(psubb),
2919 [0xf9] = MMX_OP2(psubw),
2920 [0xfa] = MMX_OP2(psubl),
2921 [0xfb] = MMX_OP2(psubq),
2922 [0xfc] = MMX_OP2(paddb),
2923 [0xfd] = MMX_OP2(paddw),
2924 [0xfe] = MMX_OP2(paddl),
2927 static void *sse_op_table2[3 * 8][2] = {
2928 [0 + 2] = MMX_OP2(psrlw),
2929 [0 + 4] = MMX_OP2(psraw),
2930 [0 + 6] = MMX_OP2(psllw),
2931 [8 + 2] = MMX_OP2(psrld),
2932 [8 + 4] = MMX_OP2(psrad),
2933 [8 + 6] = MMX_OP2(pslld),
2934 [16 + 2] = MMX_OP2(psrlq),
2935 [16 + 3] = { NULL, gen_helper_psrldq_xmm },
2936 [16 + 6] = MMX_OP2(psllq),
2937 [16 + 7] = { NULL, gen_helper_pslldq_xmm },
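/* immediate-form MMX/SSE shifts: indexed in gen_sse() as
   [((b - 1) & 3) * 8 + reg][b1], so opcodes 0x71/0x72/0x73 map to rows
   0/8/16 and the modrm reg field selects the shift kind */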
2940 static void *sse_op_table3[4 * 3] = {
2941 gen_helper_cvtsi2ss,
2942 gen_helper_cvtsi2sd,
2943 X86_64_ONLY(gen_helper_cvtsq2ss),
2944 X86_64_ONLY(gen_helper_cvtsq2sd),
2946 gen_helper_cvttss2si,
2947 gen_helper_cvttsd2si,
2948 X86_64_ONLY(gen_helper_cvttss2sq),
2949 X86_64_ONLY(gen_helper_cvttsd2sq),
2951 gen_helper_cvtss2si,
2952 gen_helper_cvtsd2si,
2953 X86_64_ONLY(gen_helper_cvtss2sq),
2954 X86_64_ONLY(gen_helper_cvtsd2sq),
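/* three groups of four entries: cvtsi2{ss,sd} plus their 64 bit (REX.W)
   forms, the truncating cvtt{ss,sd}2si group, then cvt{ss,sd}2si;
   gen_sse() builds the index from the repz/repnz prefix and s->dflag */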
2957 static void *sse_op_table4[8][4] = {
2958 SSE_FOP(cmpeq),
2959 SSE_FOP(cmplt),
2960 SSE_FOP(cmple),
2961 SSE_FOP(cmpunord),
2962 SSE_FOP(cmpneq),
2963 SSE_FOP(cmpnlt),
2964 SSE_FOP(cmpnle),
2965 SSE_FOP(cmpord),
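/* cmpps/cmppd/cmpss/cmpsd: the row is the comparison predicate taken
   from the instruction's immediate byte (0..7), see case 0xc2 below */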
2968 static void *sse_op_table5[256] = {
2969 [0x0c] = gen_helper_pi2fw,
2970 [0x0d] = gen_helper_pi2fd,
2971 [0x1c] = gen_helper_pf2iw,
2972 [0x1d] = gen_helper_pf2id,
2973 [0x8a] = gen_helper_pfnacc,
2974 [0x8e] = gen_helper_pfpnacc,
2975 [0x90] = gen_helper_pfcmpge,
2976 [0x94] = gen_helper_pfmin,
2977 [0x96] = gen_helper_pfrcp,
2978 [0x97] = gen_helper_pfrsqrt,
2979 [0x9a] = gen_helper_pfsub,
2980 [0x9e] = gen_helper_pfadd,
2981 [0xa0] = gen_helper_pfcmpgt,
2982 [0xa4] = gen_helper_pfmax,
2983 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
2984 [0xa7] = gen_helper_movq, /* pfrsqit1 */
2985 [0xaa] = gen_helper_pfsubr,
2986 [0xae] = gen_helper_pfacc,
2987 [0xb0] = gen_helper_pfcmpeq,
2988 [0xb4] = gen_helper_pfmul,
2989 [0xb6] = gen_helper_movq, /* pfrcpit2 */
2990 [0xb7] = gen_helper_pmulhrw_mmx,
2991 [0xbb] = gen_helper_pswapd,
2992 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
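/* 3DNow! arithmetic: indexed directly by the opcode suffix byte that
   follows the operands, see case 0x0f in gen_sse() */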
2995 struct sse_op_helper_s {
2996 void *op[2]; uint32_t ext_mask;
2998 #define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
2999 #define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3000 #define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3001 #define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3002 static struct sse_op_helper_s sse_op_table6[256] = {
3003 [0x00] = SSSE3_OP(pshufb),
3004 [0x01] = SSSE3_OP(phaddw),
3005 [0x02] = SSSE3_OP(phaddd),
3006 [0x03] = SSSE3_OP(phaddsw),
3007 [0x04] = SSSE3_OP(pmaddubsw),
3008 [0x05] = SSSE3_OP(phsubw),
3009 [0x06] = SSSE3_OP(phsubd),
3010 [0x07] = SSSE3_OP(phsubsw),
3011 [0x08] = SSSE3_OP(psignb),
3012 [0x09] = SSSE3_OP(psignw),
3013 [0x0a] = SSSE3_OP(psignd),
3014 [0x0b] = SSSE3_OP(pmulhrsw),
3015 [0x10] = SSE41_OP(pblendvb),
3016 [0x14] = SSE41_OP(blendvps),
3017 [0x15] = SSE41_OP(blendvpd),
3018 [0x17] = SSE41_OP(ptest),
3019 [0x1c] = SSSE3_OP(pabsb),
3020 [0x1d] = SSSE3_OP(pabsw),
3021 [0x1e] = SSSE3_OP(pabsd),
3022 [0x20] = SSE41_OP(pmovsxbw),
3023 [0x21] = SSE41_OP(pmovsxbd),
3024 [0x22] = SSE41_OP(pmovsxbq),
3025 [0x23] = SSE41_OP(pmovsxwd),
3026 [0x24] = SSE41_OP(pmovsxwq),
3027 [0x25] = SSE41_OP(pmovsxdq),
3028 [0x28] = SSE41_OP(pmuldq),
3029 [0x29] = SSE41_OP(pcmpeqq),
3030     [0x2a] = SSE41_SPECIAL, /* movntdqa */
3031 [0x2b] = SSE41_OP(packusdw),
3032 [0x30] = SSE41_OP(pmovzxbw),
3033 [0x31] = SSE41_OP(pmovzxbd),
3034 [0x32] = SSE41_OP(pmovzxbq),
3035 [0x33] = SSE41_OP(pmovzxwd),
3036 [0x34] = SSE41_OP(pmovzxwq),
3037 [0x35] = SSE41_OP(pmovzxdq),
3038 [0x37] = SSE42_OP(pcmpgtq),
3039 [0x38] = SSE41_OP(pminsb),
3040 [0x39] = SSE41_OP(pminsd),
3041 [0x3a] = SSE41_OP(pminuw),
3042 [0x3b] = SSE41_OP(pminud),
3043 [0x3c] = SSE41_OP(pmaxsb),
3044 [0x3d] = SSE41_OP(pmaxsd),
3045 [0x3e] = SSE41_OP(pmaxuw),
3046 [0x3f] = SSE41_OP(pmaxud),
3047 [0x40] = SSE41_OP(pmulld),
3048 [0x41] = SSE41_OP(phminposuw),
3051 static struct sse_op_helper_s sse_op_table7[256] = {
3052 [0x08] = SSE41_OP(roundps),
3053 [0x09] = SSE41_OP(roundpd),
3054 [0x0a] = SSE41_OP(roundss),
3055 [0x0b] = SSE41_OP(roundsd),
3056 [0x0c] = SSE41_OP(blendps),
3057 [0x0d] = SSE41_OP(blendpd),
3058 [0x0e] = SSE41_OP(pblendw),
3059 [0x0f] = SSSE3_OP(palignr),
3060 [0x14] = SSE41_SPECIAL, /* pextrb */
3061 [0x15] = SSE41_SPECIAL, /* pextrw */
3062 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3063 [0x17] = SSE41_SPECIAL, /* extractps */
3064 [0x20] = SSE41_SPECIAL, /* pinsrb */
3065 [0x21] = SSE41_SPECIAL, /* insertps */
3066 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3067 [0x40] = SSE41_OP(dpps),
3068 [0x41] = SSE41_OP(dppd),
3069 [0x42] = SSE41_OP(mpsadbw),
3070 [0x60] = SSE42_OP(pcmpestrm),
3071 [0x61] = SSE42_OP(pcmpestri),
3072 [0x62] = SSE42_OP(pcmpistrm),
3073 [0x63] = SSE42_OP(pcmpistri),
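/* decode and emit one MMX/SSE instruction: gen_sse() folds the mandatory
   prefix into b1, checks the TS/EM/OSFXSR exception conditions, then
   either handles SSE_SPECIAL opcodes inline (with b1 merged into bits
   8-9 of b) or calls the selected table helper on env-relative operand
   pointers */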
3076 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3078 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3079 int modrm, mod, rm, reg, reg_addr, offset_addr;
3080 void *sse_op2;
3082 b &= 0xff;
3083 if (s->prefix & PREFIX_DATA)
3084 b1 = 1;
3085 else if (s->prefix & PREFIX_REPZ)
3086 b1 = 2;
3087 else if (s->prefix & PREFIX_REPNZ)
3088 b1 = 3;
3089 else
3090 b1 = 0;
3091 sse_op2 = sse_op_table1[b][b1];
3092 if (!sse_op2)
3093 goto illegal_op;
3094 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3095 is_xmm = 1;
3096 } else {
3097 if (b1 == 0) {
3098 /* MMX case */
3099 is_xmm = 0;
3100 } else {
3101 is_xmm = 1;
3104 /* simple MMX/SSE operation */
3105 if (s->flags & HF_TS_MASK) {
3106 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3107 return;
3109 if (s->flags & HF_EM_MASK) {
3110 illegal_op:
3111 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3112 return;
3114 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3115 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3116 goto illegal_op;
3117 if (b == 0x0e) {
3118 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3119 goto illegal_op;
3120 /* femms */
3121 gen_helper_emms();
3122 return;
3124 if (b == 0x77) {
3125 /* emms */
3126 gen_helper_emms();
3127 return;
3129 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3130 the static cpu state) */
3131 if (!is_xmm) {
3132 gen_helper_enter_mmx();
3135 modrm = ldub_code(s->pc++);
3136 reg = ((modrm >> 3) & 7);
3137 if (is_xmm)
3138 reg |= rex_r;
3139 mod = (modrm >> 6) & 3;
3140 if (sse_op2 == SSE_SPECIAL) {
3141 b |= (b1 << 8);
3142 switch(b) {
3143 case 0x0e7: /* movntq */
3144 if (mod == 3)
3145 goto illegal_op;
3146 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3147 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3148 break;
3149 case 0x1e7: /* movntdq */
3150 case 0x02b: /* movntps */
3151         case 0x12b: /* movntpd */
3152 if (mod == 3)
3153 goto illegal_op;
3154 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3155 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3156 break;
3157 case 0x3f0: /* lddqu */
3158 if (mod == 3)
3159 goto illegal_op;
3160 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3161 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3162 break;
3163 case 0x22b: /* movntss */
3164 case 0x32b: /* movntsd */
3165 if (mod == 3)
3166 goto illegal_op;
3167 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3168 if (b1 & 1) {
3169 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,
3170 xmm_regs[reg]));
3171 } else {
3172 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3173 xmm_regs[reg].XMM_L(0)));
3174 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3176 break;
3177 case 0x6e: /* movd mm, ea */
3178 #ifdef TARGET_X86_64
3179 if (s->dflag == 2) {
3180 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3181 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3182 } else
3183 #endif
3185 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3186 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3187 offsetof(CPUX86State,fpregs[reg].mmx));
3188 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3189 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
3191 break;
3192 case 0x16e: /* movd xmm, ea */
3193 #ifdef TARGET_X86_64
3194 if (s->dflag == 2) {
3195 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3196 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3197 offsetof(CPUX86State,xmm_regs[reg]));
3198 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
3199 } else
3200 #endif
3202 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3203 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3204 offsetof(CPUX86State,xmm_regs[reg]));
3205 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3206 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
3208 break;
3209 case 0x6f: /* movq mm, ea */
3210 if (mod != 3) {
3211 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3212 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3213 } else {
3214 rm = (modrm & 7);
3215 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3216 offsetof(CPUX86State,fpregs[rm].mmx));
3217 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3218 offsetof(CPUX86State,fpregs[reg].mmx));
3220 break;
3221 case 0x010: /* movups */
3222 case 0x110: /* movupd */
3223 case 0x028: /* movaps */
3224 case 0x128: /* movapd */
3225 case 0x16f: /* movdqa xmm, ea */
3226 case 0x26f: /* movdqu xmm, ea */
3227 if (mod != 3) {
3228 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3229 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3230 } else {
3231 rm = (modrm & 7) | REX_B(s);
3232 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3233 offsetof(CPUX86State,xmm_regs[rm]));
3235 break;
3236 case 0x210: /* movss xmm, ea */
3237 if (mod != 3) {
3238 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3239 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3240 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3241 gen_op_movl_T0_0();
3242 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3243 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3244 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3245 } else {
3246 rm = (modrm & 7) | REX_B(s);
3247 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3248 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3250 break;
3251 case 0x310: /* movsd xmm, ea */
3252 if (mod != 3) {
3253 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3254 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3255 gen_op_movl_T0_0();
3256 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3257 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3258 } else {
3259 rm = (modrm & 7) | REX_B(s);
3260 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3261 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3263 break;
3264 case 0x012: /* movlps */
3265 case 0x112: /* movlpd */
3266 if (mod != 3) {
3267 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3268 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3269 } else {
3270 /* movhlps */
3271 rm = (modrm & 7) | REX_B(s);
3272 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3273 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3275 break;
3276 case 0x212: /* movsldup */
3277 if (mod != 3) {
3278 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3279 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3280 } else {
3281 rm = (modrm & 7) | REX_B(s);
3282 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3283 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3284 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3285 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3287 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3288 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3289 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3290 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3291 break;
3292 case 0x312: /* movddup */
3293 if (mod != 3) {
3294 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3295 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3296 } else {
3297 rm = (modrm & 7) | REX_B(s);
3298 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3299 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3301 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3302 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3303 break;
3304 case 0x016: /* movhps */
3305 case 0x116: /* movhpd */
3306 if (mod != 3) {
3307 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3308 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3309 } else {
3310 /* movlhps */
3311 rm = (modrm & 7) | REX_B(s);
3312 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3313 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3315 break;
3316 case 0x216: /* movshdup */
3317 if (mod != 3) {
3318 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3319 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3320 } else {
3321 rm = (modrm & 7) | REX_B(s);
3322 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3323 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3324 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3325 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3327 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3328 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3329 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3330 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3331 break;
3332 case 0x178:
3333 case 0x378:
3335 int bit_index, field_length;
3337 if (b1 == 1 && reg != 0)
3338 goto illegal_op;
3339 field_length = ldub_code(s->pc++) & 0x3F;
3340 bit_index = ldub_code(s->pc++) & 0x3F;
3341 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3342 offsetof(CPUX86State,xmm_regs[reg]));
3343 if (b1 == 1)
3344 gen_helper_extrq_i(cpu_ptr0, tcg_const_i32(bit_index),
3345 tcg_const_i32(field_length));
3346 else
3347 gen_helper_insertq_i(cpu_ptr0, tcg_const_i32(bit_index),
3348 tcg_const_i32(field_length));
3350 break;
3351 case 0x7e: /* movd ea, mm */
3352 #ifdef TARGET_X86_64
3353 if (s->dflag == 2) {
3354 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3355 offsetof(CPUX86State,fpregs[reg].mmx));
3356 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3357 } else
3358 #endif
3360 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3361 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3362 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3364 break;
3365 case 0x17e: /* movd ea, xmm */
3366 #ifdef TARGET_X86_64
3367 if (s->dflag == 2) {
3368 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3369 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3370 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3371 } else
3372 #endif
3374 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3375 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3376 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3378 break;
3379 case 0x27e: /* movq xmm, ea */
3380 if (mod != 3) {
3381 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3382 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3383 } else {
3384 rm = (modrm & 7) | REX_B(s);
3385 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3386 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3388 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3389 break;
3390 case 0x7f: /* movq ea, mm */
3391 if (mod != 3) {
3392 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3393 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3394 } else {
3395 rm = (modrm & 7);
3396 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3397 offsetof(CPUX86State,fpregs[reg].mmx));
3399 break;
3400 case 0x011: /* movups */
3401 case 0x111: /* movupd */
3402 case 0x029: /* movaps */
3403 case 0x129: /* movapd */
3404 case 0x17f: /* movdqa ea, xmm */
3405 case 0x27f: /* movdqu ea, xmm */
3406 if (mod != 3) {
3407 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3408 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3409 } else {
3410 rm = (modrm & 7) | REX_B(s);
3411 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3412 offsetof(CPUX86State,xmm_regs[reg]));
3414 break;
3415 case 0x211: /* movss ea, xmm */
3416 if (mod != 3) {
3417 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3418 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3419 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3420 } else {
3421 rm = (modrm & 7) | REX_B(s);
3422 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3423 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3425 break;
3426 case 0x311: /* movsd ea, xmm */
3427 if (mod != 3) {
3428 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3429 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3430 } else {
3431 rm = (modrm & 7) | REX_B(s);
3432 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3433 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3435 break;
3436 case 0x013: /* movlps */
3437 case 0x113: /* movlpd */
3438 if (mod != 3) {
3439 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3440 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3441 } else {
3442 goto illegal_op;
3444 break;
3445 case 0x017: /* movhps */
3446 case 0x117: /* movhpd */
3447 if (mod != 3) {
3448 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3449 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3450 } else {
3451 goto illegal_op;
3453 break;
3454 case 0x71: /* shift mm, im */
3455 case 0x72:
3456 case 0x73:
3457 case 0x171: /* shift xmm, im */
3458 case 0x172:
3459 case 0x173:
3460 if (b1 >= 2) {
3461 goto illegal_op;
3463 val = ldub_code(s->pc++);
3464 if (is_xmm) {
3465 gen_op_movl_T0_im(val);
3466 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3467 gen_op_movl_T0_0();
3468 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3469 op1_offset = offsetof(CPUX86State,xmm_t0);
3470 } else {
3471 gen_op_movl_T0_im(val);
3472 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3473 gen_op_movl_T0_0();
3474 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3475 op1_offset = offsetof(CPUX86State,mmx_t0);
3477             sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3478 if (!sse_op2)
3479 goto illegal_op;
3480 if (is_xmm) {
3481 rm = (modrm & 7) | REX_B(s);
3482 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3483 } else {
3484 rm = (modrm & 7);
3485 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3487 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3488 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3489 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3490 break;
3491 case 0x050: /* movmskps */
3492 rm = (modrm & 7) | REX_B(s);
3493 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3494 offsetof(CPUX86State,xmm_regs[rm]));
3495 gen_helper_movmskps(cpu_tmp2_i32, cpu_ptr0);
3496 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3497 gen_op_mov_reg_T0(OT_LONG, reg);
3498 break;
3499 case 0x150: /* movmskpd */
3500 rm = (modrm & 7) | REX_B(s);
3501 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3502 offsetof(CPUX86State,xmm_regs[rm]));
3503 gen_helper_movmskpd(cpu_tmp2_i32, cpu_ptr0);
3504 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3505 gen_op_mov_reg_T0(OT_LONG, reg);
3506 break;
3507 case 0x02a: /* cvtpi2ps */
3508 case 0x12a: /* cvtpi2pd */
3509 gen_helper_enter_mmx();
3510 if (mod != 3) {
3511 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3512 op2_offset = offsetof(CPUX86State,mmx_t0);
3513 gen_ldq_env_A0(s->mem_index, op2_offset);
3514 } else {
3515 rm = (modrm & 7);
3516 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3518 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3519 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3520 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3521 switch(b >> 8) {
3522 case 0x0:
3523 gen_helper_cvtpi2ps(cpu_ptr0, cpu_ptr1);
3524 break;
3525 default:
3526 case 0x1:
3527 gen_helper_cvtpi2pd(cpu_ptr0, cpu_ptr1);
3528 break;
3530 break;
3531 case 0x22a: /* cvtsi2ss */
3532 case 0x32a: /* cvtsi2sd */
3533 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3534 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3535 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3536 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3537 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3538 if (ot == OT_LONG) {
3539 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3540 ((void (*)(TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_tmp2_i32);
3541 } else {
3542 ((void (*)(TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_T[0]);
3544 break;
3545 case 0x02c: /* cvttps2pi */
3546 case 0x12c: /* cvttpd2pi */
3547 case 0x02d: /* cvtps2pi */
3548 case 0x12d: /* cvtpd2pi */
3549 gen_helper_enter_mmx();
3550 if (mod != 3) {
3551 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3552 op2_offset = offsetof(CPUX86State,xmm_t0);
3553 gen_ldo_env_A0(s->mem_index, op2_offset);
3554 } else {
3555 rm = (modrm & 7) | REX_B(s);
3556 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3558 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3559 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3560 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3561 switch(b) {
3562 case 0x02c:
3563 gen_helper_cvttps2pi(cpu_ptr0, cpu_ptr1);
3564 break;
3565 case 0x12c:
3566 gen_helper_cvttpd2pi(cpu_ptr0, cpu_ptr1);
3567 break;
3568 case 0x02d:
3569 gen_helper_cvtps2pi(cpu_ptr0, cpu_ptr1);
3570 break;
3571 case 0x12d:
3572 gen_helper_cvtpd2pi(cpu_ptr0, cpu_ptr1);
3573 break;
3575 break;
3576 case 0x22c: /* cvttss2si */
3577 case 0x32c: /* cvttsd2si */
3578 case 0x22d: /* cvtss2si */
3579 case 0x32d: /* cvtsd2si */
3580 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3581 if (mod != 3) {
3582 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3583 if ((b >> 8) & 1) {
3584 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3585 } else {
3586 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3587 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3589 op2_offset = offsetof(CPUX86State,xmm_t0);
3590 } else {
3591 rm = (modrm & 7) | REX_B(s);
3592 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3594 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3595 (b & 1) * 4];
3596 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3597 if (ot == OT_LONG) {
3598 ((void (*)(TCGv_i32, TCGv_ptr))sse_op2)(cpu_tmp2_i32, cpu_ptr0);
3599 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3600 } else {
3601 ((void (*)(TCGv, TCGv_ptr))sse_op2)(cpu_T[0], cpu_ptr0);
3603 gen_op_mov_reg_T0(ot, reg);
3604 break;
3605 case 0xc4: /* pinsrw */
3606 case 0x1c4:
3607 s->rip_offset = 1;
3608 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3609 val = ldub_code(s->pc++);
3610 if (b1) {
3611 val &= 7;
3612 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3613 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3614 } else {
3615 val &= 3;
3616 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3617 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3619 break;
3620 case 0xc5: /* pextrw */
3621 case 0x1c5:
3622 if (mod != 3)
3623 goto illegal_op;
3624 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3625 val = ldub_code(s->pc++);
3626 if (b1) {
3627 val &= 7;
3628 rm = (modrm & 7) | REX_B(s);
3629 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3630 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3631 } else {
3632 val &= 3;
3633 rm = (modrm & 7);
3634 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3635 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3637 reg = ((modrm >> 3) & 7) | rex_r;
3638 gen_op_mov_reg_T0(ot, reg);
3639 break;
3640 case 0x1d6: /* movq ea, xmm */
3641 if (mod != 3) {
3642 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3643 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3644 } else {
3645 rm = (modrm & 7) | REX_B(s);
3646 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3647 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3648 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3650 break;
3651 case 0x2d6: /* movq2dq */
3652 gen_helper_enter_mmx();
3653 rm = (modrm & 7);
3654 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3655 offsetof(CPUX86State,fpregs[rm].mmx));
3656 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3657 break;
3658 case 0x3d6: /* movdq2q */
3659 gen_helper_enter_mmx();
3660 rm = (modrm & 7) | REX_B(s);
3661 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3662 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3663 break;
3664 case 0xd7: /* pmovmskb */
3665 case 0x1d7:
3666 if (mod != 3)
3667 goto illegal_op;
3668 if (b1) {
3669 rm = (modrm & 7) | REX_B(s);
3670 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3671 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_ptr0);
3672 } else {
3673 rm = (modrm & 7);
3674 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3675 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_ptr0);
3677 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3678 reg = ((modrm >> 3) & 7) | rex_r;
3679 gen_op_mov_reg_T0(OT_LONG, reg);
3680 break;
3681 case 0x138:
3682 if (s->prefix & PREFIX_REPNZ)
3683 goto crc32;
3684 case 0x038:
3685 b = modrm;
3686 modrm = ldub_code(s->pc++);
3687 rm = modrm & 7;
3688 reg = ((modrm >> 3) & 7) | rex_r;
3689 mod = (modrm >> 6) & 3;
3690 if (b1 >= 2) {
3691 goto illegal_op;
3694 sse_op2 = sse_op_table6[b].op[b1];
3695 if (!sse_op2)
3696 goto illegal_op;
3697 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3698 goto illegal_op;
3700 if (b1) {
3701 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3702 if (mod == 3) {
3703 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3704 } else {
3705 op2_offset = offsetof(CPUX86State,xmm_t0);
3706 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3707 switch (b) {
3708 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3709 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3710 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3711 gen_ldq_env_A0(s->mem_index, op2_offset +
3712 offsetof(XMMReg, XMM_Q(0)));
3713 break;
3714 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3715 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
3716 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3717 (s->mem_index >> 2) - 1);
3718 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3719 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3720 offsetof(XMMReg, XMM_L(0)));
3721 break;
3722 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3723 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3724 (s->mem_index >> 2) - 1);
3725 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3726 offsetof(XMMReg, XMM_W(0)));
3727 break;
3728                 case 0x2a: /* movntdqa */
3729 gen_ldo_env_A0(s->mem_index, op1_offset);
3730 return;
3731 default:
3732 gen_ldo_env_A0(s->mem_index, op2_offset);
3735 } else {
3736 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3737 if (mod == 3) {
3738 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3739 } else {
3740 op2_offset = offsetof(CPUX86State,mmx_t0);
3741 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3742 gen_ldq_env_A0(s->mem_index, op2_offset);
3745 if (sse_op2 == SSE_SPECIAL)
3746 goto illegal_op;
3748 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3749 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3750 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3752 if (b == 0x17)
3753 s->cc_op = CC_OP_EFLAGS;
3754 break;
3755 case 0x338: /* crc32 */
3756 crc32:
3757 b = modrm;
3758 modrm = ldub_code(s->pc++);
3759 reg = ((modrm >> 3) & 7) | rex_r;
3761 if (b != 0xf0 && b != 0xf1)
3762 goto illegal_op;
3763 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
3764 goto illegal_op;
3766 if (b == 0xf0)
3767 ot = OT_BYTE;
3768 else if (b == 0xf1 && s->dflag != 2)
3769 if (s->prefix & PREFIX_DATA)
3770 ot = OT_WORD;
3771 else
3772 ot = OT_LONG;
3773 else
3774 ot = OT_QUAD;
3776 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3777 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3778 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3779 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
3780 cpu_T[0], tcg_const_i32(8 << ot));
3782 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3783 gen_op_mov_reg_T0(ot, reg);
3784 break;
3785 case 0x03a:
3786 case 0x13a:
3787 b = modrm;
3788 modrm = ldub_code(s->pc++);
3789 rm = modrm & 7;
3790 reg = ((modrm >> 3) & 7) | rex_r;
3791 mod = (modrm >> 6) & 3;
3792 if (b1 >= 2) {
3793 goto illegal_op;
3796 sse_op2 = sse_op_table7[b].op[b1];
3797 if (!sse_op2)
3798 goto illegal_op;
3799 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
3800 goto illegal_op;
3802 if (sse_op2 == SSE_SPECIAL) {
3803 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3804 rm = (modrm & 7) | REX_B(s);
3805 if (mod != 3)
3806 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3807 reg = ((modrm >> 3) & 7) | rex_r;
3808 val = ldub_code(s->pc++);
3809 switch (b) {
3810 case 0x14: /* pextrb */
3811 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3812 xmm_regs[reg].XMM_B(val & 15)));
3813 if (mod == 3)
3814 gen_op_mov_reg_T0(ot, rm);
3815 else
3816 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
3817 (s->mem_index >> 2) - 1);
3818 break;
3819 case 0x15: /* pextrw */
3820 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3821 xmm_regs[reg].XMM_W(val & 7)));
3822 if (mod == 3)
3823 gen_op_mov_reg_T0(ot, rm);
3824 else
3825 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
3826 (s->mem_index >> 2) - 1);
3827 break;
3828 case 0x16:
3829 if (ot == OT_LONG) { /* pextrd */
3830 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3831 offsetof(CPUX86State,
3832 xmm_regs[reg].XMM_L(val & 3)));
3833 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3834 if (mod == 3)
3835 gen_op_mov_reg_v(ot, rm, cpu_T[0]);
3836 else
3837 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3838 (s->mem_index >> 2) - 1);
3839 } else { /* pextrq */
3840 #ifdef TARGET_X86_64
3841 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3842 offsetof(CPUX86State,
3843 xmm_regs[reg].XMM_Q(val & 1)));
3844 if (mod == 3)
3845 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
3846 else
3847 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
3848 (s->mem_index >> 2) - 1);
3849 #else
3850 goto illegal_op;
3851 #endif
3853 break;
3854 case 0x17: /* extractps */
3855 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3856 xmm_regs[reg].XMM_L(val & 3)));
3857 if (mod == 3)
3858 gen_op_mov_reg_T0(ot, rm);
3859 else
3860 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3861 (s->mem_index >> 2) - 1);
3862 break;
3863 case 0x20: /* pinsrb */
3864 if (mod == 3)
3865 gen_op_mov_TN_reg(OT_LONG, 0, rm);
3866 else
3867 tcg_gen_qemu_ld8u(cpu_tmp0, cpu_A0,
3868 (s->mem_index >> 2) - 1);
3869 tcg_gen_st8_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State,
3870 xmm_regs[reg].XMM_B(val & 15)));
3871 break;
3872 case 0x21: /* insertps */
3873 if (mod == 3) {
3874 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3875 offsetof(CPUX86State,xmm_regs[rm]
3876 .XMM_L((val >> 6) & 3)));
3877 } else {
3878 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3879 (s->mem_index >> 2) - 1);
3880 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3882 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3883 offsetof(CPUX86State,xmm_regs[reg]
3884 .XMM_L((val >> 4) & 3)));
3885 if ((val >> 0) & 1)
3886 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3887 cpu_env, offsetof(CPUX86State,
3888 xmm_regs[reg].XMM_L(0)));
3889 if ((val >> 1) & 1)
3890 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3891 cpu_env, offsetof(CPUX86State,
3892 xmm_regs[reg].XMM_L(1)));
3893 if ((val >> 2) & 1)
3894 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3895 cpu_env, offsetof(CPUX86State,
3896 xmm_regs[reg].XMM_L(2)));
3897 if ((val >> 3) & 1)
3898 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3899 cpu_env, offsetof(CPUX86State,
3900 xmm_regs[reg].XMM_L(3)));
3901 break;
3902 case 0x22:
3903 if (ot == OT_LONG) { /* pinsrd */
3904 if (mod == 3)
3905 gen_op_mov_v_reg(ot, cpu_tmp0, rm);
3906 else
3907 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3908 (s->mem_index >> 2) - 1);
3909 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3910 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3911 offsetof(CPUX86State,
3912 xmm_regs[reg].XMM_L(val & 3)));
3913 } else { /* pinsrq */
3914 #ifdef TARGET_X86_64
3915 if (mod == 3)
3916 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
3917 else
3918 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
3919 (s->mem_index >> 2) - 1);
3920 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3921 offsetof(CPUX86State,
3922 xmm_regs[reg].XMM_Q(val & 1)));
3923 #else
3924 goto illegal_op;
3925 #endif
3927 break;
3929 return;
3932 if (b1) {
3933 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3934 if (mod == 3) {
3935 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3936 } else {
3937 op2_offset = offsetof(CPUX86State,xmm_t0);
3938 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3939 gen_ldo_env_A0(s->mem_index, op2_offset);
3941 } else {
3942 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3943 if (mod == 3) {
3944 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3945 } else {
3946 op2_offset = offsetof(CPUX86State,mmx_t0);
3947 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3948 gen_ldq_env_A0(s->mem_index, op2_offset);
3951 val = ldub_code(s->pc++);
3953 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3954 s->cc_op = CC_OP_EFLAGS;
3956 if (s->dflag == 2)
3957 /* The helper must use entire 64-bit gp registers */
3958 val |= 1 << 8;
3961 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3962 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3963 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3964 break;
3965 default:
3966 goto illegal_op;
3968 } else {
3969 /* generic MMX or SSE operation */
3970 switch(b) {
3971 case 0x70: /* pshufx insn */
3972 case 0xc6: /* pshufx insn */
3973 case 0xc2: /* compare insns */
3974 s->rip_offset = 1;
3975 break;
3976 default:
3977 break;
3979 if (is_xmm) {
3980 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3981 if (mod != 3) {
3982 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3983 op2_offset = offsetof(CPUX86State,xmm_t0);
3984 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3985 b == 0xc2)) {
3986 /* specific case for SSE single instructions */
3987 if (b1 == 2) {
3988 /* 32 bit access */
3989 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3990 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3991 } else {
3992 /* 64 bit access */
3993 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3995 } else {
3996 gen_ldo_env_A0(s->mem_index, op2_offset);
3998 } else {
3999 rm = (modrm & 7) | REX_B(s);
4000 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4002 } else {
4003 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4004 if (mod != 3) {
4005 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4006 op2_offset = offsetof(CPUX86State,mmx_t0);
4007 gen_ldq_env_A0(s->mem_index, op2_offset);
4008 } else {
4009 rm = (modrm & 7);
4010 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4013 switch(b) {
4014 case 0x0f: /* 3DNow! data insns */
4015 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4016 goto illegal_op;
4017 val = ldub_code(s->pc++);
4018 sse_op2 = sse_op_table5[val];
4019 if (!sse_op2)
4020 goto illegal_op;
4021 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4022 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4023 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4024 break;
4025 case 0x70: /* pshufx insn */
4026 case 0xc6: /* pshufx insn */
4027 val = ldub_code(s->pc++);
4028 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4029 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4030 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4031 break;
4032 case 0xc2:
4033 /* compare insns */
4034 val = ldub_code(s->pc++);
4035 if (val >= 8)
4036 goto illegal_op;
4037 sse_op2 = sse_op_table4[val][b1];
4038 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4039 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4040 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4041 break;
4042 case 0xf7:
4043             /* maskmov: we must prepare A0 */
4044 if (mod != 3)
4045 goto illegal_op;
4046 #ifdef TARGET_X86_64
4047 if (s->aflag == 2) {
4048 gen_op_movq_A0_reg(R_EDI);
4049 } else
4050 #endif
4052 gen_op_movl_A0_reg(R_EDI);
4053 if (s->aflag == 0)
4054 gen_op_andl_A0_ffff();
4056 gen_add_A0_ds_seg(s);
4058 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4059 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4060 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_ptr1, cpu_A0);
4061 break;
4062 default:
4063 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4064 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4065 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4066 break;
4068 if (b == 0x2e || b == 0x2f) {
4069 s->cc_op = CC_OP_EFLAGS;
4074 /* convert one instruction. s->is_jmp is set if the translation must
4075 be stopped. Return the next pc value */
4076 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4078 int b, prefixes, aflag, dflag;
4079 int shift, ot;
4080 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4081 target_ulong next_eip, tval;
4082 int rex_w, rex_r;
4084 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
4085 tcg_gen_debug_insn_start(pc_start);
4086 s->pc = pc_start;
4087 prefixes = 0;
4088 aflag = s->code32;
4089 dflag = s->code32;
4090 s->override = -1;
4091 rex_w = -1;
4092 rex_r = 0;
4093 #ifdef TARGET_X86_64
4094 s->rex_x = 0;
4095 s->rex_b = 0;
4096 x86_64_hregs = 0;
4097 #endif
4098 s->rip_offset = 0; /* for relative ip address */
4099 next_byte:
4100 b = ldub_code(s->pc);
4101 s->pc++;
4102 /* check prefixes */
4103 #ifdef TARGET_X86_64
4104 if (CODE64(s)) {
4105 switch (b) {
4106 case 0xf3:
4107 prefixes |= PREFIX_REPZ;
4108 goto next_byte;
4109 case 0xf2:
4110 prefixes |= PREFIX_REPNZ;
4111 goto next_byte;
4112 case 0xf0:
4113 prefixes |= PREFIX_LOCK;
4114 goto next_byte;
4115 case 0x2e:
4116 s->override = R_CS;
4117 goto next_byte;
4118 case 0x36:
4119 s->override = R_SS;
4120 goto next_byte;
4121 case 0x3e:
4122 s->override = R_DS;
4123 goto next_byte;
4124 case 0x26:
4125 s->override = R_ES;
4126 goto next_byte;
4127 case 0x64:
4128 s->override = R_FS;
4129 goto next_byte;
4130 case 0x65:
4131 s->override = R_GS;
4132 goto next_byte;
4133 case 0x66:
4134 prefixes |= PREFIX_DATA;
4135 goto next_byte;
4136 case 0x67:
4137 prefixes |= PREFIX_ADR;
4138 goto next_byte;
4139 case 0x40 ... 0x4f:
4140 /* REX prefix */
4141 rex_w = (b >> 3) & 1;
4142 rex_r = (b & 0x4) << 1;
4143 s->rex_x = (b & 0x2) << 2;
4144 REX_B(s) = (b & 0x1) << 3;
4145 x86_64_hregs = 1; /* select uniform byte register addressing */
4146 goto next_byte;
4148 if (rex_w == 1) {
4149 /* 0x66 is ignored if rex.w is set */
4150 dflag = 2;
4151 } else {
4152 if (prefixes & PREFIX_DATA)
4153 dflag ^= 1;
4155 if (!(prefixes & PREFIX_ADR))
4156 aflag = 2;
4157 } else
4158 #endif
4160 switch (b) {
4161 case 0xf3:
4162 prefixes |= PREFIX_REPZ;
4163 goto next_byte;
4164 case 0xf2:
4165 prefixes |= PREFIX_REPNZ;
4166 goto next_byte;
4167 case 0xf0:
4168 prefixes |= PREFIX_LOCK;
4169 goto next_byte;
4170 case 0x2e:
4171 s->override = R_CS;
4172 goto next_byte;
4173 case 0x36:
4174 s->override = R_SS;
4175 goto next_byte;
4176 case 0x3e:
4177 s->override = R_DS;
4178 goto next_byte;
4179 case 0x26:
4180 s->override = R_ES;
4181 goto next_byte;
4182 case 0x64:
4183 s->override = R_FS;
4184 goto next_byte;
4185 case 0x65:
4186 s->override = R_GS;
4187 goto next_byte;
4188 case 0x66:
4189 prefixes |= PREFIX_DATA;
4190 goto next_byte;
4191 case 0x67:
4192 prefixes |= PREFIX_ADR;
4193 goto next_byte;
4195 if (prefixes & PREFIX_DATA)
4196 dflag ^= 1;
4197 if (prefixes & PREFIX_ADR)
4198 aflag ^= 1;
4201 s->prefix = prefixes;
4202 s->aflag = aflag;
4203 s->dflag = dflag;
4205 /* lock generation */
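/* the lock helper serializes against the other virtual CPUs (a
   global lock in the user-mode emulator) so that the following
   read-modify-write sequence is atomic; the matching unlock helper
   is presumably emitted once the instruction has been translated. */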
4206 if (prefixes & PREFIX_LOCK)
4207 gen_helper_lock();
4209 /* now check op code */
4210 reswitch:
4211 switch(b) {
4212 case 0x0f:
4213 /**************************/
4214 /* extended op code */
4215 b = ldub_code(s->pc++) | 0x100;
4216 goto reswitch;
4218 /**************************/
4219 /* arith & logic */
4220 case 0x00 ... 0x05:
4221 case 0x08 ... 0x0d:
4222 case 0x10 ... 0x15:
4223 case 0x18 ... 0x1d:
4224 case 0x20 ... 0x25:
4225 case 0x28 ... 0x2d:
4226 case 0x30 ... 0x35:
4227 case 0x38 ... 0x3d:
4229 int op, f, val;
4230 op = (b >> 3) & 7;
4231 f = (b >> 1) & 3;
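/* the classic ALU opcodes 0x00-0x3d encode the operation in bits
   5:3 (ADD, OR, ADC, SBB, AND, SUB, XOR, CMP) and the operand form
   in the low bits: bit 0 selects byte vs word/long operands and
   bits 2:1 select Ev,Gv / Gv,Ev / accumulator,immediate. */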
4233 if ((b & 1) == 0)
4234 ot = OT_BYTE;
4235 else
4236 ot = dflag + OT_WORD;
4238 switch(f) {
4239 case 0: /* OP Ev, Gv */
4240 modrm = ldub_code(s->pc++);
4241 reg = ((modrm >> 3) & 7) | rex_r;
4242 mod = (modrm >> 6) & 3;
4243 rm = (modrm & 7) | REX_B(s);
4244 if (mod != 3) {
4245 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4246 opreg = OR_TMP0;
4247 } else if (op == OP_XORL && rm == reg) {
4248 xor_zero:
4249 /* xor reg, reg optimisation */
4250 gen_op_movl_T0_0();
4251 s->cc_op = CC_OP_LOGICB + ot;
4252 gen_op_mov_reg_T0(ot, reg);
4253 gen_op_update1_cc();
4254 break;
4255 } else {
4256 opreg = rm;
4258 gen_op_mov_TN_reg(ot, 1, reg);
4259 gen_op(s, op, ot, opreg);
4260 break;
4261 case 1: /* OP Gv, Ev */
4262 modrm = ldub_code(s->pc++);
4263 mod = (modrm >> 6) & 3;
4264 reg = ((modrm >> 3) & 7) | rex_r;
4265 rm = (modrm & 7) | REX_B(s);
4266 if (mod != 3) {
4267 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4268 gen_op_ld_T1_A0(ot + s->mem_index);
4269 } else if (op == OP_XORL && rm == reg) {
4270 goto xor_zero;
4271 } else {
4272 gen_op_mov_TN_reg(ot, 1, rm);
4274 gen_op(s, op, ot, reg);
4275 break;
4276 case 2: /* OP A, Iv */
4277 val = insn_get(s, ot);
4278 gen_op_movl_T1_im(val);
4279 gen_op(s, op, ot, OR_EAX);
4280 break;
4283 break;
4285 case 0x82:
4286 if (CODE64(s))
4287 goto illegal_op;
4288 case 0x80: /* GRP1 */
4289 case 0x81:
4290 case 0x83:
4292 int val;
4294 if ((b & 1) == 0)
4295 ot = OT_BYTE;
4296 else
4297 ot = dflag + OT_WORD;
4299 modrm = ldub_code(s->pc++);
4300 mod = (modrm >> 6) & 3;
4301 rm = (modrm & 7) | REX_B(s);
4302 op = (modrm >> 3) & 7;
4304 if (mod != 3) {
4305 if (b == 0x83)
4306 s->rip_offset = 1;
4307 else
4308 s->rip_offset = insn_const_size(ot);
4309 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4310 opreg = OR_TMP0;
4311 } else {
4312 opreg = rm;
4315 switch(b) {
4316 default:
4317 case 0x80:
4318 case 0x81:
4319 case 0x82:
4320 val = insn_get(s, ot);
4321 break;
4322 case 0x83:
4323 val = (int8_t)insn_get(s, OT_BYTE);
4324 break;
4326 gen_op_movl_T1_im(val);
4327 gen_op(s, op, ot, opreg);
4329 break;
4331 /**************************/
4332 /* inc, dec, and other misc arith */
4333 case 0x40 ... 0x47: /* inc Gv */
4334 ot = dflag ? OT_LONG : OT_WORD;
4335 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4336 break;
4337 case 0x48 ... 0x4f: /* dec Gv */
4338 ot = dflag ? OT_LONG : OT_WORD;
4339 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4340 break;
4341 case 0xf6: /* GRP3 */
4342 case 0xf7:
4343 if ((b & 1) == 0)
4344 ot = OT_BYTE;
4345 else
4346 ot = dflag + OT_WORD;
4348 modrm = ldub_code(s->pc++);
4349 mod = (modrm >> 6) & 3;
4350 rm = (modrm & 7) | REX_B(s);
4351 op = (modrm >> 3) & 7;
4352 if (mod != 3) {
4353 if (op == 0)
4354 s->rip_offset = insn_const_size(ot);
4355 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4356 gen_op_ld_T0_A0(ot + s->mem_index);
4357 } else {
4358 gen_op_mov_TN_reg(ot, 0, rm);
4361 switch(op) {
4362 case 0: /* test */
4363 val = insn_get(s, ot);
4364 gen_op_movl_T1_im(val);
4365 gen_op_testl_T0_T1_cc();
4366 s->cc_op = CC_OP_LOGICB + ot;
4367 break;
4368 case 2: /* not */
4369 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4370 if (mod != 3) {
4371 gen_op_st_T0_A0(ot + s->mem_index);
4372 } else {
4373 gen_op_mov_reg_T0(ot, rm);
4375 break;
4376 case 3: /* neg */
4377 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4378 if (mod != 3) {
4379 gen_op_st_T0_A0(ot + s->mem_index);
4380 } else {
4381 gen_op_mov_reg_T0(ot, rm);
4383 gen_op_update_neg_cc();
4384 s->cc_op = CC_OP_SUBB + ot;
4385 break;
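/* for mul, CF and OF are set iff the high half of the product is
   non zero, so cc_dst receives the low result and cc_src the high
   part (for the byte form, the bits above bit 7). */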
4386 case 4: /* mul */
4387 switch(ot) {
4388 case OT_BYTE:
4389 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4390 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4391 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4392 /* XXX: use 32 bit mul which could be faster */
4393 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4394 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4395 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4396 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4397 s->cc_op = CC_OP_MULB;
4398 break;
4399 case OT_WORD:
4400 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4401 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4402 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4403 /* XXX: use 32 bit mul which could be faster */
4404 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4405 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4406 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4407 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4408 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4409 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4410 s->cc_op = CC_OP_MULW;
4411 break;
4412 default:
4413 case OT_LONG:
4414 #ifdef TARGET_X86_64
4415 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4416 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4417 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4418 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4419 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4420 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4421 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4422 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4423 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4424 #else
4426 TCGv_i64 t0, t1;
4427 t0 = tcg_temp_new_i64();
4428 t1 = tcg_temp_new_i64();
4429 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4430 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4431 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4432 tcg_gen_mul_i64(t0, t0, t1);
4433 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4434 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4435 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4436 tcg_gen_shri_i64(t0, t0, 32);
4437 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4438 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4439 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4441 #endif
4442 s->cc_op = CC_OP_MULL;
4443 break;
4444 #ifdef TARGET_X86_64
4445 case OT_QUAD:
4446 gen_helper_mulq_EAX_T0(cpu_T[0]);
4447 s->cc_op = CC_OP_MULQ;
4448 break;
4449 #endif
4451 break;
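/* for imul the overflow condition is "high part != sign extension
   of the low part", hence cc_src = result - sext(result) below: it
   is zero exactly when the signed product fits. */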
4452 case 5: /* imul */
4453 switch(ot) {
4454 case OT_BYTE:
4455 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4456 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4457 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4458 /* XXX: use 32 bit mul which could be faster */
4459 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4460 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4461 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4462 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4463 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4464 s->cc_op = CC_OP_MULB;
4465 break;
4466 case OT_WORD:
4467 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4468 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4469 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4470 /* XXX: use 32 bit mul which could be faster */
4471 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4472 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4473 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4474 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4475 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4476 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4477 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4478 s->cc_op = CC_OP_MULW;
4479 break;
4480 default:
4481 case OT_LONG:
4482 #ifdef TARGET_X86_64
4483 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4484 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4485 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4486 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4487 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4488 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4489 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4490 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4491 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4492 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4493 #else
4495 TCGv_i64 t0, t1;
4496 t0 = tcg_temp_new_i64();
4497 t1 = tcg_temp_new_i64();
4498 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4499 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4500 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4501 tcg_gen_mul_i64(t0, t0, t1);
4502 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4503 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4504 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4505 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4506 tcg_gen_shri_i64(t0, t0, 32);
4507 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4508 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4509 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4511 #endif
4512 s->cc_op = CC_OP_MULL;
4513 break;
4514 #ifdef TARGET_X86_64
4515 case OT_QUAD:
4516 gen_helper_imulq_EAX_T0(cpu_T[0]);
4517 s->cc_op = CC_OP_MULQ;
4518 break;
4519 #endif
4521 break;
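/* division can raise #DE, so EIP is synced with gen_jmp_im() before
   each div/idiv helper to keep the exception precise. */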
4522 case 6: /* div */
4523 switch(ot) {
4524 case OT_BYTE:
4525 gen_jmp_im(pc_start - s->cs_base);
4526 gen_helper_divb_AL(cpu_T[0]);
4527 break;
4528 case OT_WORD:
4529 gen_jmp_im(pc_start - s->cs_base);
4530 gen_helper_divw_AX(cpu_T[0]);
4531 break;
4532 default:
4533 case OT_LONG:
4534 gen_jmp_im(pc_start - s->cs_base);
4535 gen_helper_divl_EAX(cpu_T[0]);
4536 break;
4537 #ifdef TARGET_X86_64
4538 case OT_QUAD:
4539 gen_jmp_im(pc_start - s->cs_base);
4540 gen_helper_divq_EAX(cpu_T[0]);
4541 break;
4542 #endif
4544 break;
4545 case 7: /* idiv */
4546 switch(ot) {
4547 case OT_BYTE:
4548 gen_jmp_im(pc_start - s->cs_base);
4549 gen_helper_idivb_AL(cpu_T[0]);
4550 break;
4551 case OT_WORD:
4552 gen_jmp_im(pc_start - s->cs_base);
4553 gen_helper_idivw_AX(cpu_T[0]);
4554 break;
4555 default:
4556 case OT_LONG:
4557 gen_jmp_im(pc_start - s->cs_base);
4558 gen_helper_idivl_EAX(cpu_T[0]);
4559 break;
4560 #ifdef TARGET_X86_64
4561 case OT_QUAD:
4562 gen_jmp_im(pc_start - s->cs_base);
4563 gen_helper_idivq_EAX(cpu_T[0]);
4564 break;
4565 #endif
4567 break;
4568 default:
4569 goto illegal_op;
4571 break;
4573 case 0xfe: /* GRP4 */
4574 case 0xff: /* GRP5 */
4575 if ((b & 1) == 0)
4576 ot = OT_BYTE;
4577 else
4578 ot = dflag + OT_WORD;
4580 modrm = ldub_code(s->pc++);
4581 mod = (modrm >> 6) & 3;
4582 rm = (modrm & 7) | REX_B(s);
4583 op = (modrm >> 3) & 7;
4584 if (op >= 2 && b == 0xfe) {
4585 goto illegal_op;
4587 if (CODE64(s)) {
4588 if (op == 2 || op == 4) {
4589 /* operand size for jumps is 64 bit */
4590 ot = OT_QUAD;
4591 } else if (op == 3 || op == 5) {
4592 ot = dflag ? OT_LONG + (rex_w == 1) : OT_WORD;
4593 } else if (op == 6) {
4594 /* default push size is 64 bit */
4595 ot = dflag ? OT_QUAD : OT_WORD;
4598 if (mod != 3) {
4599 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4600 if (op >= 2 && op != 3 && op != 5)
4601 gen_op_ld_T0_A0(ot + s->mem_index);
4602 } else {
4603 gen_op_mov_TN_reg(ot, 0, rm);
4606 switch(op) {
4607 case 0: /* inc Ev */
4608 if (mod != 3)
4609 opreg = OR_TMP0;
4610 else
4611 opreg = rm;
4612 gen_inc(s, ot, opreg, 1);
4613 break;
4614 case 1: /* dec Ev */
4615 if (mod != 3)
4616 opreg = OR_TMP0;
4617 else
4618 opreg = rm;
4619 gen_inc(s, ot, opreg, -1);
4620 break;
4621 case 2: /* call Ev */
4622 /* XXX: optimize: the 'and' below is unnecessary when the operand was loaded from memory with a 16 bit load */
4623 if (s->dflag == 0)
4624 gen_op_andl_T0_ffff();
4625 next_eip = s->pc - s->cs_base;
4626 gen_movtl_T1_im(next_eip);
4627 gen_push_T1(s);
4628 gen_op_jmp_T0();
4629 gen_eob(s);
4630 break;
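/* the far pointer operand is laid out offset first, selector after
   it, so T1 receives the offset and T0 the 16 bit selector loaded
   at A0 plus the offset size. */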
4631 case 3: /* lcall Ev */
4632 gen_op_ld_T1_A0(ot + s->mem_index);
4633 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4634 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4635 do_lcall:
4636 if (s->pe && !s->vm86) {
4637 if (s->cc_op != CC_OP_DYNAMIC)
4638 gen_op_set_cc_op(s->cc_op);
4639 gen_jmp_im(pc_start - s->cs_base);
4640 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4641 gen_helper_lcall_protected(cpu_tmp2_i32, cpu_T[1],
4642 tcg_const_i32(dflag),
4643 tcg_const_i32(s->pc - pc_start));
4644 } else {
4645 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4646 gen_helper_lcall_real(cpu_tmp2_i32, cpu_T[1],
4647 tcg_const_i32(dflag),
4648 tcg_const_i32(s->pc - s->cs_base));
4650 gen_eob(s);
4651 break;
4652 case 4: /* jmp Ev */
4653 if (s->dflag == 0)
4654 gen_op_andl_T0_ffff();
4655 gen_op_jmp_T0();
4656 gen_eob(s);
4657 break;
4658 case 5: /* ljmp Ev */
4659 gen_op_ld_T1_A0(ot + s->mem_index);
4660 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4661 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4662 do_ljmp:
4663 if (s->pe && !s->vm86) {
4664 if (s->cc_op != CC_OP_DYNAMIC)
4665 gen_op_set_cc_op(s->cc_op);
4666 gen_jmp_im(pc_start - s->cs_base);
4667 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4668 gen_helper_ljmp_protected(cpu_tmp2_i32, cpu_T[1],
4669 tcg_const_i32(s->pc - pc_start));
4670 } else {
4671 gen_op_movl_seg_T0_vm(R_CS);
4672 gen_op_movl_T0_T1();
4673 gen_op_jmp_T0();
4675 gen_eob(s);
4676 break;
4677 case 6: /* push Ev */
4678 gen_push_T0(s);
4679 break;
4680 default:
4681 goto illegal_op;
4683 break;
4685 case 0x84: /* test Ev, Gv */
4686 case 0x85:
4687 if ((b & 1) == 0)
4688 ot = OT_BYTE;
4689 else
4690 ot = dflag + OT_WORD;
4692 modrm = ldub_code(s->pc++);
4693 reg = ((modrm >> 3) & 7) | rex_r;
4695 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4696 gen_op_mov_TN_reg(ot, 1, reg);
4697 gen_op_testl_T0_T1_cc();
4698 s->cc_op = CC_OP_LOGICB + ot;
4699 break;
4701 case 0xa8: /* test eAX, Iv */
4702 case 0xa9:
4703 if ((b & 1) == 0)
4704 ot = OT_BYTE;
4705 else
4706 ot = dflag + OT_WORD;
4707 val = insn_get(s, ot);
4709 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4710 gen_op_movl_T1_im(val);
4711 gen_op_testl_T0_T1_cc();
4712 s->cc_op = CC_OP_LOGICB + ot;
4713 break;
4715 case 0x98: /* CWDE/CBW */
4716 #ifdef TARGET_X86_64
4717 if (dflag == 2) {
4718 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4719 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4720 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4721 } else
4722 #endif
4723 if (dflag == 1) {
4724 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4725 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4726 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4727 } else {
4728 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4729 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4730 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4732 break;
4733 case 0x99: /* CDQ/CWD */
4734 #ifdef TARGET_X86_64
4735 if (dflag == 2) {
4736 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4737 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4738 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4739 } else
4740 #endif
4741 if (dflag == 1) {
4742 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4743 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4744 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4745 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4746 } else {
4747 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4748 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4749 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4750 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4752 break;
4753 case 0x1af: /* imul Gv, Ev */
4754 case 0x69: /* imul Gv, Ev, I */
4755 case 0x6b:
4756 ot = dflag + OT_WORD;
4757 modrm = ldub_code(s->pc++);
4758 reg = ((modrm >> 3) & 7) | rex_r;
4759 if (b == 0x69)
4760 s->rip_offset = insn_const_size(ot);
4761 else if (b == 0x6b)
4762 s->rip_offset = 1;
4763 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4764 if (b == 0x69) {
4765 val = insn_get(s, ot);
4766 gen_op_movl_T1_im(val);
4767 } else if (b == 0x6b) {
4768 val = (int8_t)insn_get(s, OT_BYTE);
4769 gen_op_movl_T1_im(val);
4770 } else {
4771 gen_op_mov_TN_reg(ot, 1, reg);
4774 #ifdef TARGET_X86_64
4775 if (ot == OT_QUAD) {
4776 gen_helper_imulq_T0_T1(cpu_T[0], cpu_T[0], cpu_T[1]);
4777 } else
4778 #endif
4779 if (ot == OT_LONG) {
4780 #ifdef TARGET_X86_64
4781 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4782 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4783 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4784 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4785 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4786 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4787 #else
4789 TCGv_i64 t0, t1;
4790 t0 = tcg_temp_new_i64();
4791 t1 = tcg_temp_new_i64();
4792 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4793 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4794 tcg_gen_mul_i64(t0, t0, t1);
4795 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4796 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4797 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4798 tcg_gen_shri_i64(t0, t0, 32);
4799 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4800 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4802 #endif
4803 } else {
4804 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4805 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4806 /* XXX: use 32 bit mul which could be faster */
4807 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4808 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4809 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4810 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4812 gen_op_mov_reg_T0(ot, reg);
4813 s->cc_op = CC_OP_MULB + ot;
4814 break;
4815 case 0x1c0:
4816 case 0x1c1: /* xadd Ev, Gv */
4817 if ((b & 1) == 0)
4818 ot = OT_BYTE;
4819 else
4820 ot = dflag + OT_WORD;
4821 modrm = ldub_code(s->pc++);
4822 reg = ((modrm >> 3) & 7) | rex_r;
4823 mod = (modrm >> 6) & 3;
4824 if (mod == 3) {
4825 rm = (modrm & 7) | REX_B(s);
4826 gen_op_mov_TN_reg(ot, 0, reg);
4827 gen_op_mov_TN_reg(ot, 1, rm);
4828 gen_op_addl_T0_T1();
4829 gen_op_mov_reg_T1(ot, reg);
4830 gen_op_mov_reg_T0(ot, rm);
4831 } else {
4832 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4833 gen_op_mov_TN_reg(ot, 0, reg);
4834 gen_op_ld_T1_A0(ot + s->mem_index);
4835 gen_op_addl_T0_T1();
4836 gen_op_st_T0_A0(ot + s->mem_index);
4837 gen_op_mov_reg_T1(ot, reg);
4839 gen_op_update2_cc();
4840 s->cc_op = CC_OP_ADDB + ot;
4841 break;
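/* cmpxchg: if (accum == dest) { ZF = 1; dest = src; }
            else              { ZF = 0; accum = dest; }
   The memory form performs the store unconditionally (rewriting the
   old value on failure), matching the documented behaviour of the
   locked memory access. */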
4842 case 0x1b0:
4843 case 0x1b1: /* cmpxchg Ev, Gv */
4845 int label1, label2;
4846 TCGv t0, t1, t2, a0;
4848 if ((b & 1) == 0)
4849 ot = OT_BYTE;
4850 else
4851 ot = dflag + OT_WORD;
4852 modrm = ldub_code(s->pc++);
4853 reg = ((modrm >> 3) & 7) | rex_r;
4854 mod = (modrm >> 6) & 3;
4855 t0 = tcg_temp_local_new();
4856 t1 = tcg_temp_local_new();
4857 t2 = tcg_temp_local_new();
4858 a0 = tcg_temp_local_new();
4859 gen_op_mov_v_reg(ot, t1, reg);
4860 if (mod == 3) {
4861 rm = (modrm & 7) | REX_B(s);
4862 gen_op_mov_v_reg(ot, t0, rm);
4863 } else {
4864 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4865 tcg_gen_mov_tl(a0, cpu_A0);
4866 gen_op_ld_v(ot + s->mem_index, t0, a0);
4867 rm = 0; /* avoid warning */
4869 label1 = gen_new_label();
4870 tcg_gen_sub_tl(t2, cpu_regs[R_EAX], t0);
4871 gen_extu(ot, t2);
4872 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4873 if (mod == 3) {
4874 label2 = gen_new_label();
4875 gen_op_mov_reg_v(ot, R_EAX, t0);
4876 tcg_gen_br(label2);
4877 gen_set_label(label1);
4878 gen_op_mov_reg_v(ot, rm, t1);
4879 gen_set_label(label2);
4880 } else {
4881 tcg_gen_mov_tl(t1, t0);
4882 gen_op_mov_reg_v(ot, R_EAX, t0);
4883 gen_set_label(label1);
4884 /* always store */
4885 gen_op_st_v(ot + s->mem_index, t1, a0);
4887 tcg_gen_mov_tl(cpu_cc_src, t0);
4888 tcg_gen_mov_tl(cpu_cc_dst, t2);
4889 s->cc_op = CC_OP_SUBB + ot;
4890 tcg_temp_free(t0);
4891 tcg_temp_free(t1);
4892 tcg_temp_free(t2);
4893 tcg_temp_free(a0);
4895 break;
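/* cmpxchg8b compares EDX:EAX with the m64 operand (cmpxchg16b
   compares RDX:RAX with m128 when REX.W is set, gated on the CX16
   CPUID bit); a register operand or a ModRM reg field other than 1
   is illegal, hence the mod/reg check below. */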
4896 case 0x1c7: /* cmpxchg8b */
4897 modrm = ldub_code(s->pc++);
4898 mod = (modrm >> 6) & 3;
4899 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4900 goto illegal_op;
4901 #ifdef TARGET_X86_64
4902 if (dflag == 2) {
4903 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4904 goto illegal_op;
4905 gen_jmp_im(pc_start - s->cs_base);
4906 if (s->cc_op != CC_OP_DYNAMIC)
4907 gen_op_set_cc_op(s->cc_op);
4908 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4909 gen_helper_cmpxchg16b(cpu_A0);
4910 } else
4911 #endif
4913 if (!(s->cpuid_features & CPUID_CX8))
4914 goto illegal_op;
4915 gen_jmp_im(pc_start - s->cs_base);
4916 if (s->cc_op != CC_OP_DYNAMIC)
4917 gen_op_set_cc_op(s->cc_op);
4918 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4919 gen_helper_cmpxchg8b(cpu_A0);
4921 s->cc_op = CC_OP_EFLAGS;
4922 break;
4924 /**************************/
4925 /* push/pop */
4926 case 0x50 ... 0x57: /* push */
4927 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4928 gen_push_T0(s);
4929 break;
4930 case 0x58 ... 0x5f: /* pop */
4931 if (CODE64(s)) {
4932 ot = dflag ? OT_QUAD : OT_WORD;
4933 } else {
4934 ot = dflag + OT_WORD;
4936 gen_pop_T0(s);
4937 /* NOTE: order is important for pop %sp */
4938 gen_pop_update(s);
4939 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4940 break;
4941 case 0x60: /* pusha */
4942 if (CODE64(s))
4943 goto illegal_op;
4944 gen_pusha(s);
4945 break;
4946 case 0x61: /* popa */
4947 if (CODE64(s))
4948 goto illegal_op;
4949 gen_popa(s);
4950 break;
4951 case 0x68: /* push Iv */
4952 case 0x6a:
4953 if (CODE64(s)) {
4954 ot = dflag ? OT_QUAD : OT_WORD;
4955 } else {
4956 ot = dflag + OT_WORD;
4958 if (b == 0x68)
4959 val = insn_get(s, ot);
4960 else
4961 val = (int8_t)insn_get(s, OT_BYTE);
4962 gen_op_movl_T0_im(val);
4963 gen_push_T0(s);
4964 break;
4965 case 0x8f: /* pop Ev */
4966 if (CODE64(s)) {
4967 ot = dflag ? OT_QUAD : OT_WORD;
4968 } else {
4969 ot = dflag + OT_WORD;
4971 modrm = ldub_code(s->pc++);
4972 mod = (modrm >> 6) & 3;
4973 gen_pop_T0(s);
4974 if (mod == 3) {
4975 /* NOTE: order is important for pop %sp */
4976 gen_pop_update(s);
4977 rm = (modrm & 7) | REX_B(s);
4978 gen_op_mov_reg_T0(ot, rm);
4979 } else {
4980 /* NOTE: order is important too for MMU exceptions */
4981 s->popl_esp_hack = 1 << ot;
4982 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4983 s->popl_esp_hack = 0;
4984 gen_pop_update(s);
4986 break;
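/* enter takes a 16 bit frame size and an 8 bit nesting level;
   architecturally the level is used modulo 32 (gen_enter is assumed
   to apply that mask). */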
4987 case 0xc8: /* enter */
4989 int level;
4990 val = lduw_code(s->pc);
4991 s->pc += 2;
4992 level = ldub_code(s->pc++);
4993 gen_enter(s, val, level);
4995 break;
4996 case 0xc9: /* leave */
4997 /* XXX: exception not precise (ESP is updated before potential exception) */
4998 if (CODE64(s)) {
4999 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5000 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5001 } else if (s->ss32) {
5002 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5003 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5004 } else {
5005 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5006 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5008 gen_pop_T0(s);
5009 if (CODE64(s)) {
5010 ot = dflag ? OT_QUAD : OT_WORD;
5011 } else {
5012 ot = dflag + OT_WORD;
5014 gen_op_mov_reg_T0(ot, R_EBP);
5015 gen_pop_update(s);
5016 break;
5017 case 0x06: /* push es */
5018 case 0x0e: /* push cs */
5019 case 0x16: /* push ss */
5020 case 0x1e: /* push ds */
5021 if (CODE64(s))
5022 goto illegal_op;
5023 gen_op_movl_T0_seg(b >> 3);
5024 gen_push_T0(s);
5025 break;
5026 case 0x1a0: /* push fs */
5027 case 0x1a8: /* push gs */
5028 gen_op_movl_T0_seg((b >> 3) & 7);
5029 gen_push_T0(s);
5030 break;
5031 case 0x07: /* pop es */
5032 case 0x17: /* pop ss */
5033 case 0x1f: /* pop ds */
5034 if (CODE64(s))
5035 goto illegal_op;
5036 reg = b >> 3;
5037 gen_pop_T0(s);
5038 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5039 gen_pop_update(s);
5040 if (reg == R_SS) {
5041 /* if reg == SS, inhibit interrupts/trace. */
5042 /* If several instructions disable interrupts, only the
5043 _first_ does it */
5044 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5045 gen_helper_set_inhibit_irq();
5046 s->tf = 0;
5048 if (s->is_jmp) {
5049 gen_jmp_im(s->pc - s->cs_base);
5050 gen_eob(s);
5052 break;
5053 case 0x1a1: /* pop fs */
5054 case 0x1a9: /* pop gs */
5055 gen_pop_T0(s);
5056 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5057 gen_pop_update(s);
5058 if (s->is_jmp) {
5059 gen_jmp_im(s->pc - s->cs_base);
5060 gen_eob(s);
5062 break;
5064 /**************************/
5065 /* mov */
5066 case 0x88:
5067 case 0x89: /* mov Gv, Ev */
5068 if ((b & 1) == 0)
5069 ot = OT_BYTE;
5070 else
5071 ot = dflag + OT_WORD;
5072 modrm = ldub_code(s->pc++);
5073 reg = ((modrm >> 3) & 7) | rex_r;
5075 /* generate a generic store */
5076 gen_ldst_modrm(s, modrm, ot, reg, 1);
5077 break;
5078 case 0xc6:
5079 case 0xc7: /* mov Ev, Iv */
5080 if ((b & 1) == 0)
5081 ot = OT_BYTE;
5082 else
5083 ot = dflag + OT_WORD;
5084 modrm = ldub_code(s->pc++);
5085 mod = (modrm >> 6) & 3;
5086 if (mod != 3) {
5087 s->rip_offset = insn_const_size(ot);
5088 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5090 val = insn_get(s, ot);
5091 gen_op_movl_T0_im(val);
5092 if (mod != 3)
5093 gen_op_st_T0_A0(ot + s->mem_index);
5094 else
5095 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5096 break;
5097 case 0x8a:
5098 case 0x8b: /* mov Ev, Gv */
5099 if ((b & 1) == 0)
5100 ot = OT_BYTE;
5101 else
5102 ot = OT_WORD + dflag;
5103 modrm = ldub_code(s->pc++);
5104 reg = ((modrm >> 3) & 7) | rex_r;
5106 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5107 gen_op_mov_reg_T0(ot, reg);
5108 break;
5109 case 0x8e: /* mov seg, Gv */
5110 modrm = ldub_code(s->pc++);
5111 reg = (modrm >> 3) & 7;
5112 if (reg >= 6 || reg == R_CS)
5113 goto illegal_op;
5114 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5115 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5116 if (reg == R_SS) {
5117 /* if reg == SS, inhibit interrupts/trace */
5118 /* If several instructions disable interrupts, only the
5119 _first_ does it */
5120 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5121 gen_helper_set_inhibit_irq();
5122 s->tf = 0;
5124 if (s->is_jmp) {
5125 gen_jmp_im(s->pc - s->cs_base);
5126 gen_eob(s);
5128 break;
5129 case 0x8c: /* mov Gv, seg */
5130 modrm = ldub_code(s->pc++);
5131 reg = (modrm >> 3) & 7;
5132 mod = (modrm >> 6) & 3;
5133 if (reg >= 6)
5134 goto illegal_op;
5135 gen_op_movl_T0_seg(reg);
5136 if (mod == 3)
5137 ot = OT_WORD + dflag;
5138 else
5139 ot = OT_WORD;
5140 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5141 break;
5143 case 0x1b6: /* movzbS Gv, Eb */
5144 case 0x1b7: /* movzwS Gv, Ew */
5145 case 0x1be: /* movsbS Gv, Eb */
5146 case 0x1bf: /* movswS Gv, Ew */
5148 int d_ot;
5149 /* d_ot is the size of destination */
5150 d_ot = dflag + OT_WORD;
5151 /* ot is the size of source */
5152 ot = (b & 1) + OT_BYTE;
5153 modrm = ldub_code(s->pc++);
5154 reg = ((modrm >> 3) & 7) | rex_r;
5155 mod = (modrm >> 6) & 3;
5156 rm = (modrm & 7) | REX_B(s);
5158 if (mod == 3) {
5159 gen_op_mov_TN_reg(ot, 0, rm);
5160 switch(ot | (b & 8)) {
5161 case OT_BYTE:
5162 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5163 break;
5164 case OT_BYTE | 8:
5165 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5166 break;
5167 case OT_WORD:
5168 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5169 break;
5170 default:
5171 case OT_WORD | 8:
5172 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5173 break;
5175 gen_op_mov_reg_T0(d_ot, reg);
5176 } else {
5177 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5178 if (b & 8) {
5179 gen_op_lds_T0_A0(ot + s->mem_index);
5180 } else {
5181 gen_op_ldu_T0_A0(ot + s->mem_index);
5183 gen_op_mov_reg_T0(d_ot, reg);
5186 break;
5188 case 0x8d: /* lea */
5189 ot = dflag + OT_WORD;
5190 modrm = ldub_code(s->pc++);
5191 mod = (modrm >> 6) & 3;
5192 if (mod == 3)
5193 goto illegal_op;
5194 reg = ((modrm >> 3) & 7) | rex_r;
5195 /* we must ensure that no segment is added */
5196 s->override = -1;
5197 val = s->addseg;
5198 s->addseg = 0;
5199 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5200 s->addseg = val;
5201 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5202 break;
5204 case 0xa0: /* mov EAX, Ov */
5205 case 0xa1:
5206 case 0xa2: /* mov Ov, EAX */
5207 case 0xa3:
5209 target_ulong offset_addr;
5211 if ((b & 1) == 0)
5212 ot = OT_BYTE;
5213 else
5214 ot = dflag + OT_WORD;
5215 #ifdef TARGET_X86_64
5216 if (s->aflag == 2) {
5217 offset_addr = ldq_code(s->pc);
5218 s->pc += 8;
5219 gen_op_movq_A0_im(offset_addr);
5220 } else
5221 #endif
5223 if (s->aflag) {
5224 offset_addr = insn_get(s, OT_LONG);
5225 } else {
5226 offset_addr = insn_get(s, OT_WORD);
5228 gen_op_movl_A0_im(offset_addr);
5230 gen_add_A0_ds_seg(s);
5231 if ((b & 2) == 0) {
5232 gen_op_ld_T0_A0(ot + s->mem_index);
5233 gen_op_mov_reg_T0(ot, R_EAX);
5234 } else {
5235 gen_op_mov_TN_reg(ot, 0, R_EAX);
5236 gen_op_st_T0_A0(ot + s->mem_index);
5239 break;
5240 case 0xd7: /* xlat */
5241 #ifdef TARGET_X86_64
5242 if (s->aflag == 2) {
5243 gen_op_movq_A0_reg(R_EBX);
5244 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5245 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5246 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5247 } else
5248 #endif
5250 gen_op_movl_A0_reg(R_EBX);
5251 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5252 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5253 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5254 if (s->aflag == 0)
5255 gen_op_andl_A0_ffff();
5256 else
5257 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5259 gen_add_A0_ds_seg(s);
5260 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5261 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5262 break;
5263 case 0xb0 ... 0xb7: /* mov R, Ib */
5264 val = insn_get(s, OT_BYTE);
5265 gen_op_movl_T0_im(val);
5266 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5267 break;
5268 case 0xb8 ... 0xbf: /* mov R, Iv */
5269 #ifdef TARGET_X86_64
5270 if (dflag == 2) {
5271 uint64_t tmp;
5272 /* 64 bit case */
5273 tmp = ldq_code(s->pc);
5274 s->pc += 8;
5275 reg = (b & 7) | REX_B(s);
5276 gen_movtl_T0_im(tmp);
5277 gen_op_mov_reg_T0(OT_QUAD, reg);
5278 } else
5279 #endif
5281 ot = dflag ? OT_LONG : OT_WORD;
5282 val = insn_get(s, ot);
5283 reg = (b & 7) | REX_B(s);
5284 gen_op_movl_T0_im(val);
5285 gen_op_mov_reg_T0(ot, reg);
5287 break;
5289 case 0x91 ... 0x97: /* xchg R, EAX */
5290 do_xchg_reg_eax:
5291 ot = dflag + OT_WORD;
5292 reg = (b & 7) | REX_B(s);
5293 rm = R_EAX;
5294 goto do_xchg_reg;
5295 case 0x86:
5296 case 0x87: /* xchg Ev, Gv */
5297 if ((b & 1) == 0)
5298 ot = OT_BYTE;
5299 else
5300 ot = dflag + OT_WORD;
5301 modrm = ldub_code(s->pc++);
5302 reg = ((modrm >> 3) & 7) | rex_r;
5303 mod = (modrm >> 6) & 3;
5304 if (mod == 3) {
5305 rm = (modrm & 7) | REX_B(s);
5306 do_xchg_reg:
5307 gen_op_mov_TN_reg(ot, 0, reg);
5308 gen_op_mov_TN_reg(ot, 1, rm);
5309 gen_op_mov_reg_T0(ot, rm);
5310 gen_op_mov_reg_T1(ot, reg);
5311 } else {
5312 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5313 gen_op_mov_TN_reg(ot, 0, reg);
5314 /* for xchg, lock is implicit */
5315 if (!(prefixes & PREFIX_LOCK))
5316 gen_helper_lock();
5317 gen_op_ld_T1_A0(ot + s->mem_index);
5318 gen_op_st_T0_A0(ot + s->mem_index);
5319 if (!(prefixes & PREFIX_LOCK))
5320 gen_helper_unlock();
5321 gen_op_mov_reg_T1(ot, reg);
5323 break;
5324 case 0xc4: /* les Gv */
5325 if (CODE64(s))
5326 goto illegal_op;
5327 op = R_ES;
5328 goto do_lxx;
5329 case 0xc5: /* lds Gv */
5330 if (CODE64(s))
5331 goto illegal_op;
5332 op = R_DS;
5333 goto do_lxx;
5334 case 0x1b2: /* lss Gv */
5335 op = R_SS;
5336 goto do_lxx;
5337 case 0x1b4: /* lfs Gv */
5338 op = R_FS;
5339 goto do_lxx;
5340 case 0x1b5: /* lgs Gv */
5341 op = R_GS;
5342 do_lxx:
5343 ot = dflag ? OT_LONG : OT_WORD;
5344 modrm = ldub_code(s->pc++);
5345 reg = ((modrm >> 3) & 7) | rex_r;
5346 mod = (modrm >> 6) & 3;
5347 if (mod == 3)
5348 goto illegal_op;
5349 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5350 gen_op_ld_T1_A0(ot + s->mem_index);
5351 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5352 /* load the segment first to handle exceptions properly */
5353 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5354 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5355 /* then put the data */
5356 gen_op_mov_reg_T1(ot, reg);
5357 if (s->is_jmp) {
5358 gen_jmp_im(s->pc - s->cs_base);
5359 gen_eob(s);
5361 break;
5363 /************************/
5364 /* shifts */
5365 case 0xc0:
5366 case 0xc1:
5367 /* shift Ev,Ib */
5368 shift = 2;
5369 grp2:
5371 if ((b & 1) == 0)
5372 ot = OT_BYTE;
5373 else
5374 ot = dflag + OT_WORD;
5376 modrm = ldub_code(s->pc++);
5377 mod = (modrm >> 6) & 3;
5378 op = (modrm >> 3) & 7;
5380 if (mod != 3) {
5381 if (shift == 2) {
5382 s->rip_offset = 1;
5384 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5385 opreg = OR_TMP0;
5386 } else {
5387 opreg = (modrm & 7) | REX_B(s);
5390 /* simpler op */
5391 if (shift == 0) {
5392 gen_shift(s, op, ot, opreg, OR_ECX);
5393 } else {
5394 if (shift == 2) {
5395 shift = ldub_code(s->pc++);
5397 gen_shifti(s, op, ot, opreg, shift);
5400 break;
5401 case 0xd0:
5402 case 0xd1:
5403 /* shift Ev,1 */
5404 shift = 1;
5405 goto grp2;
5406 case 0xd2:
5407 case 0xd3:
5408 /* shift Ev,cl */
5409 shift = 0;
5410 goto grp2;
5412 case 0x1a4: /* shld imm */
5413 op = 0;
5414 shift = 1;
5415 goto do_shiftd;
5416 case 0x1a5: /* shld cl */
5417 op = 0;
5418 shift = 0;
5419 goto do_shiftd;
5420 case 0x1ac: /* shrd imm */
5421 op = 1;
5422 shift = 1;
5423 goto do_shiftd;
5424 case 0x1ad: /* shrd cl */
5425 op = 1;
5426 shift = 0;
5427 do_shiftd:
5428 ot = dflag + OT_WORD;
5429 modrm = ldub_code(s->pc++);
5430 mod = (modrm >> 6) & 3;
5431 rm = (modrm & 7) | REX_B(s);
5432 reg = ((modrm >> 3) & 7) | rex_r;
5433 if (mod != 3) {
5434 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5435 opreg = OR_TMP0;
5436 } else {
5437 opreg = rm;
5439 gen_op_mov_TN_reg(ot, 1, reg);
5441 if (shift) {
5442 val = ldub_code(s->pc++);
5443 tcg_gen_movi_tl(cpu_T3, val);
5444 } else {
5445 tcg_gen_mov_tl(cpu_T3, cpu_regs[R_ECX]);
5447 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5448 break;
5450 /************************/
5451 /* floats */
5452 case 0xd8 ... 0xdf:
5453 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5454 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5455 /* XXX: what should be done on an illegal op? */
5456 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5457 break;
5459 modrm = ldub_code(s->pc++);
5460 mod = (modrm >> 6) & 3;
5461 rm = modrm & 7;
5462 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
5463 if (mod != 3) {
5464 /* memory op */
5465 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5466 switch(op) {
5467 case 0x00 ... 0x07: /* fxxxs */
5468 case 0x10 ... 0x17: /* fixxxl */
5469 case 0x20 ... 0x27: /* fxxxl */
5470 case 0x30 ... 0x37: /* fixxx */
5472 int op1;
5473 op1 = op & 7;
5475 switch(op >> 4) {
5476 case 0:
5477 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5478 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5479 gen_helper_flds_FT0(cpu_tmp2_i32);
5480 break;
5481 case 1:
5482 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5483 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5484 gen_helper_fildl_FT0(cpu_tmp2_i32);
5485 break;
5486 case 2:
5487 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5488 (s->mem_index >> 2) - 1);
5489 gen_helper_fldl_FT0(cpu_tmp1_i64);
5490 break;
5491 case 3:
5492 default:
5493 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5494 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5495 gen_helper_fildl_FT0(cpu_tmp2_i32);
5496 break;
5499 gen_helper_fp_arith_ST0_FT0(op1);
5500 if (op1 == 3) {
5501 /* fcomp needs pop */
5502 gen_helper_fpop();
5505 break;
5506 case 0x08: /* flds */
5507 case 0x0a: /* fsts */
5508 case 0x0b: /* fstps */
5509 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5510 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5511 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5512 switch(op & 7) {
5513 case 0:
5514 switch(op >> 4) {
5515 case 0:
5516 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5517 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5518 gen_helper_flds_ST0(cpu_tmp2_i32);
5519 break;
5520 case 1:
5521 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5522 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5523 gen_helper_fildl_ST0(cpu_tmp2_i32);
5524 break;
5525 case 2:
5526 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5527 (s->mem_index >> 2) - 1);
5528 gen_helper_fldl_ST0(cpu_tmp1_i64);
5529 break;
5530 case 3:
5531 default:
5532 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5533 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5534 gen_helper_fildl_ST0(cpu_tmp2_i32);
5535 break;
5537 break;
5538 case 1:
5539 /* XXX: the corresponding CPUID bit must be tested ! */
5540 switch(op >> 4) {
5541 case 1:
5542 gen_helper_fisttl_ST0(cpu_tmp2_i32);
5543 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5544 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5545 break;
5546 case 2:
5547 gen_helper_fisttll_ST0(cpu_tmp1_i64);
5548 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5549 (s->mem_index >> 2) - 1);
5550 break;
5551 case 3:
5552 default:
5553 gen_helper_fistt_ST0(cpu_tmp2_i32);
5554 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5555 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5556 break;
5558 gen_helper_fpop();
5559 break;
5560 default:
5561 switch(op >> 4) {
5562 case 0:
5563 gen_helper_fsts_ST0(cpu_tmp2_i32);
5564 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5565 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5566 break;
5567 case 1:
5568 gen_helper_fistl_ST0(cpu_tmp2_i32);
5569 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5570 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5571 break;
5572 case 2:
5573 gen_helper_fstl_ST0(cpu_tmp1_i64);
5574 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5575 (s->mem_index >> 2) - 1);
5576 break;
5577 case 3:
5578 default:
5579 gen_helper_fist_ST0(cpu_tmp2_i32);
5580 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5581 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5582 break;
5584 if ((op & 7) == 3)
5585 gen_helper_fpop();
5586 break;
5588 break;
5589 case 0x0c: /* fldenv mem */
5590 if (s->cc_op != CC_OP_DYNAMIC)
5591 gen_op_set_cc_op(s->cc_op);
5592 gen_jmp_im(pc_start - s->cs_base);
5593 gen_helper_fldenv(cpu_A0, tcg_const_i32(s->dflag));
5595 break;
5596 case 0x0d: /* fldcw mem */
5597 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5598 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5599 gen_helper_fldcw(cpu_tmp2_i32);
5600 break;
5601 case 0x0e: /* fnstenv mem */
5602 if (s->cc_op != CC_OP_DYNAMIC)
5603 gen_op_set_cc_op(s->cc_op);
5604 gen_jmp_im(pc_start - s->cs_base);
5605 gen_helper_fstenv(cpu_A0, tcg_const_i32(s->dflag));
5606 break;
5607 case 0x0f: /* fnstcw mem */
5608 gen_helper_fnstcw(cpu_tmp2_i32);
5609 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5610 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5611 break;
5612 case 0x1d: /* fldt mem */
5613 if (s->cc_op != CC_OP_DYNAMIC)
5614 gen_op_set_cc_op(s->cc_op);
5615 gen_jmp_im(pc_start - s->cs_base);
5616 gen_helper_fldt_ST0(cpu_A0);
5617 break;
5618 case 0x1f: /* fstpt mem */
5619 if (s->cc_op != CC_OP_DYNAMIC)
5620 gen_op_set_cc_op(s->cc_op);
5621 gen_jmp_im(pc_start - s->cs_base);
5622 gen_helper_fstt_ST0(cpu_A0);
5623 gen_helper_fpop();
5624 break;
5625 case 0x2c: /* frstor mem */
5626 if (s->cc_op != CC_OP_DYNAMIC)
5627 gen_op_set_cc_op(s->cc_op);
5628 gen_jmp_im(pc_start - s->cs_base);
5629 gen_helper_frstor(cpu_A0, tcg_const_i32(s->dflag));
5630 break;
5631 case 0x2e: /* fnsave mem */
5632 if (s->cc_op != CC_OP_DYNAMIC)
5633 gen_op_set_cc_op(s->cc_op);
5634 gen_jmp_im(pc_start - s->cs_base);
5635 gen_helper_fsave(cpu_A0, tcg_const_i32(s->dflag));
5636 break;
5637 case 0x2f: /* fnstsw mem */
5638 gen_helper_fnstsw(cpu_tmp2_i32);
5639 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5640 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5641 break;
5642 case 0x3c: /* fbld */
5643 if (s->cc_op != CC_OP_DYNAMIC)
5644 gen_op_set_cc_op(s->cc_op);
5645 gen_jmp_im(pc_start - s->cs_base);
5646 gen_helper_fbld_ST0(cpu_A0);
5647 break;
5648 case 0x3e: /* fbstp */
5649 if (s->cc_op != CC_OP_DYNAMIC)
5650 gen_op_set_cc_op(s->cc_op);
5651 gen_jmp_im(pc_start - s->cs_base);
5652 gen_helper_fbst_ST0(cpu_A0);
5653 gen_helper_fpop();
5654 break;
5655 case 0x3d: /* fildll */
5656 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5657 (s->mem_index >> 2) - 1);
5658 gen_helper_fildll_ST0(cpu_tmp1_i64);
5659 break;
5660 case 0x3f: /* fistpll */
5661 gen_helper_fistll_ST0(cpu_tmp1_i64);
5662 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5663 (s->mem_index >> 2) - 1);
5664 gen_helper_fpop();
5665 break;
5666 default:
5667 goto illegal_op;
5669 } else {
5670 /* register float ops */
5671 opreg = rm;
5673 switch(op) {
5674 case 0x08: /* fld sti */
5675 gen_helper_fpush();
5676 gen_helper_fmov_ST0_STN(tcg_const_i32((opreg + 1) & 7));
5677 break;
5678 case 0x09: /* fxchg sti */
5679 case 0x29: /* fxchg4 sti, undocumented op */
5680 case 0x39: /* fxchg7 sti, undocumented op */
5681 gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg));
5682 break;
5683 case 0x0a: /* grp d9/2 */
5684 switch(rm) {
5685 case 0: /* fnop */
5686 /* check exceptions (FreeBSD FPU probe) */
5687 if (s->cc_op != CC_OP_DYNAMIC)
5688 gen_op_set_cc_op(s->cc_op);
5689 gen_jmp_im(pc_start - s->cs_base);
5690 gen_helper_fwait();
5691 break;
5692 default:
5693 goto illegal_op;
5695 break;
5696 case 0x0c: /* grp d9/4 */
5697 switch(rm) {
5698 case 0: /* fchs */
5699 gen_helper_fchs_ST0();
5700 break;
5701 case 1: /* fabs */
5702 gen_helper_fabs_ST0();
5703 break;
5704 case 4: /* ftst */
5705 gen_helper_fldz_FT0();
5706 gen_helper_fcom_ST0_FT0();
5707 break;
5708 case 5: /* fxam */
5709 gen_helper_fxam_ST0();
5710 break;
5711 default:
5712 goto illegal_op;
5714 break;
5715 case 0x0d: /* grp d9/5 */
5717 switch(rm) {
5718 case 0:
5719 gen_helper_fpush();
5720 gen_helper_fld1_ST0();
5721 break;
5722 case 1:
5723 gen_helper_fpush();
5724 gen_helper_fldl2t_ST0();
5725 break;
5726 case 2:
5727 gen_helper_fpush();
5728 gen_helper_fldl2e_ST0();
5729 break;
5730 case 3:
5731 gen_helper_fpush();
5732 gen_helper_fldpi_ST0();
5733 break;
5734 case 4:
5735 gen_helper_fpush();
5736 gen_helper_fldlg2_ST0();
5737 break;
5738 case 5:
5739 gen_helper_fpush();
5740 gen_helper_fldln2_ST0();
5741 break;
5742 case 6:
5743 gen_helper_fpush();
5744 gen_helper_fldz_ST0();
5745 break;
5746 default:
5747 goto illegal_op;
5750 break;
5751 case 0x0e: /* grp d9/6 */
5752 switch(rm) {
5753 case 0: /* f2xm1 */
5754 gen_helper_f2xm1();
5755 break;
5756 case 1: /* fyl2x */
5757 gen_helper_fyl2x();
5758 break;
5759 case 2: /* fptan */
5760 gen_helper_fptan();
5761 break;
5762 case 3: /* fpatan */
5763 gen_helper_fpatan();
5764 break;
5765 case 4: /* fxtract */
5766 gen_helper_fxtract();
5767 break;
5768 case 5: /* fprem1 */
5769 gen_helper_fprem1();
5770 break;
5771 case 6: /* fdecstp */
5772 gen_helper_fdecstp();
5773 break;
5774 default:
5775 case 7: /* fincstp */
5776 gen_helper_fincstp();
5777 break;
5779 break;
5780 case 0x0f: /* grp d9/7 */
5781 switch(rm) {
5782 case 0: /* fprem */
5783 gen_helper_fprem();
5784 break;
5785 case 1: /* fyl2xp1 */
5786 gen_helper_fyl2xp1();
5787 break;
5788 case 2: /* fsqrt */
5789 gen_helper_fsqrt();
5790 break;
5791 case 3: /* fsincos */
5792 gen_helper_fsincos();
5793 break;
5794 case 5: /* fscale */
5795 gen_helper_fscale();
5796 break;
5797 case 4: /* frndint */
5798 gen_helper_frndint();
5799 break;
5800 case 6: /* fsin */
5801 gen_helper_fsin();
5802 break;
5803 default:
5804 case 7: /* fcos */
5805 gen_helper_fcos();
5806 break;
5808 break;
5809 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5810 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5811 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5813 int op1;
5815 op1 = op & 7;
5816 if (op >= 0x20) {
5817 gen_helper_fp_arith_STN_ST0(op1, opreg);
5818 if (op >= 0x30)
5819 gen_helper_fpop();
5820 } else {
5821 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5822 gen_helper_fp_arith_ST0_FT0(op1);
5825 break;
5826 case 0x02: /* fcom */
5827 case 0x22: /* fcom2, undocumented op */
5828 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5829 gen_helper_fcom_ST0_FT0();
5830 break;
5831 case 0x03: /* fcomp */
5832 case 0x23: /* fcomp3, undocumented op */
5833 case 0x32: /* fcomp5, undocumented op */
5834 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5835 gen_helper_fcom_ST0_FT0();
5836 gen_helper_fpop();
5837 break;
5838 case 0x15: /* da/5 */
5839 switch(rm) {
5840 case 1: /* fucompp */
5841 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5842 gen_helper_fucom_ST0_FT0();
5843 gen_helper_fpop();
5844 gen_helper_fpop();
5845 break;
5846 default:
5847 goto illegal_op;
5849 break;
5850 case 0x1c:
5851 switch(rm) {
5852 case 0: /* feni (287 only, just do nop here) */
5853 break;
5854 case 1: /* fdisi (287 only, just do nop here) */
5855 break;
5856 case 2: /* fclex */
5857 gen_helper_fclex();
5858 break;
5859 case 3: /* fninit */
5860 gen_helper_fninit();
5861 break;
5862 case 4: /* fsetpm (287 only, just do nop here) */
5863 break;
5864 default:
5865 goto illegal_op;
5867 break;
5868 case 0x1d: /* fucomi */
5869 if (s->cc_op != CC_OP_DYNAMIC)
5870 gen_op_set_cc_op(s->cc_op);
5871 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5872 gen_helper_fucomi_ST0_FT0();
5873 s->cc_op = CC_OP_EFLAGS;
5874 break;
5875 case 0x1e: /* fcomi */
5876 if (s->cc_op != CC_OP_DYNAMIC)
5877 gen_op_set_cc_op(s->cc_op);
5878 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5879 gen_helper_fcomi_ST0_FT0();
5880 s->cc_op = CC_OP_EFLAGS;
5881 break;
5882 case 0x28: /* ffree sti */
5883 gen_helper_ffree_STN(tcg_const_i32(opreg));
5884 break;
5885 case 0x2a: /* fst sti */
5886 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5887 break;
5888 case 0x2b: /* fstp sti */
5889 case 0x0b: /* fstp1 sti, undocumented op */
5890 case 0x3a: /* fstp8 sti, undocumented op */
5891 case 0x3b: /* fstp9 sti, undocumented op */
5892 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5893 gen_helper_fpop();
5894 break;
5895 case 0x2c: /* fucom st(i) */
5896 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5897 gen_helper_fucom_ST0_FT0();
5898 break;
5899 case 0x2d: /* fucomp st(i) */
5900 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5901 gen_helper_fucom_ST0_FT0();
5902 gen_helper_fpop();
5903 break;
5904 case 0x33: /* de/3 */
5905 switch(rm) {
5906 case 1: /* fcompp */
5907 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5908 gen_helper_fcom_ST0_FT0();
5909 gen_helper_fpop();
5910 gen_helper_fpop();
5911 break;
5912 default:
5913 goto illegal_op;
5915 break;
5916 case 0x38: /* ffreep sti, undocumented op */
5917 gen_helper_ffree_STN(tcg_const_i32(opreg));
5918 gen_helper_fpop();
5919 break;
5920 case 0x3c: /* df/4 */
5921 switch(rm) {
5922 case 0:
5923 gen_helper_fnstsw(cpu_tmp2_i32);
5924 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5925 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5926 break;
5927 default:
5928 goto illegal_op;
5930 break;
5931 case 0x3d: /* fucomip */
5932 if (s->cc_op != CC_OP_DYNAMIC)
5933 gen_op_set_cc_op(s->cc_op);
5934 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5935 gen_helper_fucomi_ST0_FT0();
5936 gen_helper_fpop();
5937 s->cc_op = CC_OP_EFLAGS;
5938 break;
5939 case 0x3e: /* fcomip */
5940 if (s->cc_op != CC_OP_DYNAMIC)
5941 gen_op_set_cc_op(s->cc_op);
5942 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5943 gen_helper_fcomi_ST0_FT0();
5944 gen_helper_fpop();
5945 s->cc_op = CC_OP_EFLAGS;
5946 break;
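/* fcmovcc: the low two opcode bits select the condition (below,
   equal, below-or-equal, unordered) and bit 3 of the group opcode
   inverts it; since there is no conditional move at this level, the
   ST0 load is simply branched around when the condition is false. */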
5947 case 0x10 ... 0x13: /* fcmovxx */
5948 case 0x18 ... 0x1b:
5950 int op1, l1;
5951 static const uint8_t fcmov_cc[8] = {
5952 (JCC_B << 1),
5953 (JCC_Z << 1),
5954 (JCC_BE << 1),
5955 (JCC_P << 1),
};
5957 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
5958 l1 = gen_new_label();
5959 gen_jcc1(s, s->cc_op, op1, l1);
5960 gen_helper_fmov_ST0_STN(tcg_const_i32(opreg));
5961 gen_set_label(l1);
5963 break;
5964 default:
5965 goto illegal_op;
5968 break;
5969 /************************/
5970 /* string ops */
5972 case 0xa4: /* movsS */
5973 case 0xa5:
5974 if ((b & 1) == 0)
5975 ot = OT_BYTE;
5976 else
5977 ot = dflag + OT_WORD;
5979 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5980 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5981 } else {
5982 gen_movs(s, ot);
5984 break;
5986 case 0xaa: /* stosS */
5987 case 0xab:
5988 if ((b & 1) == 0)
5989 ot = OT_BYTE;
5990 else
5991 ot = dflag + OT_WORD;
5993 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5994 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5995 } else {
5996 gen_stos(s, ot);
5998 break;
5999 case 0xac: /* lodsS */
6000 case 0xad:
6001 if ((b & 1) == 0)
6002 ot = OT_BYTE;
6003 else
6004 ot = dflag + OT_WORD;
6005 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6006 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6007 } else {
6008 gen_lods(s, ot);
6010 break;
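/* only scas and cmps distinguish the prefix polarity: REPZ repeats
   while ZF is set, REPNZ while it is clear, so the repeat generator
   is passed the sense to test; rep movs/stos/lods stop on ECX alone. */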
6011 case 0xae: /* scasS */
6012 case 0xaf:
6013 if ((b & 1) == 0)
6014 ot = OT_BYTE;
6015 else
6016 ot = dflag + OT_WORD;
6017 if (prefixes & PREFIX_REPNZ) {
6018 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6019 } else if (prefixes & PREFIX_REPZ) {
6020 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6021 } else {
6022 gen_scas(s, ot);
6023 s->cc_op = CC_OP_SUBB + ot;
6025 break;
6027 case 0xa6: /* cmpsS */
6028 case 0xa7:
6029 if ((b & 1) == 0)
6030 ot = OT_BYTE;
6031 else
6032 ot = dflag + OT_WORD;
6033 if (prefixes & PREFIX_REPNZ) {
6034 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6035 } else if (prefixes & PREFIX_REPZ) {
6036 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6037 } else {
6038 gen_cmps(s, ot);
6039 s->cc_op = CC_OP_SUBB + ot;
6041 break;
6042 case 0x6c: /* insS */
6043 case 0x6d:
6044 if ((b & 1) == 0)
6045 ot = OT_BYTE;
6046 else
6047 ot = dflag ? OT_LONG : OT_WORD;
6048 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6049 gen_op_andl_T0_ffff();
6050 gen_check_io(s, ot, pc_start - s->cs_base,
6051 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6052 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6053 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6054 } else {
6055 gen_ins(s, ot);
6056 if (use_icount) {
6057 gen_jmp(s, s->pc - s->cs_base);
6060 break;
6061 case 0x6e: /* outsS */
6062 case 0x6f:
6063 if ((b & 1) == 0)
6064 ot = OT_BYTE;
6065 else
6066 ot = dflag ? OT_LONG : OT_WORD;
6067 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6068 gen_op_andl_T0_ffff();
6069 gen_check_io(s, ot, pc_start - s->cs_base,
6070 svm_is_rep(prefixes) | 4);
6071 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6072 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6073 } else {
6074 gen_outs(s, ot);
6075 if (use_icount) {
6076 gen_jmp(s, s->pc - s->cs_base);
6079 break;
6081 /************************/
6082 /* port I/O */
6084 case 0xe4:
6085 case 0xe5:
6086 if ((b & 1) == 0)
6087 ot = OT_BYTE;
6088 else
6089 ot = dflag ? OT_LONG : OT_WORD;
6090 val = ldub_code(s->pc++);
6091 gen_op_movl_T0_im(val);
6092 gen_check_io(s, ot, pc_start - s->cs_base,
6093 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6094 if (use_icount)
6095 gen_io_start();
6096 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6097 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6098 gen_op_mov_reg_T1(ot, R_EAX);
6099 if (use_icount) {
6100 gen_io_end();
6101 gen_jmp(s, s->pc - s->cs_base);
6103 break;
6104 case 0xe6:
6105 case 0xe7:
6106 if ((b & 1) == 0)
6107 ot = OT_BYTE;
6108 else
6109 ot = dflag ? OT_LONG : OT_WORD;
6110 val = ldub_code(s->pc++);
6111 gen_op_movl_T0_im(val);
6112 gen_check_io(s, ot, pc_start - s->cs_base,
6113 svm_is_rep(prefixes));
6114 gen_op_mov_TN_reg(ot, 1, R_EAX);
6116 if (use_icount)
6117 gen_io_start();
6118 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6119 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6120 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6121 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6122 if (use_icount) {
6123 gen_io_end();
6124 gen_jmp(s, s->pc - s->cs_base);
6126 break;
6127 case 0xec:
6128 case 0xed:
6129 if ((b & 1) == 0)
6130 ot = OT_BYTE;
6131 else
6132 ot = dflag ? OT_LONG : OT_WORD;
6133 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6134 gen_op_andl_T0_ffff();
6135 gen_check_io(s, ot, pc_start - s->cs_base,
6136 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6137 if (use_icount)
6138 gen_io_start();
6139 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6140 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6141 gen_op_mov_reg_T1(ot, R_EAX);
6142 if (use_icount) {
6143 gen_io_end();
6144 gen_jmp(s, s->pc - s->cs_base);
6146 break;
6147 case 0xee:
6148 case 0xef:
6149 if ((b & 1) == 0)
6150 ot = OT_BYTE;
6151 else
6152 ot = dflag ? OT_LONG : OT_WORD;
6153 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6154 gen_op_andl_T0_ffff();
6155 gen_check_io(s, ot, pc_start - s->cs_base,
6156 svm_is_rep(prefixes));
6157 gen_op_mov_TN_reg(ot, 1, R_EAX);
6159 if (use_icount)
6160 gen_io_start();
6161 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6162 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6163 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6164 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6165 if (use_icount) {
6166 gen_io_end();
6167 gen_jmp(s, s->pc - s->cs_base);
6169 break;
6171 /************************/
6172 /* control */
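/* ret imm16 pops the return address and then releases imm16 extra
   bytes of callee stack, hence the single stack adjustment of val
   plus the size of the popped return address below. */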
6173 case 0xc2: /* ret im */
6174 val = ldsw_code(s->pc);
6175 s->pc += 2;
6176 gen_pop_T0(s);
6177 if (CODE64(s) && s->dflag)
6178 s->dflag = 2;
6179 gen_stack_update(s, val + (2 << s->dflag));
6180 if (s->dflag == 0)
6181 gen_op_andl_T0_ffff();
6182 gen_op_jmp_T0();
6183 gen_eob(s);
6184 break;
6185 case 0xc3: /* ret */
6186 gen_pop_T0(s);
6187 gen_pop_update(s);
6188 if (s->dflag == 0)
6189 gen_op_andl_T0_ffff();
6190 gen_op_jmp_T0();
6191 gen_eob(s);
6192 break;
6193 case 0xca: /* lret im */
6194 val = ldsw_code(s->pc);
6195 s->pc += 2;
6196 do_lret:
6197 if (s->pe && !s->vm86) {
6198 if (s->cc_op != CC_OP_DYNAMIC)
6199 gen_op_set_cc_op(s->cc_op);
6200 gen_jmp_im(pc_start - s->cs_base);
6201 gen_helper_lret_protected(tcg_const_i32(s->dflag),
6202 tcg_const_i32(val));
6203 } else {
6204 gen_stack_A0(s);
6205 /* pop offset */
6206 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6207 if (s->dflag == 0)
6208 gen_op_andl_T0_ffff();
6209 /* NOTE: keeping EIP updated is not a problem in case of
6210 exception */
6211 gen_op_jmp_T0();
6212 /* pop selector */
6213 gen_op_addl_A0_im(2 << s->dflag);
6214 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6215 gen_op_movl_seg_T0_vm(R_CS);
6216 /* add stack offset */
6217 gen_stack_update(s, val + (4 << s->dflag));
6219 gen_eob(s);
6220 break;
6221 case 0xcb: /* lret */
6222 val = 0;
6223 goto do_lret;
6224 case 0xcf: /* iret */
6225 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6226 if (!s->pe) {
6227 /* real mode */
6228 gen_helper_iret_real(tcg_const_i32(s->dflag));
6229 s->cc_op = CC_OP_EFLAGS;
6230 } else if (s->vm86) {
6231 if (s->iopl != 3) {
6232 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6233 } else {
6234 gen_helper_iret_real(tcg_const_i32(s->dflag));
6235 s->cc_op = CC_OP_EFLAGS;
6237 } else {
6238 if (s->cc_op != CC_OP_DYNAMIC)
6239 gen_op_set_cc_op(s->cc_op);
6240 gen_jmp_im(pc_start - s->cs_base);
6241 gen_helper_iret_protected(tcg_const_i32(s->dflag),
6242 tcg_const_i32(s->pc - s->cs_base));
6243 s->cc_op = CC_OP_EFLAGS;
6245 gen_eob(s);
6246 break;
6247 case 0xe8: /* call im */
6249 if (dflag)
6250 tval = (int32_t)insn_get(s, OT_LONG);
6251 else
6252 tval = (int16_t)insn_get(s, OT_WORD);
6253 next_eip = s->pc - s->cs_base;
6254 tval += next_eip;
6255 if (s->dflag == 0)
6256 tval &= 0xffff;
6257 else if(!CODE64(s))
6258 tval &= 0xffffffff;
6259 gen_movtl_T0_im(next_eip);
6260 gen_push_T0(s);
6261 gen_jmp(s, tval);
6263 break;
6264 case 0x9a: /* lcall im */
6266 unsigned int selector, offset;
6268 if (CODE64(s))
6269 goto illegal_op;
6270 ot = dflag ? OT_LONG : OT_WORD;
6271 offset = insn_get(s, ot);
6272 selector = insn_get(s, OT_WORD);
6274 gen_op_movl_T0_im(selector);
6275 gen_op_movl_T1_imu(offset);
6277 goto do_lcall;
6278 case 0xe9: /* jmp im */
6279 if (dflag)
6280 tval = (int32_t)insn_get(s, OT_LONG);
6281 else
6282 tval = (int16_t)insn_get(s, OT_WORD);
6283 tval += s->pc - s->cs_base;
6284 if (s->dflag == 0)
6285 tval &= 0xffff;
6286 else if(!CODE64(s))
6287 tval &= 0xffffffff;
6288 gen_jmp(s, tval);
6289 break;
6290 case 0xea: /* ljmp im */
6292 unsigned int selector, offset;
6294 if (CODE64(s))
6295 goto illegal_op;
6296 ot = dflag ? OT_LONG : OT_WORD;
6297 offset = insn_get(s, ot);
6298 selector = insn_get(s, OT_WORD);
6300 gen_op_movl_T0_im(selector);
6301 gen_op_movl_T1_imu(offset);
6303 goto do_ljmp;
6304 case 0xeb: /* jmp Jb */
6305 tval = (int8_t)insn_get(s, OT_BYTE);
6306 tval += s->pc - s->cs_base;
6307 if (s->dflag == 0)
6308 tval &= 0xffff;
6309 gen_jmp(s, tval);
6310 break;
6311 case 0x70 ... 0x7f: /* jcc Jb */
6312 tval = (int8_t)insn_get(s, OT_BYTE);
6313 goto do_jcc;
6314 case 0x180 ... 0x18f: /* jcc Jv */
6315 if (dflag) {
6316 tval = (int32_t)insn_get(s, OT_LONG);
6317 } else {
6318 tval = (int16_t)insn_get(s, OT_WORD);
6320 do_jcc:
6321 next_eip = s->pc - s->cs_base;
6322 tval += next_eip;
6323 if (s->dflag == 0)
6324 tval &= 0xffff;
6325 gen_jcc(s, b, tval, next_eip);
6326 break;
6328 case 0x190 ... 0x19f: /* setcc Gv */
6329 modrm = ldub_code(s->pc++);
6330 gen_setcc(s, b);
6331 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6332 break;
6333 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6335 int l1;
6336 TCGv t0;
6338 ot = dflag + OT_WORD;
6339 modrm = ldub_code(s->pc++);
6340 reg = ((modrm >> 3) & 7) | rex_r;
6341 mod = (modrm >> 6) & 3;
6342 t0 = tcg_temp_local_new();
6343 if (mod != 3) {
6344 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6345 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6346 } else {
6347 rm = (modrm & 7) | REX_B(s);
6348 gen_op_mov_v_reg(ot, t0, rm);
6350 #ifdef TARGET_X86_64
6351 if (ot == OT_LONG) {
6352 /* XXX: specific Intel behaviour ? */
6353 l1 = gen_new_label();
6354 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6355 tcg_gen_mov_tl(cpu_regs[reg], t0);
6356 gen_set_label(l1);
6357 tcg_gen_ext32u_tl(cpu_regs[reg], cpu_regs[reg]);
6358 } else
6359 #endif
6361 l1 = gen_new_label();
6362 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6363 gen_op_mov_reg_v(ot, reg, t0);
6364 gen_set_label(l1);
6366 tcg_temp_free(t0);
6368 break;
6370 /************************/
6371 /* flags */
6372 case 0x9c: /* pushf */
6373 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6374 if (s->vm86 && s->iopl != 3) {
6375 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6376 } else {
6377 if (s->cc_op != CC_OP_DYNAMIC)
6378 gen_op_set_cc_op(s->cc_op);
6379 gen_helper_read_eflags(cpu_T[0]);
6380 gen_push_T0(s);
6382 break;
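/* POPF: the set of EFLAGS bits that may be modified depends on
   privilege.  At CPL 0, TF/AC/ID/NT/IF and IOPL are all writable;
   at 0 < CPL <= IOPL everything but IOPL is; otherwise IF is
   excluded as well.  A 16-bit operand writes only the low word.
   In vm86 mode with IOPL != 3 it raises #GP so a monitor can
   intervene. */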
6383 case 0x9d: /* popf */
6384 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6385 if (s->vm86 && s->iopl != 3) {
6386 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6387 } else {
6388 gen_pop_T0(s);
6389 if (s->cpl == 0) {
6390 if (s->dflag) {
6391 gen_helper_write_eflags(cpu_T[0],
6392 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6393 } else {
6394 gen_helper_write_eflags(cpu_T[0],
6395 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6396 }
6397 } else {
6398 if (s->cpl <= s->iopl) {
6399 if (s->dflag) {
6400 gen_helper_write_eflags(cpu_T[0],
6401 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6402 } else {
6403 gen_helper_write_eflags(cpu_T[0],
6404 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6405 }
6406 } else {
6407 if (s->dflag) {
6408 gen_helper_write_eflags(cpu_T[0],
6409 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6410 } else {
6411 gen_helper_write_eflags(cpu_T[0],
6412 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6413 }
6414 }
6415 }
6416 gen_pop_update(s);
6417 s->cc_op = CC_OP_EFLAGS;
6418 /* abort translation because TF flag may change */
6419 gen_jmp_im(s->pc - s->cs_base);
6420 gen_eob(s);
6421 }
6422 break;
6423 case 0x9e: /* sahf */
6424 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6425 goto illegal_op;
6426 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6427 if (s->cc_op != CC_OP_DYNAMIC)
6428 gen_op_set_cc_op(s->cc_op);
6429 gen_compute_eflags(cpu_cc_src);
6430 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6431 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6432 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6433 s->cc_op = CC_OP_EFLAGS;
6434 break;
6435 case 0x9f: /* lahf */
6436 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6437 goto illegal_op;
6438 if (s->cc_op != CC_OP_DYNAMIC)
6439 gen_op_set_cc_op(s->cc_op);
6440 gen_compute_eflags(cpu_T[0]);
6441 /* Note: gen_compute_eflags() only gives the condition codes */
6442 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6443 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6444 break;
6445 case 0xf5: /* cmc */
6446 if (s->cc_op != CC_OP_DYNAMIC)
6447 gen_op_set_cc_op(s->cc_op);
6448 gen_compute_eflags(cpu_cc_src);
6449 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6450 s->cc_op = CC_OP_EFLAGS;
6451 break;
6452 case 0xf8: /* clc */
6453 if (s->cc_op != CC_OP_DYNAMIC)
6454 gen_op_set_cc_op(s->cc_op);
6455 gen_compute_eflags(cpu_cc_src);
6456 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6457 s->cc_op = CC_OP_EFLAGS;
6458 break;
6459 case 0xf9: /* stc */
6460 if (s->cc_op != CC_OP_DYNAMIC)
6461 gen_op_set_cc_op(s->cc_op);
6462 gen_compute_eflags(cpu_cc_src);
6463 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6464 s->cc_op = CC_OP_EFLAGS;
6465 break;
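/* The direction flag is kept in env->df as +1 or -1, so the
   string instruction helpers can derive the per-element pointer
   increment as df << log2(element size) without branching. */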
6466 case 0xfc: /* cld */
6467 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6468 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6469 break;
6470 case 0xfd: /* std */
6471 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6472 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6473 break;
6475 /************************/
6476 /* bit operations */
6477 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6478 ot = dflag + OT_WORD;
6479 modrm = ldub_code(s->pc++);
6480 op = (modrm >> 3) & 7;
6481 mod = (modrm >> 6) & 3;
6482 rm = (modrm & 7) | REX_B(s);
6483 if (mod != 3) {
6484 s->rip_offset = 1;
6485 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6486 gen_op_ld_T0_A0(ot + s->mem_index);
6487 } else {
6488 gen_op_mov_TN_reg(ot, 0, rm);
6490 /* load shift */
6491 val = ldub_code(s->pc++);
6492 gen_op_movl_T1_im(val);
6493 if (op < 4)
6494 goto illegal_op;
6495 op -= 4;
6496 goto bt_op;
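/* bt/bts/btr/btc with a register bit offset and a memory operand:
   the bit index may lie outside the operand, so do_btx first adds
   (bit_index / width) operand-sized units to the address and then
   tests bit_index modulo the width.  E.g. "bt [mem], eax" with
   eax = 100 on a 32-bit operand adds (100 >> 5) << 2 = 12 bytes
   and tests bit 100 & 31 = 4.  The immediate form above only
   takes the bit index modulo the width. */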
6497 case 0x1a3: /* bt Gv, Ev */
6498 op = 0;
6499 goto do_btx;
6500 case 0x1ab: /* bts */
6501 op = 1;
6502 goto do_btx;
6503 case 0x1b3: /* btr */
6504 op = 2;
6505 goto do_btx;
6506 case 0x1bb: /* btc */
6507 op = 3;
6508 do_btx:
6509 ot = dflag + OT_WORD;
6510 modrm = ldub_code(s->pc++);
6511 reg = ((modrm >> 3) & 7) | rex_r;
6512 mod = (modrm >> 6) & 3;
6513 rm = (modrm & 7) | REX_B(s);
6514 gen_op_mov_TN_reg(OT_LONG, 1, reg);
6515 if (mod != 3) {
6516 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6517 /* specific case: we need to add a displacement */
6518 gen_exts(ot, cpu_T[1]);
6519 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6520 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6521 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
6522 gen_op_ld_T0_A0(ot + s->mem_index);
6523 } else {
6524 gen_op_mov_TN_reg(ot, 0, rm);
6526 bt_op:
6527 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6528 switch(op) {
6529 case 0:
6530 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6531 tcg_gen_movi_tl(cpu_cc_dst, 0);
6532 break;
6533 case 1:
6534 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6535 tcg_gen_movi_tl(cpu_tmp0, 1);
6536 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6537 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6538 break;
6539 case 2:
6540 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6541 tcg_gen_movi_tl(cpu_tmp0, 1);
6542 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6543 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6544 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6545 break;
6546 default:
6547 case 3:
6548 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6549 tcg_gen_movi_tl(cpu_tmp0, 1);
6550 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6551 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6552 break;
6554 s->cc_op = CC_OP_SARB + ot;
6555 if (op != 0) {
6556 if (mod != 3)
6557 gen_op_st_T0_A0(ot + s->mem_index);
6558 else
6559 gen_op_mov_reg_T0(ot, rm);
6560 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6561 tcg_gen_movi_tl(cpu_cc_dst, 0);
6563 break;
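/* bsf/bsr: with a REPZ (F3) prefix and the ABM CPUID bit set, the
   bsr encoding is reinterpreted as lzcnt.  In the plain case a
   zero source leaves the destination register unchanged and sets
   cc_dst to 0, so ZF reads as 1 under CC_OP_LOGIC; the remaining
   flags are undefined (cc_src is discarded). */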
6564 case 0x1bc: /* bsf */
6565 case 0x1bd: /* bsr */
6567 int label1;
6568 TCGv t0;
6570 ot = dflag + OT_WORD;
6571 modrm = ldub_code(s->pc++);
6572 reg = ((modrm >> 3) & 7) | rex_r;
6573 gen_ldst_modrm(s,modrm, ot, OR_TMP0, 0);
6574 gen_extu(ot, cpu_T[0]);
6575 t0 = tcg_temp_local_new();
6576 tcg_gen_mov_tl(t0, cpu_T[0]);
6577 if ((b & 1) && (prefixes & PREFIX_REPZ) &&
6578 (s->cpuid_ext3_features & CPUID_EXT3_ABM)) {
6579 switch(ot) {
6580 case OT_WORD: gen_helper_lzcnt(cpu_T[0], t0,
6581 tcg_const_i32(16)); break;
6582 case OT_LONG: gen_helper_lzcnt(cpu_T[0], t0,
6583 tcg_const_i32(32)); break;
6584 case OT_QUAD: gen_helper_lzcnt(cpu_T[0], t0,
6585 tcg_const_i32(64)); break;
6587 gen_op_mov_reg_T0(ot, reg);
6588 } else {
6589 label1 = gen_new_label();
6590 tcg_gen_movi_tl(cpu_cc_dst, 0);
6591 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6592 if (b & 1) {
6593 gen_helper_bsr(cpu_T[0], t0);
6594 } else {
6595 gen_helper_bsf(cpu_T[0], t0);
6597 gen_op_mov_reg_T0(ot, reg);
6598 tcg_gen_movi_tl(cpu_cc_dst, 1);
6599 gen_set_label(label1);
6600 tcg_gen_discard_tl(cpu_cc_src);
6601 s->cc_op = CC_OP_LOGICB + ot;
6603 tcg_temp_free(t0);
6605 break;
6606 /************************/
6607 /* bcd */
6608 case 0x27: /* daa */
6609 if (CODE64(s))
6610 goto illegal_op;
6611 if (s->cc_op != CC_OP_DYNAMIC)
6612 gen_op_set_cc_op(s->cc_op);
6613 gen_helper_daa();
6614 s->cc_op = CC_OP_EFLAGS;
6615 break;
6616 case 0x2f: /* das */
6617 if (CODE64(s))
6618 goto illegal_op;
6619 if (s->cc_op != CC_OP_DYNAMIC)
6620 gen_op_set_cc_op(s->cc_op);
6621 gen_helper_das();
6622 s->cc_op = CC_OP_EFLAGS;
6623 break;
6624 case 0x37: /* aaa */
6625 if (CODE64(s))
6626 goto illegal_op;
6627 if (s->cc_op != CC_OP_DYNAMIC)
6628 gen_op_set_cc_op(s->cc_op);
6629 gen_helper_aaa();
6630 s->cc_op = CC_OP_EFLAGS;
6631 break;
6632 case 0x3f: /* aas */
6633 if (CODE64(s))
6634 goto illegal_op;
6635 if (s->cc_op != CC_OP_DYNAMIC)
6636 gen_op_set_cc_op(s->cc_op);
6637 gen_helper_aas();
6638 s->cc_op = CC_OP_EFLAGS;
6639 break;
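/* aam/aad take an immediate base (0x0A in the classic encoding):
   aam sets AH = AL / base and AL = AL % base, so a zero base
   raises #DE, checked explicitly below; aad computes
   AL = (AL + AH * base) & 0xff and clears AH. */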
6640 case 0xd4: /* aam */
6641 if (CODE64(s))
6642 goto illegal_op;
6643 val = ldub_code(s->pc++);
6644 if (val == 0) {
6645 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6646 } else {
6647 gen_helper_aam(tcg_const_i32(val));
6648 s->cc_op = CC_OP_LOGICB;
6650 break;
6651 case 0xd5: /* aad */
6652 if (CODE64(s))
6653 goto illegal_op;
6654 val = ldub_code(s->pc++);
6655 gen_helper_aad(tcg_const_i32(val));
6656 s->cc_op = CC_OP_LOGICB;
6657 break;
6658 /************************/
6659 /* misc */
6660 case 0x90: /* nop */
6661 /* XXX: correct lock test for all insn */
6662 if (prefixes & PREFIX_LOCK) {
6663 goto illegal_op;
6665 /* If REX_B is set, then this is xchg eax, r8d, not a nop. */
6666 if (REX_B(s)) {
6667 goto do_xchg_reg_eax;
6669 if (prefixes & PREFIX_REPZ) {
6670 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6672 break;
6673 case 0x9b: /* fwait */
6674 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6675 (HF_MP_MASK | HF_TS_MASK)) {
6676 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6677 } else {
6678 if (s->cc_op != CC_OP_DYNAMIC)
6679 gen_op_set_cc_op(s->cc_op);
6680 gen_jmp_im(pc_start - s->cs_base);
6681 gen_helper_fwait();
6683 break;
6684 case 0xcc: /* int3 */
6685 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6686 break;
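/* INT n is IOPL-sensitive in vm86 mode so that a vm86 monitor can
   virtualize software interrupts; int3 above is not subject to
   that check and always raises EXCP03 directly. */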
6687 case 0xcd: /* int N */
6688 val = ldub_code(s->pc++);
6689 if (s->vm86 && s->iopl != 3) {
6690 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6691 } else {
6692 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6694 break;
6695 case 0xce: /* into */
6696 if (CODE64(s))
6697 goto illegal_op;
6698 if (s->cc_op != CC_OP_DYNAMIC)
6699 gen_op_set_cc_op(s->cc_op);
6700 gen_jmp_im(pc_start - s->cs_base);
6701 gen_helper_into(tcg_const_i32(s->pc - pc_start));
6702 break;
6703 #ifdef WANT_ICEBP
6704 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6705 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6706 #if 1
6707 gen_debug(s, pc_start - s->cs_base);
6708 #else
6709 /* start debug */
6710 tb_flush(cpu_single_env);
6711 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6712 #endif
6713 break;
6714 #endif
6715 case 0xfa: /* cli */
6716 if (!s->vm86) {
6717 if (s->cpl <= s->iopl) {
6718 gen_helper_cli();
6719 } else {
6720 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6721 }
6722 } else {
6723 if (s->iopl == 3) {
6724 gen_helper_cli();
6725 } else {
6726 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6727 }
6728 }
6729 break;
6730 case 0xfb: /* sti */
6731 if (!s->vm86) {
6732 if (s->cpl <= s->iopl) {
6733 gen_sti:
6734 gen_helper_sti();
6735 /* interrupts are recognized only after the insn following
6736 sti; if several consecutive insns inhibit interrupts,
6737 only the _first_ one takes effect */
6738 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6739 gen_helper_set_inhibit_irq();
6740 /* give a chance to handle pending irqs */
6741 gen_jmp_im(s->pc - s->cs_base);
6742 gen_eob(s);
6743 } else {
6744 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6745 }
6746 } else {
6747 if (s->iopl == 3) {
6748 goto gen_sti;
6749 } else {
6750 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6751 }
6752 }
6753 break;
6754 case 0x62: /* bound */
6755 if (CODE64(s))
6756 goto illegal_op;
6757 ot = dflag ? OT_LONG : OT_WORD;
6758 modrm = ldub_code(s->pc++);
6759 reg = (modrm >> 3) & 7;
6760 mod = (modrm >> 6) & 3;
6761 if (mod == 3)
6762 goto illegal_op;
6763 gen_op_mov_TN_reg(ot, 0, reg);
6764 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6765 gen_jmp_im(pc_start - s->cs_base);
6766 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6767 if (ot == OT_WORD)
6768 gen_helper_boundw(cpu_A0, cpu_tmp2_i32);
6769 else
6770 gen_helper_boundl(cpu_A0, cpu_tmp2_i32);
6771 break;
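/* BSWAP with a 16-bit operand size is undefined on real CPUs;
   here the data-size prefix is simply ignored and the register is
   swapped as 32 bits (or 64 bits with REX.W). */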
6772 case 0x1c8 ... 0x1cf: /* bswap reg */
6773 reg = (b & 7) | REX_B(s);
6774 #ifdef TARGET_X86_64
6775 if (dflag == 2) {
6776 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6777 tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
6778 gen_op_mov_reg_T0(OT_QUAD, reg);
6779 } else
6780 #endif
6782 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6783 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
6784 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
6785 gen_op_mov_reg_T0(OT_LONG, reg);
6787 break;
6788 case 0xd6: /* salc */
6789 if (CODE64(s))
6790 goto illegal_op;
6791 if (s->cc_op != CC_OP_DYNAMIC)
6792 gen_op_set_cc_op(s->cc_op);
6793 gen_compute_eflags_c(cpu_T[0]);
6794 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6795 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6796 break;
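/* loop/loopz/loopnz/jecxz use three labels: l1 is the taken path
   (jump to tval), l3 is the not-taken join point (loopz/loopnz
   branch to it explicitly once ECX reaches zero), and l2 is the
   common exit after the not-taken path has stored next_eip. */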
6797 case 0xe0: /* loopnz */
6798 case 0xe1: /* loopz */
6799 case 0xe2: /* loop */
6800 case 0xe3: /* jecxz */
6802 int l1, l2, l3;
6804 tval = (int8_t)insn_get(s, OT_BYTE);
6805 next_eip = s->pc - s->cs_base;
6806 tval += next_eip;
6807 if (s->dflag == 0)
6808 tval &= 0xffff;
6810 l1 = gen_new_label();
6811 l2 = gen_new_label();
6812 l3 = gen_new_label();
6813 b &= 3;
6814 switch(b) {
6815 case 0: /* loopnz */
6816 case 1: /* loopz */
6817 if (s->cc_op != CC_OP_DYNAMIC)
6818 gen_op_set_cc_op(s->cc_op);
6819 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6820 gen_op_jz_ecx(s->aflag, l3);
6821 gen_compute_eflags(cpu_tmp0);
6822 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6823 if (b == 0) {
6824 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6825 } else {
6826 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6828 break;
6829 case 2: /* loop */
6830 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6831 gen_op_jnz_ecx(s->aflag, l1);
6832 break;
6833 default:
6834 case 3: /* jcxz */
6835 gen_op_jz_ecx(s->aflag, l1);
6836 break;
6839 gen_set_label(l3);
6840 gen_jmp_im(next_eip);
6841 tcg_gen_br(l2);
6843 gen_set_label(l1);
6844 gen_jmp_im(tval);
6845 gen_set_label(l2);
6846 gen_eob(s);
6848 break;
6849 case 0x130: /* wrmsr */
6850 case 0x132: /* rdmsr */
6851 if (s->cpl != 0) {
6852 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6853 } else {
6854 if (s->cc_op != CC_OP_DYNAMIC)
6855 gen_op_set_cc_op(s->cc_op);
6856 gen_jmp_im(pc_start - s->cs_base);
6857 if (b & 2) {
6858 gen_helper_rdmsr();
6859 } else {
6860 gen_helper_wrmsr();
6863 break;
6864 case 0x131: /* rdtsc */
6865 if (s->cc_op != CC_OP_DYNAMIC)
6866 gen_op_set_cc_op(s->cc_op);
6867 gen_jmp_im(pc_start - s->cs_base);
6868 if (use_icount)
6869 gen_io_start();
6870 gen_helper_rdtsc();
6871 if (use_icount) {
6872 gen_io_end();
6873 gen_jmp(s, s->pc - s->cs_base);
6875 break;
6876 case 0x133: /* rdpmc */
6877 if (s->cc_op != CC_OP_DYNAMIC)
6878 gen_op_set_cc_op(s->cc_op);
6879 gen_jmp_im(pc_start - s->cs_base);
6880 gen_helper_rdpmc();
6881 break;
6882 case 0x134: /* sysenter */
6883 /* on Intel CPUs, SYSENTER is valid even in 64-bit mode */
6884 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6885 goto illegal_op;
6886 if (!s->pe) {
6887 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6888 } else {
6889 gen_update_cc_op(s);
6890 gen_jmp_im(pc_start - s->cs_base);
6891 gen_helper_sysenter();
6892 gen_eob(s);
6894 break;
6895 case 0x135: /* sysexit */
6896 /* on Intel CPUs, SYSEXIT is valid even in 64-bit mode */
6897 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6898 goto illegal_op;
6899 if (!s->pe) {
6900 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6901 } else {
6902 gen_update_cc_op(s);
6903 gen_jmp_im(pc_start - s->cs_base);
6904 gen_helper_sysexit(tcg_const_i32(dflag));
6905 gen_eob(s);
6907 break;
6908 #ifdef TARGET_X86_64
6909 case 0x105: /* syscall */
6910 /* XXX: is it usable in real mode ? */
6911 gen_update_cc_op(s);
6912 gen_jmp_im(pc_start - s->cs_base);
6913 gen_helper_syscall(tcg_const_i32(s->pc - pc_start));
6914 gen_eob(s);
6915 break;
6916 case 0x107: /* sysret */
6917 if (!s->pe) {
6918 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6919 } else {
6920 gen_update_cc_op(s);
6921 gen_jmp_im(pc_start - s->cs_base);
6922 gen_helper_sysret(tcg_const_i32(s->dflag));
6923 /* condition codes are modified only in long mode */
6924 if (s->lma)
6925 s->cc_op = CC_OP_EFLAGS;
6926 gen_eob(s);
6928 break;
6929 #endif
6930 case 0x1a2: /* cpuid */
6931 if (s->cc_op != CC_OP_DYNAMIC)
6932 gen_op_set_cc_op(s->cc_op);
6933 gen_jmp_im(pc_start - s->cs_base);
6934 gen_helper_cpuid();
6935 break;
6936 case 0xf4: /* hlt */
6937 if (s->cpl != 0) {
6938 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6939 } else {
6940 if (s->cc_op != CC_OP_DYNAMIC)
6941 gen_op_set_cc_op(s->cc_op);
6942 gen_jmp_im(pc_start - s->cs_base);
6943 gen_helper_hlt(tcg_const_i32(s->pc - pc_start));
6944 s->is_jmp = DISAS_TB_JUMP;
6946 break;
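/* 0F 00 (group 6): the operation is selected by the reg field of
   the modrm byte: 0 = sldt, 1 = str, 2 = lldt, 3 = ltr, 4 = verr,
   5 = verw.  All are illegal in real and vm86 mode, and lldt/ltr
   additionally require CPL 0. */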
6947 case 0x100:
6948 modrm = ldub_code(s->pc++);
6949 mod = (modrm >> 6) & 3;
6950 op = (modrm >> 3) & 7;
6951 switch(op) {
6952 case 0: /* sldt */
6953 if (!s->pe || s->vm86)
6954 goto illegal_op;
6955 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6956 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6957 ot = OT_WORD;
6958 if (mod == 3)
6959 ot += s->dflag;
6960 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6961 break;
6962 case 2: /* lldt */
6963 if (!s->pe || s->vm86)
6964 goto illegal_op;
6965 if (s->cpl != 0) {
6966 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6967 } else {
6968 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6969 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6970 gen_jmp_im(pc_start - s->cs_base);
6971 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6972 gen_helper_lldt(cpu_tmp2_i32);
6974 break;
6975 case 1: /* str */
6976 if (!s->pe || s->vm86)
6977 goto illegal_op;
6978 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6979 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6980 ot = OT_WORD;
6981 if (mod == 3)
6982 ot += s->dflag;
6983 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6984 break;
6985 case 3: /* ltr */
6986 if (!s->pe || s->vm86)
6987 goto illegal_op;
6988 if (s->cpl != 0) {
6989 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6990 } else {
6991 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
6992 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6993 gen_jmp_im(pc_start - s->cs_base);
6994 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6995 gen_helper_ltr(cpu_tmp2_i32);
6997 break;
6998 case 4: /* verr */
6999 case 5: /* verw */
7000 if (!s->pe || s->vm86)
7001 goto illegal_op;
7002 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7003 if (s->cc_op != CC_OP_DYNAMIC)
7004 gen_op_set_cc_op(s->cc_op);
7005 if (op == 4)
7006 gen_helper_verr(cpu_T[0]);
7007 else
7008 gen_helper_verw(cpu_T[0]);
7009 s->cc_op = CC_OP_EFLAGS;
7010 break;
7011 default:
7012 goto illegal_op;
7014 break;
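/* 0F 01 (group 7): sgdt/sidt/lgdt/lidt/smsw/lmsw/invlpg, again
   selected by the reg field.  The mod == 3 encodings of the sidt
   slot are reused for monitor/mwait, and those of the lgdt/lidt
   slots for the SVM instructions (VMRUN ... INVLPGA, selected by
   rm). */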
7015 case 0x101:
7016 modrm = ldub_code(s->pc++);
7017 mod = (modrm >> 6) & 3;
7018 op = (modrm >> 3) & 7;
7019 rm = modrm & 7;
7020 switch(op) {
7021 case 0: /* sgdt */
7022 if (mod == 3)
7023 goto illegal_op;
7024 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7025 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7026 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7027 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7028 gen_add_A0_im(s, 2);
7029 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7030 if (!s->dflag)
7031 gen_op_andl_T0_im(0xffffff);
7032 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7033 break;
7034 case 1:
7035 if (mod == 3) {
7036 switch (rm) {
7037 case 0: /* monitor */
7038 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7039 s->cpl != 0)
7040 goto illegal_op;
7041 if (s->cc_op != CC_OP_DYNAMIC)
7042 gen_op_set_cc_op(s->cc_op);
7043 gen_jmp_im(pc_start - s->cs_base);
7044 #ifdef TARGET_X86_64
7045 if (s->aflag == 2) {
7046 gen_op_movq_A0_reg(R_EAX);
7047 } else
7048 #endif
7050 gen_op_movl_A0_reg(R_EAX);
7051 if (s->aflag == 0)
7052 gen_op_andl_A0_ffff();
7054 gen_add_A0_ds_seg(s);
7055 gen_helper_monitor(cpu_A0);
7056 break;
7057 case 1: /* mwait */
7058 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7059 s->cpl != 0)
7060 goto illegal_op;
7061 gen_update_cc_op(s);
7062 gen_jmp_im(pc_start - s->cs_base);
7063 gen_helper_mwait(tcg_const_i32(s->pc - pc_start));
7064 gen_eob(s);
7065 break;
7066 default:
7067 goto illegal_op;
7069 } else { /* sidt */
7070 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7071 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7072 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7073 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7074 gen_add_A0_im(s, 2);
7075 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7076 if (!s->dflag)
7077 gen_op_andl_T0_im(0xffffff);
7078 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7080 break;
7081 case 2: /* lgdt */
7082 case 3: /* lidt */
7083 if (mod == 3) {
7084 if (s->cc_op != CC_OP_DYNAMIC)
7085 gen_op_set_cc_op(s->cc_op);
7086 gen_jmp_im(pc_start - s->cs_base);
7087 switch(rm) {
7088 case 0: /* VMRUN */
7089 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7090 goto illegal_op;
7091 if (s->cpl != 0) {
7092 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7093 break;
7094 } else {
7095 gen_helper_vmrun(tcg_const_i32(s->aflag),
7096 tcg_const_i32(s->pc - pc_start));
7097 tcg_gen_exit_tb(0);
7098 s->is_jmp = DISAS_TB_JUMP;
7100 break;
7101 case 1: /* VMMCALL */
7102 if (!(s->flags & HF_SVME_MASK))
7103 goto illegal_op;
7104 gen_helper_vmmcall();
7105 break;
7106 case 2: /* VMLOAD */
7107 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7108 goto illegal_op;
7109 if (s->cpl != 0) {
7110 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7111 break;
7112 } else {
7113 gen_helper_vmload(tcg_const_i32(s->aflag));
7115 break;
7116 case 3: /* VMSAVE */
7117 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7118 goto illegal_op;
7119 if (s->cpl != 0) {
7120 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7121 break;
7122 } else {
7123 gen_helper_vmsave(tcg_const_i32(s->aflag));
7125 break;
7126 case 4: /* STGI */
7127 if ((!(s->flags & HF_SVME_MASK) &&
7128 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7129 !s->pe)
7130 goto illegal_op;
7131 if (s->cpl != 0) {
7132 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7133 break;
7134 } else {
7135 gen_helper_stgi();
7137 break;
7138 case 5: /* CLGI */
7139 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7140 goto illegal_op;
7141 if (s->cpl != 0) {
7142 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7143 break;
7144 } else {
7145 gen_helper_clgi();
7147 break;
7148 case 6: /* SKINIT */
7149 if ((!(s->flags & HF_SVME_MASK) &&
7150 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7151 !s->pe)
7152 goto illegal_op;
7153 gen_helper_skinit();
7154 break;
7155 case 7: /* INVLPGA */
7156 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7157 goto illegal_op;
7158 if (s->cpl != 0) {
7159 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7160 break;
7161 } else {
7162 gen_helper_invlpga(tcg_const_i32(s->aflag));
7164 break;
7165 default:
7166 goto illegal_op;
7168 } else if (s->cpl != 0) {
7169 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7170 } else {
7171 gen_svm_check_intercept(s, pc_start,
7172 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7173 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7174 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7175 gen_add_A0_im(s, 2);
7176 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7177 if (!s->dflag)
7178 gen_op_andl_T0_im(0xffffff);
7179 if (op == 2) {
7180 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7181 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7182 } else {
7183 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7184 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7187 break;
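/* smsw reads the low 32 bits of CR0; on a big-endian host with a
   64-bit target that 32-bit load must be made at offset +4 into
   the 64-bit cr[0] slot, hence the #if below. */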
7188 case 4: /* smsw */
7189 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7190 #if defined TARGET_X86_64 && defined HOST_WORDS_BIGENDIAN
7191 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
7192 #else
7193 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7194 #endif
7195 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7196 break;
7197 case 6: /* lmsw */
7198 if (s->cpl != 0) {
7199 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7200 } else {
7201 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7202 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7203 gen_helper_lmsw(cpu_T[0]);
7204 gen_jmp_im(s->pc - s->cs_base);
7205 gen_eob(s);
7207 break;
7208 case 7:
7209 if (mod != 3) { /* invlpg */
7210 if (s->cpl != 0) {
7211 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7212 } else {
7213 if (s->cc_op != CC_OP_DYNAMIC)
7214 gen_op_set_cc_op(s->cc_op);
7215 gen_jmp_im(pc_start - s->cs_base);
7216 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7217 gen_helper_invlpg(cpu_A0);
7218 gen_jmp_im(s->pc - s->cs_base);
7219 gen_eob(s);
7221 } else {
7222 switch (rm) {
7223 case 0: /* swapgs */
7224 #ifdef TARGET_X86_64
7225 if (CODE64(s)) {
7226 if (s->cpl != 0) {
7227 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7228 } else {
7229 tcg_gen_ld_tl(cpu_T[0], cpu_env,
7230 offsetof(CPUX86State,segs[R_GS].base));
7231 tcg_gen_ld_tl(cpu_T[1], cpu_env,
7232 offsetof(CPUX86State,kernelgsbase));
7233 tcg_gen_st_tl(cpu_T[1], cpu_env,
7234 offsetof(CPUX86State,segs[R_GS].base));
7235 tcg_gen_st_tl(cpu_T[0], cpu_env,
7236 offsetof(CPUX86State,kernelgsbase));
7238 } else
7239 #endif
7241 goto illegal_op;
7243 break;
7244 case 1: /* rdtscp */
7245 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7246 goto illegal_op;
7247 if (s->cc_op != CC_OP_DYNAMIC)
7248 gen_op_set_cc_op(s->cc_op);
7249 gen_jmp_im(pc_start - s->cs_base);
7250 if (use_icount)
7251 gen_io_start();
7252 gen_helper_rdtscp();
7253 if (use_icount) {
7254 gen_io_end();
7255 gen_jmp(s, s->pc - s->cs_base);
7257 break;
7258 default:
7259 goto illegal_op;
7262 break;
7263 default:
7264 goto illegal_op;
7266 break;
7267 case 0x108: /* invd */
7268 case 0x109: /* wbinvd */
7269 if (s->cpl != 0) {
7270 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7271 } else {
7272 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7273 /* nothing to do */
7275 break;
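/* Opcode 0x63 is two different instructions: MOVSXD (sign-extend
   a 32-bit source into a 64-bit register) in 64-bit mode, ARPL
   otherwise.  ARPL compares the RPL fields (low 2 bits) of two
   selectors: if the destination RPL is lower it is raised to the
   source RPL and ZF is set, else ZF is cleared; t2 below holds
   the CC_Z bit that gets merged into the computed eflags. */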
7276 case 0x63: /* arpl or movslS (x86_64) */
7277 #ifdef TARGET_X86_64
7278 if (CODE64(s)) {
7279 int d_ot;
7280 /* d_ot is the size of the destination operand */
7281 d_ot = dflag + OT_WORD;
7283 modrm = ldub_code(s->pc++);
7284 reg = ((modrm >> 3) & 7) | rex_r;
7285 mod = (modrm >> 6) & 3;
7286 rm = (modrm & 7) | REX_B(s);
7288 if (mod == 3) {
7289 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7290 /* sign extend */
7291 if (d_ot == OT_QUAD)
7292 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7293 gen_op_mov_reg_T0(d_ot, reg);
7294 } else {
7295 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7296 if (d_ot == OT_QUAD) {
7297 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7298 } else {
7299 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7301 gen_op_mov_reg_T0(d_ot, reg);
7303 } else
7304 #endif
7306 int label1;
7307 TCGv t0, t1, t2, a0;
7309 if (!s->pe || s->vm86)
7310 goto illegal_op;
7311 t0 = tcg_temp_local_new();
7312 t1 = tcg_temp_local_new();
7313 t2 = tcg_temp_local_new();
7314 ot = OT_WORD;
7315 modrm = ldub_code(s->pc++);
7316 reg = (modrm >> 3) & 7;
7317 mod = (modrm >> 6) & 3;
7318 rm = modrm & 7;
7319 if (mod != 3) {
7320 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7321 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7322 a0 = tcg_temp_local_new();
7323 tcg_gen_mov_tl(a0, cpu_A0);
7324 } else {
7325 gen_op_mov_v_reg(ot, t0, rm);
7326 TCGV_UNUSED(a0);
7328 gen_op_mov_v_reg(ot, t1, reg);
7329 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7330 tcg_gen_andi_tl(t1, t1, 3);
7331 tcg_gen_movi_tl(t2, 0);
7332 label1 = gen_new_label();
7333 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7334 tcg_gen_andi_tl(t0, t0, ~3);
7335 tcg_gen_or_tl(t0, t0, t1);
7336 tcg_gen_movi_tl(t2, CC_Z);
7337 gen_set_label(label1);
7338 if (mod != 3) {
7339 gen_op_st_v(ot + s->mem_index, t0, a0);
7340 tcg_temp_free(a0);
7341 } else {
7342 gen_op_mov_reg_v(ot, rm, t0);
7344 if (s->cc_op != CC_OP_DYNAMIC)
7345 gen_op_set_cc_op(s->cc_op);
7346 gen_compute_eflags(cpu_cc_src);
7347 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7348 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7349 s->cc_op = CC_OP_EFLAGS;
7350 tcg_temp_free(t0);
7351 tcg_temp_free(t1);
7352 tcg_temp_free(t2);
7354 break;
7355 case 0x102: /* lar */
7356 case 0x103: /* lsl */
7358 int label1;
7359 TCGv t0;
7360 if (!s->pe || s->vm86)
7361 goto illegal_op;
7362 ot = dflag ? OT_LONG : OT_WORD;
7363 modrm = ldub_code(s->pc++);
7364 reg = ((modrm >> 3) & 7) | rex_r;
7365 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7366 t0 = tcg_temp_local_new();
7367 if (s->cc_op != CC_OP_DYNAMIC)
7368 gen_op_set_cc_op(s->cc_op);
7369 if (b == 0x102)
7370 gen_helper_lar(t0, cpu_T[0]);
7371 else
7372 gen_helper_lsl(t0, cpu_T[0]);
7373 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7374 label1 = gen_new_label();
7375 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7376 gen_op_mov_reg_v(ot, reg, t0);
7377 gen_set_label(label1);
7378 s->cc_op = CC_OP_EFLAGS;
7379 tcg_temp_free(t0);
7381 break;
7382 case 0x118:
7383 modrm = ldub_code(s->pc++);
7384 mod = (modrm >> 6) & 3;
7385 op = (modrm >> 3) & 7;
7386 switch(op) {
7387 case 0: /* prefetchnta */
7388 case 1: /* prefetcht0 */
7389 case 2: /* prefetcht1 */
7390 case 3: /* prefetcht2 */
7391 if (mod == 3)
7392 goto illegal_op;
7393 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7394 /* nothing more to do */
7395 break;
7396 default: /* nop (multi byte) */
7397 gen_nop_modrm(s, modrm);
7398 break;
7400 break;
7401 case 0x119 ... 0x11f: /* nop (multi byte) */
7402 modrm = ldub_code(s->pc++);
7403 gen_nop_modrm(s, modrm);
7404 break;
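/* mov to/from control registers.  On CPUs advertising the
   CR8_LEGACY CPUID bit, AMD defines a LOCK-prefixed encoding of
   the CR0 forms as an alias for CR8; this is handled by the
   reg = 8 rewrite below. */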
7405 case 0x120: /* mov reg, crN */
7406 case 0x122: /* mov crN, reg */
7407 if (s->cpl != 0) {
7408 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7409 } else {
7410 modrm = ldub_code(s->pc++);
7411 if ((modrm & 0xc0) != 0xc0)
7412 goto illegal_op;
7413 rm = (modrm & 7) | REX_B(s);
7414 reg = ((modrm >> 3) & 7) | rex_r;
7415 if (CODE64(s))
7416 ot = OT_QUAD;
7417 else
7418 ot = OT_LONG;
7419 if ((prefixes & PREFIX_LOCK) && (reg == 0) &&
7420 (s->cpuid_ext3_features & CPUID_EXT3_CR8LEG)) {
7421 reg = 8;
7423 switch(reg) {
7424 case 0:
7425 case 2:
7426 case 3:
7427 case 4:
7428 case 8:
7429 if (s->cc_op != CC_OP_DYNAMIC)
7430 gen_op_set_cc_op(s->cc_op);
7431 gen_jmp_im(pc_start - s->cs_base);
7432 if (b & 2) {
7433 gen_op_mov_TN_reg(ot, 0, rm);
7434 gen_helper_write_crN(tcg_const_i32(reg), cpu_T[0]);
7435 gen_jmp_im(s->pc - s->cs_base);
7436 gen_eob(s);
7437 } else {
7438 gen_helper_read_crN(cpu_T[0], tcg_const_i32(reg));
7439 gen_op_mov_reg_T0(ot, rm);
7441 break;
7442 default:
7443 goto illegal_op;
7446 break;
7447 case 0x121: /* mov reg, drN */
7448 case 0x123: /* mov drN, reg */
7449 if (s->cpl != 0) {
7450 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7451 } else {
7452 modrm = ldub_code(s->pc++);
7453 if ((modrm & 0xc0) != 0xc0)
7454 goto illegal_op;
7455 rm = (modrm & 7) | REX_B(s);
7456 reg = ((modrm >> 3) & 7) | rex_r;
7457 if (CODE64(s))
7458 ot = OT_QUAD;
7459 else
7460 ot = OT_LONG;
7461 /* XXX: do it dynamically with CR4.DE bit */
7462 if (reg == 4 || reg == 5 || reg >= 8)
7463 goto illegal_op;
7464 if (b & 2) {
7465 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7466 gen_op_mov_TN_reg(ot, 0, rm);
7467 gen_helper_movl_drN_T0(tcg_const_i32(reg), cpu_T[0]);
7468 gen_jmp_im(s->pc - s->cs_base);
7469 gen_eob(s);
7470 } else {
7471 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7472 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7473 gen_op_mov_reg_T0(ot, rm);
7476 break;
7477 case 0x106: /* clts */
7478 if (s->cpl != 0) {
7479 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7480 } else {
7481 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7482 gen_helper_clts();
7483 /* abort block because static cpu state changed */
7484 gen_jmp_im(s->pc - s->cs_base);
7485 gen_eob(s);
7487 break;
7488 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
7489 case 0x1c3: /* MOVNTI reg, mem */
7490 if (!(s->cpuid_features & CPUID_SSE2))
7491 goto illegal_op;
7492 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
7493 modrm = ldub_code(s->pc++);
7494 mod = (modrm >> 6) & 3;
7495 if (mod == 3)
7496 goto illegal_op;
7497 reg = ((modrm >> 3) & 7) | rex_r;
7498 /* generate a generic store */
7499 gen_ldst_modrm(s, modrm, ot, reg, 1);
7500 break;
7501 case 0x1ae:
7502 modrm = ldub_code(s->pc++);
7503 mod = (modrm >> 6) & 3;
7504 op = (modrm >> 3) & 7;
7505 switch(op) {
7506 case 0: /* fxsave */
7507 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7508 (s->prefix & PREFIX_LOCK))
7509 goto illegal_op;
7510 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
7511 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7512 break;
7514 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7515 if (s->cc_op != CC_OP_DYNAMIC)
7516 gen_op_set_cc_op(s->cc_op);
7517 gen_jmp_im(pc_start - s->cs_base);
7518 gen_helper_fxsave(cpu_A0, tcg_const_i32((s->dflag == 2)));
7519 break;
7520 case 1: /* fxrstor */
7521 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7522 (s->prefix & PREFIX_LOCK))
7523 goto illegal_op;
7524 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
7525 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7526 break;
7528 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7529 if (s->cc_op != CC_OP_DYNAMIC)
7530 gen_op_set_cc_op(s->cc_op);
7531 gen_jmp_im(pc_start - s->cs_base);
7532 gen_helper_fxrstor(cpu_A0, tcg_const_i32((s->dflag == 2)));
7533 break;
7534 case 2: /* ldmxcsr */
7535 case 3: /* stmxcsr */
7536 if (s->flags & HF_TS_MASK) {
7537 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7538 break;
7540 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
7541 mod == 3)
7542 goto illegal_op;
7543 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7544 if (op == 2) {
7545 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7546 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7547 } else {
7548 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7549 gen_op_st_T0_A0(OT_LONG + s->mem_index);
7551 break;
7552 case 5: /* lfence */
7553 case 6: /* mfence */
7554 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE2))
7555 goto illegal_op;
7556 break;
7557 case 7: /* sfence / clflush */
7558 if ((modrm & 0xc7) == 0xc0) {
7559 /* sfence */
7560 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7561 if (!(s->cpuid_features & CPUID_SSE))
7562 goto illegal_op;
7563 } else {
7564 /* clflush */
7565 if (!(s->cpuid_features & CPUID_CLFLUSH))
7566 goto illegal_op;
7567 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7569 break;
7570 default:
7571 goto illegal_op;
7573 break;
7574 case 0x10d: /* 3DNow! prefetch(w) */
7575 modrm = ldub_code(s->pc++);
7576 mod = (modrm >> 6) & 3;
7577 if (mod == 3)
7578 goto illegal_op;
7579 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7580 /* ignore for now */
7581 break;
7582 case 0x1aa: /* rsm */
7583 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7584 if (!(s->flags & HF_SMM_MASK))
7585 goto illegal_op;
7586 gen_update_cc_op(s);
7587 gen_jmp_im(s->pc - s->cs_base);
7588 gen_helper_rsm();
7589 gen_eob(s);
7590 break;
7591 case 0x1b8: /* SSE4.2 popcnt */
7592 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
7593 PREFIX_REPZ)
7594 goto illegal_op;
7595 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
7596 goto illegal_op;
7598 modrm = ldub_code(s->pc++);
7599 reg = ((modrm >> 3) & 7);
7601 if (s->prefix & PREFIX_DATA)
7602 ot = OT_WORD;
7603 else if (s->dflag != 2)
7604 ot = OT_LONG;
7605 else
7606 ot = OT_QUAD;
7608 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7609 gen_helper_popcnt(cpu_T[0], cpu_T[0], tcg_const_i32(ot));
7610 gen_op_mov_reg_T0(ot, reg);
7612 s->cc_op = CC_OP_EFLAGS;
7613 break;
7614 case 0x10e ... 0x10f:
7615 /* 3DNow! instructions, ignore prefixes */
7616 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
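/* fall through to the common SSE dispatcher */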
7617 case 0x110 ... 0x117:
7618 case 0x128 ... 0x12f:
7619 case 0x138 ... 0x13a:
7620 case 0x150 ... 0x179:
7621 case 0x17c ... 0x17f:
7622 case 0x1c2:
7623 case 0x1c4 ... 0x1c6:
7624 case 0x1d0 ... 0x1fe:
7625 gen_sse(s, b, pc_start, rex_r);
7626 break;
7627 default:
7628 goto illegal_op;
7630 /* lock generation */
7631 if (s->prefix & PREFIX_LOCK)
7632 gen_helper_unlock();
7633 return s->pc;
7634 illegal_op:
7635 if (s->prefix & PREFIX_LOCK)
7636 gen_helper_unlock();
7637 /* XXX: ensure that no lock was generated */
7638 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7639 return s->pc;
7640 }
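/* Register the fixed TCG globals used by the translator: the env
   pointer, the cc_* condition-code staging fields and the guest
   general purpose registers, each backed by its slot in CPUState.
   The asserts sanity-check that CCTable packs to the expected
   size for the host word width. */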
7642 void optimize_flags_init(void)
7643 {
7644 #if TCG_TARGET_REG_BITS == 32
7645 assert(sizeof(CCTable) == (1 << 3));
7646 #else
7647 assert(sizeof(CCTable) == (1 << 4));
7648 #endif
7649 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
7650 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
7651 offsetof(CPUState, cc_op), "cc_op");
7652 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
7653 "cc_src");
7654 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
7655 "cc_dst");
7656 cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
7657 "cc_tmp");
7659 #ifdef TARGET_X86_64
7660 cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
7661 offsetof(CPUState, regs[R_EAX]), "rax");
7662 cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
7663 offsetof(CPUState, regs[R_ECX]), "rcx");
7664 cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
7665 offsetof(CPUState, regs[R_EDX]), "rdx");
7666 cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
7667 offsetof(CPUState, regs[R_EBX]), "rbx");
7668 cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
7669 offsetof(CPUState, regs[R_ESP]), "rsp");
7670 cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
7671 offsetof(CPUState, regs[R_EBP]), "rbp");
7672 cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
7673 offsetof(CPUState, regs[R_ESI]), "rsi");
7674 cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
7675 offsetof(CPUState, regs[R_EDI]), "rdi");
7676 cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
7677 offsetof(CPUState, regs[8]), "r8");
7678 cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
7679 offsetof(CPUState, regs[9]), "r9");
7680 cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
7681 offsetof(CPUState, regs[10]), "r10");
7682 cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
7683 offsetof(CPUState, regs[11]), "r11");
7684 cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
7685 offsetof(CPUState, regs[12]), "r12");
7686 cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
7687 offsetof(CPUState, regs[13]), "r13");
7688 cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
7689 offsetof(CPUState, regs[14]), "r14");
7690 cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
7691 offsetof(CPUState, regs[15]), "r15");
7692 #else
7693 cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
7694 offsetof(CPUState, regs[R_EAX]), "eax");
7695 cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
7696 offsetof(CPUState, regs[R_ECX]), "ecx");
7697 cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
7698 offsetof(CPUState, regs[R_EDX]), "edx");
7699 cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
7700 offsetof(CPUState, regs[R_EBX]), "ebx");
7701 cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
7702 offsetof(CPUState, regs[R_ESP]), "esp");
7703 cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
7704 offsetof(CPUState, regs[R_EBP]), "ebp");
7705 cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
7706 offsetof(CPUState, regs[R_ESI]), "esi");
7707 cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
7708 offsetof(CPUState, regs[R_EDI]), "edi");
7709 #endif
7711 /* register helpers */
7712 #define GEN_HELPER 2
7713 #include "helper.h"
7714 }
7716 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7717 basic block 'tb'. If search_pc is TRUE, also generate PC
7718 information for each intermediate instruction. */
7719 static inline void gen_intermediate_code_internal(CPUState *env,
7720 TranslationBlock *tb,
7721 int search_pc)
7722 {
7723 DisasContext dc1, *dc = &dc1;
7724 target_ulong pc_ptr;
7725 uint16_t *gen_opc_end;
7726 CPUBreakpoint *bp;
7727 int j, lj;
7728 uint64_t flags;
7729 target_ulong pc_start;
7730 target_ulong cs_base;
7731 int num_insns;
7732 int max_insns;
7734 /* generate intermediate code */
7735 pc_start = tb->pc;
7736 cs_base = tb->cs_base;
7737 flags = tb->flags;
7739 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7740 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7741 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7742 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7743 dc->f_st = 0;
7744 dc->vm86 = (flags >> VM_SHIFT) & 1;
7745 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7746 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7747 dc->tf = (flags >> TF_SHIFT) & 1;
7748 dc->singlestep_enabled = env->singlestep_enabled;
7749 dc->cc_op = CC_OP_DYNAMIC;
7750 dc->cs_base = cs_base;
7751 dc->tb = tb;
7752 dc->popl_esp_hack = 0;
7753 /* select memory access functions */
7754 dc->mem_index = 0;
7755 if (flags & HF_SOFTMMU_MASK) {
7756 if (dc->cpl == 3)
7757 dc->mem_index = 2 * 4;
7758 else
7759 dc->mem_index = 1 * 4;
7761 dc->cpuid_features = env->cpuid_features;
7762 dc->cpuid_ext_features = env->cpuid_ext_features;
7763 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7764 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7765 #ifdef TARGET_X86_64
7766 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7767 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7768 #endif
7769 dc->flags = flags;
7770 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7771 (flags & HF_INHIBIT_IRQ_MASK)
7772 #ifndef CONFIG_SOFTMMU
7773 || (flags & HF_SOFTMMU_MASK)
7774 #endif
7776 #if 0
7777 /* check addseg logic */
7778 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7779 printf("ERROR addseg\n");
7780 #endif
7782 cpu_T[0] = tcg_temp_new();
7783 cpu_T[1] = tcg_temp_new();
7784 cpu_A0 = tcg_temp_new();
7785 cpu_T3 = tcg_temp_new();
7787 cpu_tmp0 = tcg_temp_new();
7788 cpu_tmp1_i64 = tcg_temp_new_i64();
7789 cpu_tmp2_i32 = tcg_temp_new_i32();
7790 cpu_tmp3_i32 = tcg_temp_new_i32();
7791 cpu_tmp4 = tcg_temp_new();
7792 cpu_tmp5 = tcg_temp_new();
7793 cpu_ptr0 = tcg_temp_new_ptr();
7794 cpu_ptr1 = tcg_temp_new_ptr();
7796 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7798 dc->is_jmp = DISAS_NEXT;
7799 pc_ptr = pc_start;
7800 lj = -1;
7801 num_insns = 0;
7802 max_insns = tb->cflags & CF_COUNT_MASK;
7803 if (max_insns == 0)
7804 max_insns = CF_COUNT_MASK;
7806 gen_icount_start();
7807 for(;;) {
7808 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
7809 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
7810 if (bp->pc == pc_ptr &&
7811 !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
7812 gen_debug(dc, pc_ptr - dc->cs_base);
7813 break;
7817 if (search_pc) {
7818 j = gen_opc_ptr - gen_opc_buf;
7819 if (lj < j) {
7820 lj++;
7821 while (lj < j)
7822 gen_opc_instr_start[lj++] = 0;
7824 gen_opc_pc[lj] = pc_ptr;
7825 gen_opc_cc_op[lj] = dc->cc_op;
7826 gen_opc_instr_start[lj] = 1;
7827 gen_opc_icount[lj] = num_insns;
7829 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
7830 gen_io_start();
7832 pc_ptr = disas_insn(dc, pc_ptr);
7833 num_insns++;
7834 /* stop translation if indicated */
7835 if (dc->is_jmp)
7836 break;
7837 /* in single step mode, we generate only one instruction and
7838 raise an exception */
7839 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7840 the flag and abort the translation to give the irqs a
7841 chance to happen */
7842 if (dc->tf || dc->singlestep_enabled ||
7843 (flags & HF_INHIBIT_IRQ_MASK)) {
7844 gen_jmp_im(pc_ptr - dc->cs_base);
7845 gen_eob(dc);
7846 break;
7848 /* if the translation grows too long, stop generating as well */
7849 if (gen_opc_ptr >= gen_opc_end ||
7850 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
7851 num_insns >= max_insns) {
7852 gen_jmp_im(pc_ptr - dc->cs_base);
7853 gen_eob(dc);
7854 break;
7856 if (singlestep) {
7857 gen_jmp_im(pc_ptr - dc->cs_base);
7858 gen_eob(dc);
7859 break;
7862 if (tb->cflags & CF_LAST_IO)
7863 gen_io_end();
7864 gen_icount_end(tb, num_insns);
7865 *gen_opc_ptr = INDEX_op_end;
7866 /* make sure the last entries are filled */
7867 if (search_pc) {
7868 j = gen_opc_ptr - gen_opc_buf;
7869 lj++;
7870 while (lj <= j)
7871 gen_opc_instr_start[lj++] = 0;
7874 #ifdef DEBUG_DISAS
7875 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
7876 int disas_flags;
7877 qemu_log("----------------\n");
7878 qemu_log("IN: %s\n", lookup_symbol(pc_start));
7879 #ifdef TARGET_X86_64
7880 if (dc->code64)
7881 disas_flags = 2;
7882 else
7883 #endif
7884 disas_flags = !dc->code32;
7885 log_target_disas(pc_start, pc_ptr - pc_start, disas_flags);
7886 qemu_log("\n");
7887 }
7888 #endif
7890 if (!search_pc) {
7891 tb->size = pc_ptr - pc_start;
7892 tb->icount = num_insns;
7893 }
7894 }
7896 void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7897 {
7898 gen_intermediate_code_internal(env, tb, 0);
7899 }
7901 void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7902 {
7903 gen_intermediate_code_internal(env, tb, 1);
7904 }
7906 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
7907 {
7908 int cc_op;
7909 #ifdef DEBUG_DISAS
7910 if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
7911 int i;
7912 qemu_log("RESTORE:\n");
7913 for(i = 0;i <= pc_pos; i++) {
7914 if (gen_opc_instr_start[i]) {
7915 qemu_log("0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7916 }
7917 }
7918 qemu_log("pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7919 pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7920 (uint32_t)tb->cs_base);
7921 }
7922 #endif
7923 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7924 cc_op = gen_opc_cc_op[pc_pos];
7925 if (cc_op != CC_OP_DYNAMIC)
7926 env->cc_op = cc_op;
7927 }