target-i386/translate.c
/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "tcg-op.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"
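
/*
 * helper.h is deliberately included twice: the first include declares the
 * helper function prototypes, while the second, with GEN_HELPER defined,
 * expands the same DEF_HELPER macros into inline gen_helper_*() wrappers
 * that emit TCG call ops.  As an illustrative sketch (hypothetical helper
 * name, see def-helper.h for the exact expansion), a declaration like
 * DEF_HELPER_1(foo, void, tl) yields roughly both
 *
 *     void helper_foo(target_ulong t0);           // callable prototype
 *     static inline void gen_helper_foo(TCGv t0); // emits the TCG call
 */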
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(...)  __VA_ARGS__
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
//#define MACRO_TEST   1

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv_i32 cpu_cc_op;
static TCGv cpu_regs[CPU_NB_REGS];
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp4;
static TCGv_ptr cpu_ptr0, cpu_ptr1;
static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
static TCGv_i64 cpu_tmp1_i64;
static TCGv cpu_tmp5;
#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
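
/*
 * A DisasContext is the translator's scratch state for one translation
 * block: the "current insn context" fields are reset while decoding each
 * instruction, whereas the "current block context" fields (cs_base, pe,
 * code32, ss32, ...) stay fixed for the whole block because they are
 * derived from the TB flags when translation starts.  Nothing in here
 * survives past the end of the block.
 */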
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(HOST_WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    TCGv tmp;

    switch(ot) {
    case OT_BYTE:
        tmp = tcg_temp_new();
        tcg_gen_ext8u_tl(tmp, t0);
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xff);
            tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], tmp);
        } else {
            tcg_gen_shli_tl(tmp, tmp, 8);
            tcg_gen_andi_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], ~0xff00);
            tcg_gen_or_tl(cpu_regs[reg - 4], cpu_regs[reg - 4], tmp);
        }
        tcg_temp_free(tmp);
        break;
    case OT_WORD:
        tmp = tcg_temp_new();
        tcg_gen_ext16u_tl(tmp, t0);
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], tmp);
        tcg_temp_free(tmp);
        break;
    default: /* XXX this shouldn't be reached; abort? */
    case OT_LONG:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], t0);
        break;
#ifdef TARGET_X86_64
    case OT_QUAD:
        tcg_gen_mov_tl(cpu_regs[reg], t0);
        break;
#endif
    }
}
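
/*
 * Byte stores need the historical x86 register layout: without a REX
 * prefix, registers 4..7 of an 8-bit operand are AH/CH/DH/BH, i.e. bits
 * 15..8 of EAX/ECX/EDX/EBX, which is why the else branch above shifts the
 * value left by 8 and merges it into cpu_regs[reg - 4].  With a REX prefix
 * present (x86_64_hregs set), 4..7 name SPL/BPL/SIL/DIL instead and the
 * ordinary low-byte path is taken.  Illustrative call:
 *
 *     gen_op_mov_reg_v(OT_BYTE, 4, t0);   // no REX: writes AH inside EAX
 */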
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    TCGv tmp;

    switch(size) {
    case 0:
        tmp = tcg_temp_new();
        tcg_gen_ext16u_tl(tmp, cpu_A0);
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], tmp);
        tcg_temp_free(tmp);
        break;
    default: /* XXX this shouldn't be reached; abort? */
    case 1:
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a mov. */
        tcg_gen_ext32u_tl(cpu_regs[reg], cpu_A0);
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_mov_tl(cpu_regs[reg], cpu_A0);
        break;
#endif
    }
}
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_shri_tl(t0, cpu_regs[reg - 4], 8);
            tcg_gen_ext8u_tl(t0, t0);
        }
        break;
    default:
    std_case:
        tcg_gen_mov_tl(t0, cpu_regs[reg]);
        break;
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
        tcg_gen_ext16u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0);
        break;
    case 1:
        tcg_gen_addi_tl(cpu_tmp0, cpu_regs[reg], val);
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a nop. */
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_addi_tl(cpu_regs[reg], cpu_regs[reg], val);
        break;
#endif
    }
}

static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
        tcg_gen_ext16u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_andi_tl(cpu_regs[reg], cpu_regs[reg], ~0xffff);
        tcg_gen_or_tl(cpu_regs[reg], cpu_regs[reg], cpu_tmp0);
        break;
    case 1:
        tcg_gen_add_tl(cpu_tmp0, cpu_regs[reg], cpu_T[0]);
        /* For x86_64, this sets the higher half of register to zero.
           For i386, this is equivalent to a nop. */
        tcg_gen_ext32u_tl(cpu_tmp0, cpu_tmp0);
        tcg_gen_mov_tl(cpu_regs[reg], cpu_tmp0);
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_add_tl(cpu_regs[reg], cpu_regs[reg], cpu_T[0]);
        break;
#endif
    }
}

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
    /* For x86_64, this sets the higher half of register to zero.
       For i386, this is equivalent to a nop. */
    tcg_gen_ext32u_tl(cpu_A0, cpu_A0);
}
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env,
                     offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_mov_tl(cpu_A0, cpu_regs[reg]);
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[reg]);
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_ld64(t0, a0, mem_index);
#endif
        break;
    }
}

/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        /* Should never happen on 32-bit targets.  */
#ifdef TARGET_X86_64
        tcg_gen_qemu_st64(t0, a0, mem_index);
#endif
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address: always add the segment base */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
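
/*
 * String instructions read their source through DS:ESI (overridable by a
 * segment prefix) and write their destination through ES:EDI (never
 * overridable, per the architecture), which is why only the ESI helper
 * above consults s->override.  In 16-bit mode the offset is masked to
 * 64K and the segment base is always added.
 */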
static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_mov_tl(cpu_tmp0, cpu_regs[R_ECX]);
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_inb(v, n); break;
    case 1: gen_helper_inw(v, n); break;
    case 2: gen_helper_inl(v, n); break;
    }
}

static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_outb(v, n); break;
    case 1: gen_helper_outw(v, n); break;
    case 2: gen_helper_outl(v, n); break;
    }
}
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        switch (ot) {
        case 0: gen_helper_check_iob(cpu_tmp2_i32); break;
        case 1: gen_helper_check_iow(cpu_tmp2_i32); break;
        case 2: gen_helper_check_iol(cpu_tmp2_i32); break;
        }
    }
    if (s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags),
                                tcg_const_i32(next_eip - cur_eip));
    }
}
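
/*
 * Both helpers above may raise an exception (#GP from the TSS I/O bitmap
 * check, or an SVM #VMEXIT), so the condition codes and EIP must be
 * written back to the CPU state first; otherwise the exception would be
 * delivered with stale in-translation state.  state_saved merely avoids
 * doing that write-back twice when both checks run.
 */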
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
    gen_helper_cc_compute_c(cpu_tmp2_i32, cpu_cc_op);
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
    gen_helper_cc_compute_all(cpu_tmp2_i32, cpu_cc_op);
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
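
/*
 * Condition codes are evaluated lazily: a flag-setting insn only records
 * its operands in cpu_cc_src/cpu_cc_dst and the operation kind in cc_op
 * (tracked at translation time in s->cc_op while it is a compile-time
 * constant).  The actual EFLAGS bits are materialized, by the
 * cc_compute_* helpers above, only when something reads them.  So a
 * guest sequence like
 *
 *     sub %ebx, %eax
 *     jz  target
 *
 * stores src/dst and CC_OP_SUBL for the sub, and the jz then tests
 * cpu_cc_dst directly (see gen_jcc1) without ever building EFLAGS.
 */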
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
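
/*
 * The fast paths above exploit the lazy cc_op encoding.  After a CMP/SUB,
 * cc_dst = op1 - op2 and cc_src = op2, so cc_dst + cc_src recovers op1
 * and "below" becomes a plain unsigned compare of the original operands,
 * roughly
 *
 *     tcg_gen_brcond_tl(TCG_COND_LTU, op1, op2, l1);  // op1 rebuilt in cpu_tmp4
 *
 * without ever computing CF.  Only JCC_O and JCC_P need the slow EFLAGS
 * materialization.
 */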
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
static inline void gen_ins(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

static inline void gen_outs(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}
/* same method as Valgrind: we generate jumps to current or next
   instruction */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1              \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
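
/*
 * A REP-prefixed string insn is thus translated as one iteration wrapped
 * in branches rather than as an explicit loop: if ECX is zero we jump
 * straight to next_eip, otherwise we run one iteration, decrement ECX
 * (and for REPZ/REPNZ test ZF via gen_jcc1), and jump back to cur_eip so
 * the same translated block re-executes.  GEN_REPZ2 takes nz = 1 for
 * REPNZ, which flips the ZF condition used to leave the loop.
 */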
static void gen_helper_fp_arith_ST0_FT0(int op)
{
    switch (op) {
    case 0: gen_helper_fadd_ST0_FT0(); break;
    case 1: gen_helper_fmul_ST0_FT0(); break;
    case 2: gen_helper_fcom_ST0_FT0(); break;
    case 3: gen_helper_fcom_ST0_FT0(); break;
    case 4: gen_helper_fsub_ST0_FT0(); break;
    case 5: gen_helper_fsubr_ST0_FT0(); break;
    case 6: gen_helper_fdiv_ST0_FT0(); break;
    case 7: gen_helper_fdivr_ST0_FT0(); break;
    }
}

/* NOTE the exception in "r" op ordering */
static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
{
    TCGv_i32 tmp = tcg_const_i32(opreg);
    switch (op) {
    case 0: gen_helper_fadd_STN_ST0(tmp); break;
    case 1: gen_helper_fmul_STN_ST0(tmp); break;
    case 4: gen_helper_fsubr_STN_ST0(tmp); break;
    case 5: gen_helper_fsub_STN_ST0(tmp); break;
    case 6: gen_helper_fdivr_STN_ST0(tmp); break;
    case 7: gen_helper_fdiv_STN_ST0(tmp); break;
    }
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
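
/*
 * Putting the pieces together: for a guest "add %ebx,%eax" the decoder
 * would call roughly gen_op(s, OP_ADDL, OT_LONG, OR_EAX) with T1 already
 * holding EBX, which expands to
 *
 *     gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);          // T0 = EAX
 *     tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);  // T0 += T1
 *     gen_op_mov_reg_T0(OT_LONG, R_EAX);             // write back
 *     gen_op_update2_cc();                           // record operands for lazy flags
 *     s1->cc_op = CC_OP_ADDB + OT_LONG;              // i.e. CC_OP_ADDL
 *
 * (illustrative; the actual call site lives in the instruction decoder).
 */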
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
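
/*
 * x86 leaves EFLAGS untouched when the (masked) shift count is zero, so
 * the flags update above has to be guarded by a run-time branch on
 * cpu_T[1]: the shift by count-1 saved in cpu_T3 captures the last bit
 * shifted out (the new CF), but it may only be committed to
 * cc_src/cc_dst when the count is non-zero.  Because that decision is
 * made at run time, s->cc_op must drop to CC_OP_DYNAMIC afterwards.
 */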
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    t2 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_subfi_tl(cpu_tmp0, data_bits, cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_subfi_tl(cpu_tmp0, data_bits, cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
static void gen_rot_rm_im(DisasContext *s, int ot, int op1, int op2,
                          int is_right)
{
    int mask;
    int data_bits;
    TCGv t0, t1, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t1, t0);

    op2 &= mask;
    data_bits = 8 << ot;
    if (op2 != 0) {
        int shift = op2 & ((1 << (3 + ot)) - 1);
        if (is_right) {
            tcg_gen_shri_tl(cpu_tmp4, t0, shift);
            tcg_gen_shli_tl(t0, t0, data_bits - shift);
        } else {
            tcg_gen_shli_tl(cpu_tmp4, t0, shift);
            tcg_gen_shri_tl(t0, t0, data_bits - shift);
        }
        tcg_gen_or_tl(t0, t0, cpu_tmp4);
    }

    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    if (op2 != 0) {
        /* update eflags */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);

        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
        tcg_gen_xor_tl(cpu_tmp0, t1, t0);
        tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
        if (is_right) {
            tcg_gen_shri_tl(t0, t0, data_bits - 1);
        }
        tcg_gen_andi_tl(t0, t0, CC_C);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

        tcg_gen_discard_tl(cpu_cc_dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        s->cc_op = CC_OP_EFLAGS;
    }

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(a0);
}
/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    if (is_right) {
        switch (ot) {
        case 0: gen_helper_rcrb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 1: gen_helper_rcrw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 2: gen_helper_rcrl(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#ifdef TARGET_X86_64
        case 3: gen_helper_rcrq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#endif
        }
    } else {
        switch (ot) {
        case 0: gen_helper_rclb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 1: gen_helper_rclw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 2: gen_helper_rcll(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#ifdef TARGET_X86_64
        case 3: gen_helper_rclq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#endif
        }
    }
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    t2 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but testing for that would
               complicate the code */
            tcg_gen_subfi_tl(cpu_tmp5, 32, t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_subfi_tl(cpu_tmp0, 32, cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp5, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_subfi_tl(cpu_tmp5, 32, t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_subfi_tl(cpu_tmp5, data_bits, t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_subfi_tl(cpu_tmp5, data_bits, t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    switch(op) {
    case OP_ROL:
        gen_rot_rm_im(s1, ot, d, c, 0);
        break;
    case OP_ROR:
        gen_rot_rm_im(s1, ot, d, c, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        break;
    default:
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
        break;
    }
}
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {

        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* index == 4 means no index */
        if (havesib && (index != 4)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
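
/*
 * Example decode: for the 32-bit insn "mov 0x8(%ebx,%esi,4),%eax" the
 * ModRM byte has mod=01 rm=100, so a SIB byte follows (scale=2,
 * index=ESI, base=EBX) plus an 8-bit displacement.  gen_lea_modrm()
 * then emits, in effect,
 *
 *     A0 = EBX;  A0 += 8;  A0 += ESI << 2;   // plus the DS base if addseg
 *
 * leaving the linear address in cpu_A0 for the caller's load or store
 * (illustrative; the operand fetch itself is done by the caller, e.g.
 * gen_ldst_modrm below).
 */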
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {

        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
static void gen_add_A0_ds_seg(DisasContext *s)
{
    int override, must_add_seg;
    must_add_seg = s->addseg;
    override = R_DS;
    if (s->override >= 0) {
        override = s->override;
        must_add_seg = 1;
    }
    if (must_add_seg) {
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            gen_op_addq_A0_seg(override);
        } else
#endif
        {
            gen_op_addl_A0_seg(override);
        }
    }
}
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0 */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}

static inline int insn_const_size(unsigned int ot)
{
    if (ot <= OT_LONG)
        return 1 << ot;
    else
        return 4;
}
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
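
/*
 * Direct block chaining: tcg_gen_goto_tb() leaves a patchable jump slot
 * (tb_num selects one of the TB's two exits) and tcg_gen_exit_tb()
 * returns the TB pointer tagged with that exit number, so the execution
 * loop can later patch the slot to jump straight into the next translated
 * block.  Chaining is only done when the target shares a guest page with
 * this TB: invalidating that page then invalidates the chained TBs with
 * it, whereas a cross-page jump must go back through the exec loop so the
 * target mapping is rechecked every time.
 */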
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {

        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new();
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worthwhile */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
2370 static inline void gen_op_movl_T0_seg(int seg_reg)
2372 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2373 offsetof(CPUX86State,segs[seg_reg].selector));
2376 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2378 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2379 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2380 offsetof(CPUX86State,segs[seg_reg].selector));
2381 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2382 tcg_gen_st_tl(cpu_T[0], cpu_env,
2383 offsetof(CPUX86State,segs[seg_reg].base));
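/* In real and vm86 mode a segment load has no side effects: the base
   is simply selector << 4.  E.g. "mov ss, ax" with ax = 0x1234 sets
   segs[R_SS].selector = 0x1234 and segs[R_SS].base = 0x12340. */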
2386 /* move T0 to seg_reg and compute if the CPU state may change. Never
2387 call this function with seg_reg == R_CS */
2388 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2390 if (s->pe && !s->vm86) {
2391 /* XXX: optimize by finding processor state dynamically */
2392 if (s->cc_op != CC_OP_DYNAMIC)
2393 gen_op_set_cc_op(s->cc_op);
2394 gen_jmp_im(cur_eip);
2395 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2396 gen_helper_load_seg(tcg_const_i32(seg_reg), cpu_tmp2_i32);
2397 /* abort translation because the addseg value may change or
2398 because ss32 may change. For R_SS, translation must always
2399 stop, since special handling is needed to inhibit hardware
2400 interrupts for the next instruction */
2401 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2402 s->is_jmp = 3;
2403 } else {
2404 gen_op_movl_seg_T0_vm(seg_reg);
2405 if (seg_reg == R_SS)
2406 s->is_jmp = 3;
2410 static inline int svm_is_rep(int prefixes)
2412 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2415 static inline void
2416 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2417 uint32_t type, uint64_t param)
2419 /* SVM not active; fast case */
2420 if (likely(!(s->flags & HF_SVMI_MASK)))
2421 return;
2422 if (s->cc_op != CC_OP_DYNAMIC)
2423 gen_op_set_cc_op(s->cc_op);
2424 gen_jmp_im(pc_start - s->cs_base);
2425 gen_helper_svm_check_intercept_param(tcg_const_i32(type),
2426 tcg_const_i64(param));
2429 static inline void
2430 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2432 gen_svm_check_intercept_param(s, pc_start, type, 0);
2435 static inline void gen_stack_update(DisasContext *s, int addend)
2437 #ifdef TARGET_X86_64
2438 if (CODE64(s)) {
2439 gen_op_add_reg_im(2, R_ESP, addend);
2440 } else
2441 #endif
2442 if (s->ss32) {
2443 gen_op_add_reg_im(1, R_ESP, addend);
2444 } else {
2445 gen_op_add_reg_im(0, R_ESP, addend);
2449 /* generate a push. It depends on ss32, addseg and dflag */
2450 static void gen_push_T0(DisasContext *s)
2452 #ifdef TARGET_X86_64
2453 if (CODE64(s)) {
2454 gen_op_movq_A0_reg(R_ESP);
2455 if (s->dflag) {
2456 gen_op_addq_A0_im(-8);
2457 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2458 } else {
2459 gen_op_addq_A0_im(-2);
2460 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2462 gen_op_mov_reg_A0(2, R_ESP);
2463 } else
2464 #endif
2466 gen_op_movl_A0_reg(R_ESP);
2467 if (!s->dflag)
2468 gen_op_addl_A0_im(-2);
2469 else
2470 gen_op_addl_A0_im(-4);
2471 if (s->ss32) {
2472 if (s->addseg) {
2473 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2474 gen_op_addl_A0_seg(R_SS);
2476 } else {
2477 gen_op_andl_A0_ffff();
2478 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2479 gen_op_addl_A0_seg(R_SS);
2481 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2482 if (s->ss32 && !s->addseg)
2483 gen_op_mov_reg_A0(1, R_ESP);
2484 else
2485 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
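/* Example: a 32-bit "push %eax" with ss32 set and no segment base
   (addseg == 0) computes A0 = ESP - 4, stores T0 there and writes A0
   back to ESP.  When SS has a non-zero base (addseg), the unadjusted
   offset is kept in T1 first, so that ESP is updated with the segment
   offset rather than the linear address. */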
2489 /* generate a push. It depends on ss32, addseg and dflag */
2490 /* slower version for T1, only used for call Ev */
2491 static void gen_push_T1(DisasContext *s)
2493 #ifdef TARGET_X86_64
2494 if (CODE64(s)) {
2495 gen_op_movq_A0_reg(R_ESP);
2496 if (s->dflag) {
2497 gen_op_addq_A0_im(-8);
2498 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2499 } else {
2500 gen_op_addq_A0_im(-2);
2501 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2503 gen_op_mov_reg_A0(2, R_ESP);
2504 } else
2505 #endif
2507 gen_op_movl_A0_reg(R_ESP);
2508 if (!s->dflag)
2509 gen_op_addl_A0_im(-2);
2510 else
2511 gen_op_addl_A0_im(-4);
2512 if (s->ss32) {
2513 if (s->addseg) {
2514 gen_op_addl_A0_seg(R_SS);
2516 } else {
2517 gen_op_andl_A0_ffff();
2518 gen_op_addl_A0_seg(R_SS);
2520 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2522 if (s->ss32 && !s->addseg)
2523 gen_op_mov_reg_A0(1, R_ESP);
2524 else
2525 gen_stack_update(s, (-2) << s->dflag);
2529 /* two step pop is necessary for precise exceptions */
2530 static void gen_pop_T0(DisasContext *s)
2532 #ifdef TARGET_X86_64
2533 if (CODE64(s)) {
2534 gen_op_movq_A0_reg(R_ESP);
2535 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2536 } else
2537 #endif
2539 gen_op_movl_A0_reg(R_ESP);
2540 if (s->ss32) {
2541 if (s->addseg)
2542 gen_op_addl_A0_seg(R_SS);
2543 } else {
2544 gen_op_andl_A0_ffff();
2545 gen_op_addl_A0_seg(R_SS);
2547 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2551 static void gen_pop_update(DisasContext *s)
2553 #ifdef TARGET_X86_64
2554 if (CODE64(s) && s->dflag) {
2555 gen_stack_update(s, 8);
2556 } else
2557 #endif
2559 gen_stack_update(s, 2 << s->dflag);
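/* The pop is split into gen_pop_T0() + gen_pop_update() so that e.g.
   "pop %eax" performs the memory load before ESP is modified: if the
   load faults, ESP still holds its original value when the exception
   is delivered, as required for precise exceptions and restart. */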
2563 static void gen_stack_A0(DisasContext *s)
2565 gen_op_movl_A0_reg(R_ESP);
2566 if (!s->ss32)
2567 gen_op_andl_A0_ffff();
2568 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2569 if (s->addseg)
2570 gen_op_addl_A0_seg(R_SS);
2573 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2574 static void gen_pusha(DisasContext *s)
2576 int i;
2577 gen_op_movl_A0_reg(R_ESP);
2578 gen_op_addl_A0_im(-16 << s->dflag);
2579 if (!s->ss32)
2580 gen_op_andl_A0_ffff();
2581 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2582 if (s->addseg)
2583 gen_op_addl_A0_seg(R_SS);
2584 for(i = 0;i < 8; i++) {
2585 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2586 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2587 gen_op_addl_A0_im(2 << s->dflag);
2589 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2592 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2593 static void gen_popa(DisasContext *s)
2595 int i;
2596 gen_op_movl_A0_reg(R_ESP);
2597 if (!s->ss32)
2598 gen_op_andl_A0_ffff();
2599 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2600 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2601 if (s->addseg)
2602 gen_op_addl_A0_seg(R_SS);
2603 for(i = 0;i < 8; i++) {
2604 /* ESP is not reloaded */
2605 if (i != 3) {
2606 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2607 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2609 gen_op_addl_A0_im(2 << s->dflag);
2611 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2614 static void gen_enter(DisasContext *s, int esp_addend, int level)
2616 int ot, opsize;
2618 level &= 0x1f;
2619 #ifdef TARGET_X86_64
2620 if (CODE64(s)) {
2621 ot = s->dflag ? OT_QUAD : OT_WORD;
2622 opsize = 1 << ot;
2624 gen_op_movl_A0_reg(R_ESP);
2625 gen_op_addq_A0_im(-opsize);
2626 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2628 /* push bp */
2629 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2630 gen_op_st_T0_A0(ot + s->mem_index);
2631 if (level) {
2632 /* XXX: must save state */
2633 gen_helper_enter64_level(tcg_const_i32(level),
2634 tcg_const_i32((ot == OT_QUAD)),
2635 cpu_T[1]);
2637 gen_op_mov_reg_T1(ot, R_EBP);
2638 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2639 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2640 } else
2641 #endif
2643 ot = s->dflag + OT_WORD;
2644 opsize = 2 << s->dflag;
2646 gen_op_movl_A0_reg(R_ESP);
2647 gen_op_addl_A0_im(-opsize);
2648 if (!s->ss32)
2649 gen_op_andl_A0_ffff();
2650 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2651 if (s->addseg)
2652 gen_op_addl_A0_seg(R_SS);
2653 /* push bp */
2654 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2655 gen_op_st_T0_A0(ot + s->mem_index);
2656 if (level) {
2657 /* XXX: must save state */
2658 gen_helper_enter_level(tcg_const_i32(level),
2659 tcg_const_i32(s->dflag),
2660 cpu_T[1]);
2662 gen_op_mov_reg_T1(ot, R_EBP);
2663 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2664 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
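/* This roughly mirrors the x86 ENTER semantics.  E.g. "enter $8, $0"
   in 32-bit code behaves like:
       push %ebp
       mov  %esp, %ebp
       sub  $8, %esp
   A non-zero level additionally copies level - 1 frame pointers and
   pushes the new one, which is delegated to the enter_level helper. */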
2668 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2670 if (s->cc_op != CC_OP_DYNAMIC)
2671 gen_op_set_cc_op(s->cc_op);
2672 gen_jmp_im(cur_eip);
2673 gen_helper_raise_exception(tcg_const_i32(trapno));
2674 s->is_jmp = 3;
2677 /* an interrupt is different from an exception because of the
2678 privilege checks */
2679 static void gen_interrupt(DisasContext *s, int intno,
2680 target_ulong cur_eip, target_ulong next_eip)
2682 if (s->cc_op != CC_OP_DYNAMIC)
2683 gen_op_set_cc_op(s->cc_op);
2684 gen_jmp_im(cur_eip);
2685 gen_helper_raise_interrupt(tcg_const_i32(intno),
2686 tcg_const_i32(next_eip - cur_eip));
2687 s->is_jmp = 3;
2690 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2692 if (s->cc_op != CC_OP_DYNAMIC)
2693 gen_op_set_cc_op(s->cc_op);
2694 gen_jmp_im(cur_eip);
2695 gen_helper_debug();
2696 s->is_jmp = 3;
2699 /* generate a generic end of block. A trace exception is also
2700 generated if needed */
2701 static void gen_eob(DisasContext *s)
2703 if (s->cc_op != CC_OP_DYNAMIC)
2704 gen_op_set_cc_op(s->cc_op);
2705 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2706 gen_helper_reset_inhibit_irq();
2708 if (s->tb->flags & HF_RF_MASK) {
2709 gen_helper_reset_rf();
2711 if (s->singlestep_enabled) {
2712 gen_helper_debug();
2713 } else if (s->tf) {
2714 gen_helper_single_step();
2715 } else {
2716 tcg_gen_exit_tb(0);
2718 s->is_jmp = 3;
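/* Note the priority order above: a gdbstub single step
   (s->singlestep_enabled) wins over the guest's TF flag, which in
   turn replaces the normal tcg_gen_exit_tb(0); in all three cases
   translation stops here. */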
2721 /* generate a jump to eip. No segment change may happen before this,
2722 as a direct jump to the next block may occur */
2723 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2725 if (s->jmp_opt) {
2726 if (s->cc_op != CC_OP_DYNAMIC) {
2727 gen_op_set_cc_op(s->cc_op);
2728 s->cc_op = CC_OP_DYNAMIC;
2730 gen_goto_tb(s, tb_num, eip);
2731 s->is_jmp = 3;
2732 } else {
2733 gen_jmp_im(eip);
2734 gen_eob(s);
2738 static void gen_jmp(DisasContext *s, target_ulong eip)
2740 gen_jmp_tb(s, eip, 0);
2743 static inline void gen_ldq_env_A0(int idx, int offset)
2745 int mem_index = (idx >> 2) - 1;
2746 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2747 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2750 static inline void gen_stq_env_A0(int idx, int offset)
2752 int mem_index = (idx >> 2) - 1;
2753 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2754 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2757 static inline void gen_ldo_env_A0(int idx, int offset)
2759 int mem_index = (idx >> 2) - 1;
2760 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2761 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2762 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2763 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2764 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2767 static inline void gen_sto_env_A0(int idx, int offset)
2769 int mem_index = (idx >> 2) - 1;
2770 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2771 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2772 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2773 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2774 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2777 static inline void gen_op_movo(int d_offset, int s_offset)
2779 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2780 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2781 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2782 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2785 static inline void gen_op_movq(int d_offset, int s_offset)
2787 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2788 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2791 static inline void gen_op_movl(int d_offset, int s_offset)
2793 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2794 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2797 static inline void gen_op_movq_env_0(int d_offset)
2799 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2800 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
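/* TCG has no 128-bit values, so the XMM move helpers above shuttle
   data through cpu_tmp1_i64 in 64-bit halves: gen_op_movo() is two
   i64 load/store pairs, gen_op_movq() one, and gen_op_movl() uses the
   32-bit temporary instead. */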
2803 #define SSE_SPECIAL ((void *)1)
2804 #define SSE_DUMMY ((void *)2)
2806 #define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
2807 #define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
2808 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
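/* Each row of the tables below is indexed by the mandatory prefix of
   the instruction (the b1 value computed in gen_sse): 0 = no prefix
   (ps / MMX form), 1 = 0x66 (pd / XMM form), 2 = 0xF3 (ss),
   3 = 0xF2 (sd).  A NULL slot means the encoding is illegal. */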
2810 static void *sse_op_table1[256][4] = {
2811 /* 3DNow! extensions */
2812 [0x0e] = { SSE_DUMMY }, /* femms */
2813 [0x0f] = { SSE_DUMMY }, /* pf... */
2814 /* pure SSE operations */
2815 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2816 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2817 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2818 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2819 [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
2820 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
2821 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2822 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2824 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2825 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2826 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2827 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd, movntss, movntsd */
2828 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2829 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2830 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
2831 [0x2f] = { gen_helper_comiss, gen_helper_comisd },
2832 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2833 [0x51] = SSE_FOP(sqrt),
2834 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
2835 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
2836 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
2837 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
2838 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
2839 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
2840 [0x58] = SSE_FOP(add),
2841 [0x59] = SSE_FOP(mul),
2842 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
2843 gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
2844 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
2845 [0x5c] = SSE_FOP(sub),
2846 [0x5d] = SSE_FOP(min),
2847 [0x5e] = SSE_FOP(div),
2848 [0x5f] = SSE_FOP(max),
2850 [0xc2] = SSE_FOP(cmpeq),
2851 [0xc6] = { gen_helper_shufps, gen_helper_shufpd },
2853 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2854 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2856 /* MMX ops and their SSE extensions */
2857 [0x60] = MMX_OP2(punpcklbw),
2858 [0x61] = MMX_OP2(punpcklwd),
2859 [0x62] = MMX_OP2(punpckldq),
2860 [0x63] = MMX_OP2(packsswb),
2861 [0x64] = MMX_OP2(pcmpgtb),
2862 [0x65] = MMX_OP2(pcmpgtw),
2863 [0x66] = MMX_OP2(pcmpgtl),
2864 [0x67] = MMX_OP2(packuswb),
2865 [0x68] = MMX_OP2(punpckhbw),
2866 [0x69] = MMX_OP2(punpckhwd),
2867 [0x6a] = MMX_OP2(punpckhdq),
2868 [0x6b] = MMX_OP2(packssdw),
2869 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
2870 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
2871 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2872 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2873 [0x70] = { gen_helper_pshufw_mmx,
2874 gen_helper_pshufd_xmm,
2875 gen_helper_pshufhw_xmm,
2876 gen_helper_pshuflw_xmm },
2877 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2878 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2879 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2880 [0x74] = MMX_OP2(pcmpeqb),
2881 [0x75] = MMX_OP2(pcmpeqw),
2882 [0x76] = MMX_OP2(pcmpeql),
2883 [0x77] = { SSE_DUMMY }, /* emms */
2884 [0x78] = { NULL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* extrq_i, insertq_i */
2885 [0x79] = { NULL, gen_helper_extrq_r, NULL, gen_helper_insertq_r },
2886 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
2887 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
2888 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2889 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2890 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2891 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2892 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
2893 [0xd1] = MMX_OP2(psrlw),
2894 [0xd2] = MMX_OP2(psrld),
2895 [0xd3] = MMX_OP2(psrlq),
2896 [0xd4] = MMX_OP2(paddq),
2897 [0xd5] = MMX_OP2(pmullw),
2898 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2899 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2900 [0xd8] = MMX_OP2(psubusb),
2901 [0xd9] = MMX_OP2(psubusw),
2902 [0xda] = MMX_OP2(pminub),
2903 [0xdb] = MMX_OP2(pand),
2904 [0xdc] = MMX_OP2(paddusb),
2905 [0xdd] = MMX_OP2(paddusw),
2906 [0xde] = MMX_OP2(pmaxub),
2907 [0xdf] = MMX_OP2(pandn),
2908 [0xe0] = MMX_OP2(pavgb),
2909 [0xe1] = MMX_OP2(psraw),
2910 [0xe2] = MMX_OP2(psrad),
2911 [0xe3] = MMX_OP2(pavgw),
2912 [0xe4] = MMX_OP2(pmulhuw),
2913 [0xe5] = MMX_OP2(pmulhw),
2914 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
2915 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2916 [0xe8] = MMX_OP2(psubsb),
2917 [0xe9] = MMX_OP2(psubsw),
2918 [0xea] = MMX_OP2(pminsw),
2919 [0xeb] = MMX_OP2(por),
2920 [0xec] = MMX_OP2(paddsb),
2921 [0xed] = MMX_OP2(paddsw),
2922 [0xee] = MMX_OP2(pmaxsw),
2923 [0xef] = MMX_OP2(pxor),
2924 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2925 [0xf1] = MMX_OP2(psllw),
2926 [0xf2] = MMX_OP2(pslld),
2927 [0xf3] = MMX_OP2(psllq),
2928 [0xf4] = MMX_OP2(pmuludq),
2929 [0xf5] = MMX_OP2(pmaddwd),
2930 [0xf6] = MMX_OP2(psadbw),
2931 [0xf7] = MMX_OP2(maskmov),
2932 [0xf8] = MMX_OP2(psubb),
2933 [0xf9] = MMX_OP2(psubw),
2934 [0xfa] = MMX_OP2(psubl),
2935 [0xfb] = MMX_OP2(psubq),
2936 [0xfc] = MMX_OP2(paddb),
2937 [0xfd] = MMX_OP2(paddw),
2938 [0xfe] = MMX_OP2(paddl),
2941 static void *sse_op_table2[3 * 8][2] = {
2942 [0 + 2] = MMX_OP2(psrlw),
2943 [0 + 4] = MMX_OP2(psraw),
2944 [0 + 6] = MMX_OP2(psllw),
2945 [8 + 2] = MMX_OP2(psrld),
2946 [8 + 4] = MMX_OP2(psrad),
2947 [8 + 6] = MMX_OP2(pslld),
2948 [16 + 2] = MMX_OP2(psrlq),
2949 [16 + 3] = { NULL, gen_helper_psrldq_xmm },
2950 [16 + 6] = MMX_OP2(psllq),
2951 [16 + 7] = { NULL, gen_helper_pslldq_xmm },
2954 static void *sse_op_table3[4 * 3] = {
2955 gen_helper_cvtsi2ss,
2956 gen_helper_cvtsi2sd,
2957 X86_64_ONLY(gen_helper_cvtsq2ss),
2958 X86_64_ONLY(gen_helper_cvtsq2sd),
2960 gen_helper_cvttss2si,
2961 gen_helper_cvttsd2si,
2962 X86_64_ONLY(gen_helper_cvttss2sq),
2963 X86_64_ONLY(gen_helper_cvttsd2sq),
2965 gen_helper_cvtss2si,
2966 gen_helper_cvtsd2si,
2967 X86_64_ONLY(gen_helper_cvtss2sq),
2968 X86_64_ONLY(gen_helper_cvtsd2sq),
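/* Three groups of four entries: cvtsi2ss/sd (0x2a), cvttss/sd2si
   (0x2c) and cvtss/sd2si (0x2d).  Within a group, bit 0 of the index
   picks the sd (0xF2) vs. ss (0xF3) form and bit 1 the 64-bit integer
   operand (REX.W); the X86_64_ONLY slots are only reachable with
   dflag == 2, i.e. in 64-bit code. */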
2971 static void *sse_op_table4[8][4] = {
2972 SSE_FOP(cmpeq),
2973 SSE_FOP(cmplt),
2974 SSE_FOP(cmple),
2975 SSE_FOP(cmpunord),
2976 SSE_FOP(cmpneq),
2977 SSE_FOP(cmpnlt),
2978 SSE_FOP(cmpnle),
2979 SSE_FOP(cmpord),
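/* cmpps/cmppd/cmpss/cmpsd: the row is selected by the imm8 predicate
   that follows the instruction (0 = eq, 1 = lt, 2 = le, 3 = unord,
   4 = neq, 5 = nlt, 6 = nle, 7 = ord); values >= 8 are rejected as
   illegal in the 0xc2 handler below. */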
2982 static void *sse_op_table5[256] = {
2983 [0x0c] = gen_helper_pi2fw,
2984 [0x0d] = gen_helper_pi2fd,
2985 [0x1c] = gen_helper_pf2iw,
2986 [0x1d] = gen_helper_pf2id,
2987 [0x8a] = gen_helper_pfnacc,
2988 [0x8e] = gen_helper_pfpnacc,
2989 [0x90] = gen_helper_pfcmpge,
2990 [0x94] = gen_helper_pfmin,
2991 [0x96] = gen_helper_pfrcp,
2992 [0x97] = gen_helper_pfrsqrt,
2993 [0x9a] = gen_helper_pfsub,
2994 [0x9e] = gen_helper_pfadd,
2995 [0xa0] = gen_helper_pfcmpgt,
2996 [0xa4] = gen_helper_pfmax,
2997 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
2998 [0xa7] = gen_helper_movq, /* pfrsqit1 */
2999 [0xaa] = gen_helper_pfsubr,
3000 [0xae] = gen_helper_pfacc,
3001 [0xb0] = gen_helper_pfcmpeq,
3002 [0xb4] = gen_helper_pfmul,
3003 [0xb6] = gen_helper_movq, /* pfrcpit2 */
3004 [0xb7] = gen_helper_pmulhrw_mmx,
3005 [0xbb] = gen_helper_pswapd,
3006 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
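/* 3DNow!: the actual operation is encoded in a suffix byte that
   follows the modrm/displacement bytes (fetched in the 0x0f case of
   gen_sse), so this table is indexed directly by that byte. */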
3009 struct sse_op_helper_s {
3010 void *op[2]; uint32_t ext_mask;
3012 #define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3013 #define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3014 #define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3015 #define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3016 static struct sse_op_helper_s sse_op_table6[256] = {
3017 [0x00] = SSSE3_OP(pshufb),
3018 [0x01] = SSSE3_OP(phaddw),
3019 [0x02] = SSSE3_OP(phaddd),
3020 [0x03] = SSSE3_OP(phaddsw),
3021 [0x04] = SSSE3_OP(pmaddubsw),
3022 [0x05] = SSSE3_OP(phsubw),
3023 [0x06] = SSSE3_OP(phsubd),
3024 [0x07] = SSSE3_OP(phsubsw),
3025 [0x08] = SSSE3_OP(psignb),
3026 [0x09] = SSSE3_OP(psignw),
3027 [0x0a] = SSSE3_OP(psignd),
3028 [0x0b] = SSSE3_OP(pmulhrsw),
3029 [0x10] = SSE41_OP(pblendvb),
3030 [0x14] = SSE41_OP(blendvps),
3031 [0x15] = SSE41_OP(blendvpd),
3032 [0x17] = SSE41_OP(ptest),
3033 [0x1c] = SSSE3_OP(pabsb),
3034 [0x1d] = SSSE3_OP(pabsw),
3035 [0x1e] = SSSE3_OP(pabsd),
3036 [0x20] = SSE41_OP(pmovsxbw),
3037 [0x21] = SSE41_OP(pmovsxbd),
3038 [0x22] = SSE41_OP(pmovsxbq),
3039 [0x23] = SSE41_OP(pmovsxwd),
3040 [0x24] = SSE41_OP(pmovsxwq),
3041 [0x25] = SSE41_OP(pmovsxdq),
3042 [0x28] = SSE41_OP(pmuldq),
3043 [0x29] = SSE41_OP(pcmpeqq),
3044 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3045 [0x2b] = SSE41_OP(packusdw),
3046 [0x30] = SSE41_OP(pmovzxbw),
3047 [0x31] = SSE41_OP(pmovzxbd),
3048 [0x32] = SSE41_OP(pmovzxbq),
3049 [0x33] = SSE41_OP(pmovzxwd),
3050 [0x34] = SSE41_OP(pmovzxwq),
3051 [0x35] = SSE41_OP(pmovzxdq),
3052 [0x37] = SSE42_OP(pcmpgtq),
3053 [0x38] = SSE41_OP(pminsb),
3054 [0x39] = SSE41_OP(pminsd),
3055 [0x3a] = SSE41_OP(pminuw),
3056 [0x3b] = SSE41_OP(pminud),
3057 [0x3c] = SSE41_OP(pmaxsb),
3058 [0x3d] = SSE41_OP(pmaxsd),
3059 [0x3e] = SSE41_OP(pmaxuw),
3060 [0x3f] = SSE41_OP(pmaxud),
3061 [0x40] = SSE41_OP(pmulld),
3062 [0x41] = SSE41_OP(phminposuw),
3065 static struct sse_op_helper_s sse_op_table7[256] = {
3066 [0x08] = SSE41_OP(roundps),
3067 [0x09] = SSE41_OP(roundpd),
3068 [0x0a] = SSE41_OP(roundss),
3069 [0x0b] = SSE41_OP(roundsd),
3070 [0x0c] = SSE41_OP(blendps),
3071 [0x0d] = SSE41_OP(blendpd),
3072 [0x0e] = SSE41_OP(pblendw),
3073 [0x0f] = SSSE3_OP(palignr),
3074 [0x14] = SSE41_SPECIAL, /* pextrb */
3075 [0x15] = SSE41_SPECIAL, /* pextrw */
3076 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3077 [0x17] = SSE41_SPECIAL, /* extractps */
3078 [0x20] = SSE41_SPECIAL, /* pinsrb */
3079 [0x21] = SSE41_SPECIAL, /* insertps */
3080 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3081 [0x40] = SSE41_OP(dpps),
3082 [0x41] = SSE41_OP(dppd),
3083 [0x42] = SSE41_OP(mpsadbw),
3084 [0x60] = SSE42_OP(pcmpestrm),
3085 [0x61] = SSE42_OP(pcmpestri),
3086 [0x62] = SSE42_OP(pcmpistrm),
3087 [0x63] = SSE42_OP(pcmpistri),
3090 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3092 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3093 int modrm, mod, rm, reg, reg_addr, offset_addr;
3094 void *sse_op2;
3096 b &= 0xff;
3097 if (s->prefix & PREFIX_DATA)
3098 b1 = 1;
3099 else if (s->prefix & PREFIX_REPZ)
3100 b1 = 2;
3101 else if (s->prefix & PREFIX_REPNZ)
3102 b1 = 3;
3103 else
3104 b1 = 0;
3105 sse_op2 = sse_op_table1[b][b1];
3106 if (!sse_op2)
3107 goto illegal_op;
3108 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3109 is_xmm = 1;
3110 } else {
3111 if (b1 == 0) {
3112 /* MMX case */
3113 is_xmm = 0;
3114 } else {
3115 is_xmm = 1;
3118 /* simple MMX/SSE operation */
3119 if (s->flags & HF_TS_MASK) {
3120 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3121 return;
3123 if (s->flags & HF_EM_MASK) {
3124 illegal_op:
3125 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3126 return;
3128 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3129 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3130 goto illegal_op;
3131 if (b == 0x0e) {
3132 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3133 goto illegal_op;
3134 /* femms */
3135 gen_helper_emms();
3136 return;
3138 if (b == 0x77) {
3139 /* emms */
3140 gen_helper_emms();
3141 return;
3143 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3144 the static cpu state) */
3145 if (!is_xmm) {
3146 gen_helper_enter_mmx();
3149 modrm = ldub_code(s->pc++);
3150 reg = ((modrm >> 3) & 7);
3151 if (is_xmm)
3152 reg |= rex_r;
3153 mod = (modrm >> 6) & 3;
3154 if (sse_op2 == SSE_SPECIAL) {
3155 b |= (b1 << 8);
3156 switch(b) {
3157 case 0x0e7: /* movntq */
3158 if (mod == 3)
3159 goto illegal_op;
3160 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3161 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3162 break;
3163 case 0x1e7: /* movntdq */
3164 case 0x02b: /* movntps */
3165 case 0x12b: /* movntpd */
3166 if (mod == 3)
3167 goto illegal_op;
3168 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3169 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3170 break;
3171 case 0x3f0: /* lddqu */
3172 if (mod == 3)
3173 goto illegal_op;
3174 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3175 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3176 break;
3177 case 0x22b: /* movntss */
3178 case 0x32b: /* movntsd */
3179 if (mod == 3)
3180 goto illegal_op;
3181 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3182 if (b1 & 1) {
3183 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,
3184 xmm_regs[reg]));
3185 } else {
3186 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3187 xmm_regs[reg].XMM_L(0)));
3188 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3190 break;
3191 case 0x6e: /* movd mm, ea */
3192 #ifdef TARGET_X86_64
3193 if (s->dflag == 2) {
3194 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3195 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3196 } else
3197 #endif
3199 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3200 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3201 offsetof(CPUX86State,fpregs[reg].mmx));
3202 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3203 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
3205 break;
3206 case 0x16e: /* movd xmm, ea */
3207 #ifdef TARGET_X86_64
3208 if (s->dflag == 2) {
3209 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3210 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3211 offsetof(CPUX86State,xmm_regs[reg]));
3212 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
3213 } else
3214 #endif
3216 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3217 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3218 offsetof(CPUX86State,xmm_regs[reg]));
3219 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3220 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
3222 break;
3223 case 0x6f: /* movq mm, ea */
3224 if (mod != 3) {
3225 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3226 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3227 } else {
3228 rm = (modrm & 7);
3229 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3230 offsetof(CPUX86State,fpregs[rm].mmx));
3231 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3232 offsetof(CPUX86State,fpregs[reg].mmx));
3234 break;
3235 case 0x010: /* movups */
3236 case 0x110: /* movupd */
3237 case 0x028: /* movaps */
3238 case 0x128: /* movapd */
3239 case 0x16f: /* movdqa xmm, ea */
3240 case 0x26f: /* movdqu xmm, ea */
3241 if (mod != 3) {
3242 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3243 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3244 } else {
3245 rm = (modrm & 7) | REX_B(s);
3246 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3247 offsetof(CPUX86State,xmm_regs[rm]));
3249 break;
3250 case 0x210: /* movss xmm, ea */
3251 if (mod != 3) {
3252 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3253 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3254 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3255 gen_op_movl_T0_0();
3256 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3257 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3258 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3259 } else {
3260 rm = (modrm & 7) | REX_B(s);
3261 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3262 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3264 break;
3265 case 0x310: /* movsd xmm, ea */
3266 if (mod != 3) {
3267 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3268 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3269 gen_op_movl_T0_0();
3270 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3271 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3272 } else {
3273 rm = (modrm & 7) | REX_B(s);
3274 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3275 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3277 break;
3278 case 0x012: /* movlps */
3279 case 0x112: /* movlpd */
3280 if (mod != 3) {
3281 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3282 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3283 } else {
3284 /* movhlps */
3285 rm = (modrm & 7) | REX_B(s);
3286 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3287 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3289 break;
3290 case 0x212: /* movsldup */
3291 if (mod != 3) {
3292 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3293 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3294 } else {
3295 rm = (modrm & 7) | REX_B(s);
3296 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3297 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3298 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3299 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3301 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3302 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3303 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3304 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3305 break;
3306 case 0x312: /* movddup */
3307 if (mod != 3) {
3308 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3309 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3310 } else {
3311 rm = (modrm & 7) | REX_B(s);
3312 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3313 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3315 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3316 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3317 break;
3318 case 0x016: /* movhps */
3319 case 0x116: /* movhpd */
3320 if (mod != 3) {
3321 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3322 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3323 } else {
3324 /* movlhps */
3325 rm = (modrm & 7) | REX_B(s);
3326 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3327 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3329 break;
3330 case 0x216: /* movshdup */
3331 if (mod != 3) {
3332 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3333 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3334 } else {
3335 rm = (modrm & 7) | REX_B(s);
3336 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3337 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3338 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3339 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3341 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3342 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3343 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3344 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3345 break;
3346 case 0x178:
3347 case 0x378:
3349 int bit_index, field_length;
3351 if (b1 == 1 && reg != 0)
3352 goto illegal_op;
3353 field_length = ldub_code(s->pc++) & 0x3F;
3354 bit_index = ldub_code(s->pc++) & 0x3F;
3355 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3356 offsetof(CPUX86State,xmm_regs[reg]));
3357 if (b1 == 1)
3358 gen_helper_extrq_i(cpu_ptr0, tcg_const_i32(bit_index),
3359 tcg_const_i32(field_length));
3360 else
3361 gen_helper_insertq_i(cpu_ptr0, tcg_const_i32(bit_index),
3362 tcg_const_i32(field_length));
3364 break;
3365 case 0x7e: /* movd ea, mm */
3366 #ifdef TARGET_X86_64
3367 if (s->dflag == 2) {
3368 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3369 offsetof(CPUX86State,fpregs[reg].mmx));
3370 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3371 } else
3372 #endif
3374 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3375 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3376 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3378 break;
3379 case 0x17e: /* movd ea, xmm */
3380 #ifdef TARGET_X86_64
3381 if (s->dflag == 2) {
3382 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3383 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3384 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3385 } else
3386 #endif
3388 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3389 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3390 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3392 break;
3393 case 0x27e: /* movq xmm, ea */
3394 if (mod != 3) {
3395 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3396 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3397 } else {
3398 rm = (modrm & 7) | REX_B(s);
3399 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3400 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3402 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3403 break;
3404 case 0x7f: /* movq ea, mm */
3405 if (mod != 3) {
3406 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3407 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3408 } else {
3409 rm = (modrm & 7);
3410 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3411 offsetof(CPUX86State,fpregs[reg].mmx));
3413 break;
3414 case 0x011: /* movups */
3415 case 0x111: /* movupd */
3416 case 0x029: /* movaps */
3417 case 0x129: /* movapd */
3418 case 0x17f: /* movdqa ea, xmm */
3419 case 0x27f: /* movdqu ea, xmm */
3420 if (mod != 3) {
3421 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3422 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3423 } else {
3424 rm = (modrm & 7) | REX_B(s);
3425 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3426 offsetof(CPUX86State,xmm_regs[reg]));
3428 break;
3429 case 0x211: /* movss ea, xmm */
3430 if (mod != 3) {
3431 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3432 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3433 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3434 } else {
3435 rm = (modrm & 7) | REX_B(s);
3436 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3437 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3439 break;
3440 case 0x311: /* movsd ea, xmm */
3441 if (mod != 3) {
3442 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3443 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3444 } else {
3445 rm = (modrm & 7) | REX_B(s);
3446 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3447 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3449 break;
3450 case 0x013: /* movlps */
3451 case 0x113: /* movlpd */
3452 if (mod != 3) {
3453 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3454 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3455 } else {
3456 goto illegal_op;
3458 break;
3459 case 0x017: /* movhps */
3460 case 0x117: /* movhpd */
3461 if (mod != 3) {
3462 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3463 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3464 } else {
3465 goto illegal_op;
3467 break;
3468 case 0x71: /* shift mm, im */
3469 case 0x72:
3470 case 0x73:
3471 case 0x171: /* shift xmm, im */
3472 case 0x172:
3473 case 0x173:
3474 val = ldub_code(s->pc++);
3475 if (is_xmm) {
3476 gen_op_movl_T0_im(val);
3477 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3478 gen_op_movl_T0_0();
3479 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3480 op1_offset = offsetof(CPUX86State,xmm_t0);
3481 } else {
3482 gen_op_movl_T0_im(val);
3483 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3484 gen_op_movl_T0_0();
3485 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3486 op1_offset = offsetof(CPUX86State,mmx_t0);
3488 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3489 if (!sse_op2)
3490 goto illegal_op;
3491 if (is_xmm) {
3492 rm = (modrm & 7) | REX_B(s);
3493 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3494 } else {
3495 rm = (modrm & 7);
3496 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3498 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3499 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3500 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3501 break;
3502 case 0x050: /* movmskps */
3503 rm = (modrm & 7) | REX_B(s);
3504 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3505 offsetof(CPUX86State,xmm_regs[rm]));
3506 gen_helper_movmskps(cpu_tmp2_i32, cpu_ptr0);
3507 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3508 gen_op_mov_reg_T0(OT_LONG, reg);
3509 break;
3510 case 0x150: /* movmskpd */
3511 rm = (modrm & 7) | REX_B(s);
3512 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3513 offsetof(CPUX86State,xmm_regs[rm]));
3514 gen_helper_movmskpd(cpu_tmp2_i32, cpu_ptr0);
3515 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3516 gen_op_mov_reg_T0(OT_LONG, reg);
3517 break;
3518 case 0x02a: /* cvtpi2ps */
3519 case 0x12a: /* cvtpi2pd */
3520 gen_helper_enter_mmx();
3521 if (mod != 3) {
3522 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3523 op2_offset = offsetof(CPUX86State,mmx_t0);
3524 gen_ldq_env_A0(s->mem_index, op2_offset);
3525 } else {
3526 rm = (modrm & 7);
3527 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3529 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3530 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3531 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3532 switch(b >> 8) {
3533 case 0x0:
3534 gen_helper_cvtpi2ps(cpu_ptr0, cpu_ptr1);
3535 break;
3536 default:
3537 case 0x1:
3538 gen_helper_cvtpi2pd(cpu_ptr0, cpu_ptr1);
3539 break;
3541 break;
3542 case 0x22a: /* cvtsi2ss */
3543 case 0x32a: /* cvtsi2sd */
3544 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3545 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3546 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3547 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3548 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3549 if (ot == OT_LONG) {
3550 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3551 ((void (*)(TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_tmp2_i32);
3552 } else {
3553 ((void (*)(TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_T[0]);
3555 break;
3556 case 0x02c: /* cvttps2pi */
3557 case 0x12c: /* cvttpd2pi */
3558 case 0x02d: /* cvtps2pi */
3559 case 0x12d: /* cvtpd2pi */
3560 gen_helper_enter_mmx();
3561 if (mod != 3) {
3562 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3563 op2_offset = offsetof(CPUX86State,xmm_t0);
3564 gen_ldo_env_A0(s->mem_index, op2_offset);
3565 } else {
3566 rm = (modrm & 7) | REX_B(s);
3567 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3569 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3570 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3571 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3572 switch(b) {
3573 case 0x02c:
3574 gen_helper_cvttps2pi(cpu_ptr0, cpu_ptr1);
3575 break;
3576 case 0x12c:
3577 gen_helper_cvttpd2pi(cpu_ptr0, cpu_ptr1);
3578 break;
3579 case 0x02d:
3580 gen_helper_cvtps2pi(cpu_ptr0, cpu_ptr1);
3581 break;
3582 case 0x12d:
3583 gen_helper_cvtpd2pi(cpu_ptr0, cpu_ptr1);
3584 break;
3586 break;
3587 case 0x22c: /* cvttss2si */
3588 case 0x32c: /* cvttsd2si */
3589 case 0x22d: /* cvtss2si */
3590 case 0x32d: /* cvtsd2si */
3591 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3592 if (mod != 3) {
3593 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3594 if ((b >> 8) & 1) {
3595 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3596 } else {
3597 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3598 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3600 op2_offset = offsetof(CPUX86State,xmm_t0);
3601 } else {
3602 rm = (modrm & 7) | REX_B(s);
3603 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3605 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3606 (b & 1) * 4];
3607 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3608 if (ot == OT_LONG) {
3609 ((void (*)(TCGv_i32, TCGv_ptr))sse_op2)(cpu_tmp2_i32, cpu_ptr0);
3610 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3611 } else {
3612 ((void (*)(TCGv, TCGv_ptr))sse_op2)(cpu_T[0], cpu_ptr0);
3614 gen_op_mov_reg_T0(ot, reg);
3615 break;
3616 case 0xc4: /* pinsrw */
3617 case 0x1c4:
3618 s->rip_offset = 1;
3619 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3620 val = ldub_code(s->pc++);
3621 if (b1) {
3622 val &= 7;
3623 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3624 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3625 } else {
3626 val &= 3;
3627 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3628 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3630 break;
3631 case 0xc5: /* pextrw */
3632 case 0x1c5:
3633 if (mod != 3)
3634 goto illegal_op;
3635 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3636 val = ldub_code(s->pc++);
3637 if (b1) {
3638 val &= 7;
3639 rm = (modrm & 7) | REX_B(s);
3640 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3641 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3642 } else {
3643 val &= 3;
3644 rm = (modrm & 7);
3645 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3646 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3648 reg = ((modrm >> 3) & 7) | rex_r;
3649 gen_op_mov_reg_T0(ot, reg);
3650 break;
3651 case 0x1d6: /* movq ea, xmm */
3652 if (mod != 3) {
3653 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3654 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3655 } else {
3656 rm = (modrm & 7) | REX_B(s);
3657 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3658 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3659 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3661 break;
3662 case 0x2d6: /* movq2dq */
3663 gen_helper_enter_mmx();
3664 rm = (modrm & 7);
3665 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3666 offsetof(CPUX86State,fpregs[rm].mmx));
3667 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3668 break;
3669 case 0x3d6: /* movdq2q */
3670 gen_helper_enter_mmx();
3671 rm = (modrm & 7) | REX_B(s);
3672 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3673 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3674 break;
3675 case 0xd7: /* pmovmskb */
3676 case 0x1d7:
3677 if (mod != 3)
3678 goto illegal_op;
3679 if (b1) {
3680 rm = (modrm & 7) | REX_B(s);
3681 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3682 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_ptr0);
3683 } else {
3684 rm = (modrm & 7);
3685 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3686 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_ptr0);
3688 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3689 reg = ((modrm >> 3) & 7) | rex_r;
3690 gen_op_mov_reg_T0(OT_LONG, reg);
3691 break;
3692 case 0x138:
3693 if (s->prefix & PREFIX_REPNZ)
3694 goto crc32;
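/* fall through */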
3695 case 0x038:
3696 b = modrm;
3697 modrm = ldub_code(s->pc++);
3698 rm = modrm & 7;
3699 reg = ((modrm >> 3) & 7) | rex_r;
3700 mod = (modrm >> 6) & 3;
3702 sse_op2 = sse_op_table6[b].op[b1];
3703 if (!sse_op2)
3704 goto illegal_op;
3705 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3706 goto illegal_op;
3708 if (b1) {
3709 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3710 if (mod == 3) {
3711 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3712 } else {
3713 op2_offset = offsetof(CPUX86State,xmm_t0);
3714 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3715 switch (b) {
3716 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3717 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3718 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3719 gen_ldq_env_A0(s->mem_index, op2_offset +
3720 offsetof(XMMReg, XMM_Q(0)));
3721 break;
3722 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3723 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
3724 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3725 (s->mem_index >> 2) - 1);
3726 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3727 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3728 offsetof(XMMReg, XMM_L(0)));
3729 break;
3730 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3731 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3732 (s->mem_index >> 2) - 1);
3733 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3734 offsetof(XMMReg, XMM_W(0)));
3735 break;
3736 case 0x2a: /* movntdqa */
3737 gen_ldo_env_A0(s->mem_index, op1_offset);
3738 return;
3739 default:
3740 gen_ldo_env_A0(s->mem_index, op2_offset);
3743 } else {
3744 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3745 if (mod == 3) {
3746 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3747 } else {
3748 op2_offset = offsetof(CPUX86State,mmx_t0);
3749 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3750 gen_ldq_env_A0(s->mem_index, op2_offset);
3753 if (sse_op2 == SSE_SPECIAL)
3754 goto illegal_op;
3756 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3757 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3758 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3760 if (b == 0x17)
3761 s->cc_op = CC_OP_EFLAGS;
3762 break;
3763 case 0x338: /* crc32 */
3764 crc32:
3765 b = modrm;
3766 modrm = ldub_code(s->pc++);
3767 reg = ((modrm >> 3) & 7) | rex_r;
3769 if (b != 0xf0 && b != 0xf1)
3770 goto illegal_op;
3771 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
3772 goto illegal_op;
3774 if (b == 0xf0)
3775 ot = OT_BYTE;
3776 else if (b == 0xf1 && s->dflag != 2)
3777 if (s->prefix & PREFIX_DATA)
3778 ot = OT_WORD;
3779 else
3780 ot = OT_LONG;
3781 else
3782 ot = OT_QUAD;
3784 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3785 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3786 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3787 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
3788 cpu_T[0], tcg_const_i32(8 << ot));
3790 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3791 gen_op_mov_reg_T0(ot, reg);
3792 break;
3793 case 0x03a:
3794 case 0x13a:
3795 b = modrm;
3796 modrm = ldub_code(s->pc++);
3797 rm = modrm & 7;
3798 reg = ((modrm >> 3) & 7) | rex_r;
3799 mod = (modrm >> 6) & 3;
3801 sse_op2 = sse_op_table7[b].op[b1];
3802 if (!sse_op2)
3803 goto illegal_op;
3804 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
3805 goto illegal_op;
3807 if (sse_op2 == SSE_SPECIAL) {
3808 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3809 rm = (modrm & 7) | REX_B(s);
3810 if (mod != 3)
3811 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3812 reg = ((modrm >> 3) & 7) | rex_r;
3813 val = ldub_code(s->pc++);
3814 switch (b) {
3815 case 0x14: /* pextrb */
3816 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3817 xmm_regs[reg].XMM_B(val & 15)));
3818 if (mod == 3)
3819 gen_op_mov_reg_T0(ot, rm);
3820 else
3821 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
3822 (s->mem_index >> 2) - 1);
3823 break;
3824 case 0x15: /* pextrw */
3825 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3826 xmm_regs[reg].XMM_W(val & 7)));
3827 if (mod == 3)
3828 gen_op_mov_reg_T0(ot, rm);
3829 else
3830 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
3831 (s->mem_index >> 2) - 1);
3832 break;
3833 case 0x16:
3834 if (ot == OT_LONG) { /* pextrd */
3835 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3836 offsetof(CPUX86State,
3837 xmm_regs[reg].XMM_L(val & 3)));
3838 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3839 if (mod == 3)
3840 gen_op_mov_reg_v(ot, rm, cpu_T[0]);
3841 else
3842 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3843 (s->mem_index >> 2) - 1);
3844 } else { /* pextrq */
3845 #ifdef TARGET_X86_64
3846 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3847 offsetof(CPUX86State,
3848 xmm_regs[reg].XMM_Q(val & 1)));
3849 if (mod == 3)
3850 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
3851 else
3852 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
3853 (s->mem_index >> 2) - 1);
3854 #else
3855 goto illegal_op;
3856 #endif
3858 break;
3859 case 0x17: /* extractps */
3860 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3861 xmm_regs[reg].XMM_L(val & 3)));
3862 if (mod == 3)
3863 gen_op_mov_reg_T0(ot, rm);
3864 else
3865 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3866 (s->mem_index >> 2) - 1);
3867 break;
3868 case 0x20: /* pinsrb */
3869 if (mod == 3)
3870 gen_op_mov_TN_reg(OT_LONG, 0, rm);
3871 else
3872 tcg_gen_qemu_ld8u(cpu_tmp0, cpu_A0,
3873 (s->mem_index >> 2) - 1);
3874 tcg_gen_st8_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State,
3875 xmm_regs[reg].XMM_B(val & 15)));
3876 break;
3877 case 0x21: /* insertps */
3878 if (mod == 3) {
3879 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3880 offsetof(CPUX86State,xmm_regs[rm]
3881 .XMM_L((val >> 6) & 3)));
3882 } else {
3883 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3884 (s->mem_index >> 2) - 1);
3885 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3887 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3888 offsetof(CPUX86State,xmm_regs[reg]
3889 .XMM_L((val >> 4) & 3)));
3890 if ((val >> 0) & 1)
3891 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3892 cpu_env, offsetof(CPUX86State,
3893 xmm_regs[reg].XMM_L(0)));
3894 if ((val >> 1) & 1)
3895 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3896 cpu_env, offsetof(CPUX86State,
3897 xmm_regs[reg].XMM_L(1)));
3898 if ((val >> 2) & 1)
3899 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3900 cpu_env, offsetof(CPUX86State,
3901 xmm_regs[reg].XMM_L(2)));
3902 if ((val >> 3) & 1)
3903 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3904 cpu_env, offsetof(CPUX86State,
3905 xmm_regs[reg].XMM_L(3)));
3906 break;
3907 case 0x22:
3908 if (ot == OT_LONG) { /* pinsrd */
3909 if (mod == 3)
3910 gen_op_mov_v_reg(ot, cpu_tmp0, rm);
3911 else
3912 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3913 (s->mem_index >> 2) - 1);
3914 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3915 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3916 offsetof(CPUX86State,
3917 xmm_regs[reg].XMM_L(val & 3)));
3918 } else { /* pinsrq */
3919 #ifdef TARGET_X86_64
3920 if (mod == 3)
3921 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
3922 else
3923 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
3924 (s->mem_index >> 2) - 1);
3925 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3926 offsetof(CPUX86State,
3927 xmm_regs[reg].XMM_Q(val & 1)));
3928 #else
3929 goto illegal_op;
3930 #endif
3932 break;
3934 return;
3937 if (b1) {
3938 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3939 if (mod == 3) {
3940 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3941 } else {
3942 op2_offset = offsetof(CPUX86State,xmm_t0);
3943 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3944 gen_ldo_env_A0(s->mem_index, op2_offset);
3946 } else {
3947 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3948 if (mod == 3) {
3949 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3950 } else {
3951 op2_offset = offsetof(CPUX86State,mmx_t0);
3952 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3953 gen_ldq_env_A0(s->mem_index, op2_offset);
3956 val = ldub_code(s->pc++);
3958 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3959 s->cc_op = CC_OP_EFLAGS;
3961 if (s->dflag == 2)
3962 /* The helper must use entire 64-bit gp registers */
3963 val |= 1 << 8;
3966 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3967 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3968 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3969 break;
3970 default:
3971 goto illegal_op;
3973 } else {
3974 /* generic MMX or SSE operation */
3975 switch(b) {
3976 case 0x70: /* pshufx insn */
3977 case 0xc6: /* pshufx insn */
3978 case 0xc2: /* compare insns */
3979 s->rip_offset = 1;
3980 break;
3981 default:
3982 break;
3984 if (is_xmm) {
3985 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3986 if (mod != 3) {
3987 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3988 op2_offset = offsetof(CPUX86State,xmm_t0);
3989 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3990 b == 0xc2)) {
3991 /* specific case for SSE single instructions */
3992 if (b1 == 2) {
3993 /* 32 bit access */
3994 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3995 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3996 } else {
3997 /* 64 bit access */
3998 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4000 } else {
4001 gen_ldo_env_A0(s->mem_index, op2_offset);
4003 } else {
4004 rm = (modrm & 7) | REX_B(s);
4005 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4007 } else {
4008 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4009 if (mod != 3) {
4010 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4011 op2_offset = offsetof(CPUX86State,mmx_t0);
4012 gen_ldq_env_A0(s->mem_index, op2_offset);
4013 } else {
4014 rm = (modrm & 7);
4015 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4018 switch(b) {
4019 case 0x0f: /* 3DNow! data insns */
4020 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4021 goto illegal_op;
4022 val = ldub_code(s->pc++);
4023 sse_op2 = sse_op_table5[val];
4024 if (!sse_op2)
4025 goto illegal_op;
4026 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4027 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4028 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4029 break;
4030 case 0x70: /* pshufx insn */
4031 case 0xc6: /* pshufx insn */
4032 val = ldub_code(s->pc++);
4033 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4034 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4035 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4036 break;
4037 case 0xc2:
4038 /* compare insns */
4039 val = ldub_code(s->pc++);
4040 if (val >= 8)
4041 goto illegal_op;
4042 sse_op2 = sse_op_table4[val][b1];
4043 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4044 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4045 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4046 break;
4047 case 0xf7:
4048 /* maskmov : we must prepare A0 */
4049 if (mod != 3)
4050 goto illegal_op;
4051 #ifdef TARGET_X86_64
4052 if (s->aflag == 2) {
4053 gen_op_movq_A0_reg(R_EDI);
4054 } else
4055 #endif
4057 gen_op_movl_A0_reg(R_EDI);
4058 if (s->aflag == 0)
4059 gen_op_andl_A0_ffff();
4061 gen_add_A0_ds_seg(s);
4063 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4064 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4065 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_ptr1, cpu_A0);
4066 break;
4067 default:
4068 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4069 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4070 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4071 break;
4073 if (b == 0x2e || b == 0x2f) {
4074 s->cc_op = CC_OP_EFLAGS;
4079 /* convert one instruction. s->is_jmp is set if the translation must
4080 be stopped. Return the next pc value */
4081 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4083 int b, prefixes, aflag, dflag;
4084 int shift, ot;
4085 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4086 target_ulong next_eip, tval;
4087 int rex_w, rex_r;
4089 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
4090 tcg_gen_debug_insn_start(pc_start);
4091 s->pc = pc_start;
4092 prefixes = 0;
4093 aflag = s->code32;
4094 dflag = s->code32;
4095 s->override = -1;
4096 rex_w = -1;
4097 rex_r = 0;
4098 #ifdef TARGET_X86_64
4099 s->rex_x = 0;
4100 s->rex_b = 0;
4101 x86_64_hregs = 0;
4102 #endif
4103 s->rip_offset = 0; /* for relative ip address */
4104 next_byte:
4105 b = ldub_code(s->pc);
4106 s->pc++;
4107 /* check prefixes */
4108 #ifdef TARGET_X86_64
4109 if (CODE64(s)) {
4110 switch (b) {
4111 case 0xf3:
4112 prefixes |= PREFIX_REPZ;
4113 goto next_byte;
4114 case 0xf2:
4115 prefixes |= PREFIX_REPNZ;
4116 goto next_byte;
4117 case 0xf0:
4118 prefixes |= PREFIX_LOCK;
4119 goto next_byte;
4120 case 0x2e:
4121 s->override = R_CS;
4122 goto next_byte;
4123 case 0x36:
4124 s->override = R_SS;
4125 goto next_byte;
4126 case 0x3e:
4127 s->override = R_DS;
4128 goto next_byte;
4129 case 0x26:
4130 s->override = R_ES;
4131 goto next_byte;
4132 case 0x64:
4133 s->override = R_FS;
4134 goto next_byte;
4135 case 0x65:
4136 s->override = R_GS;
4137 goto next_byte;
4138 case 0x66:
4139 prefixes |= PREFIX_DATA;
4140 goto next_byte;
4141 case 0x67:
4142 prefixes |= PREFIX_ADR;
4143 goto next_byte;
4144 case 0x40 ... 0x4f:
4145 /* REX prefix */
4146 rex_w = (b >> 3) & 1;
4147 rex_r = (b & 0x4) << 1;
4148 s->rex_x = (b & 0x2) << 2;
4149 REX_B(s) = (b & 0x1) << 3;
4150 x86_64_hregs = 1; /* select uniform byte register addressing */
4151 goto next_byte;
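/* The REX bits are pre-shifted into the positions where they are
   OR-ed in during operand decoding: e.g. 0x44 (REX.R) yields
   rex_r = 8, extending the modrm reg field to r8-r15, and 0x41
   (REX.B) yields REX_B(s) = 8 for the rm/base field. */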
4153 if (rex_w == 1) {
4154 /* 0x66 is ignored if rex.w is set */
4155 dflag = 2;
4156 } else {
4157 if (prefixes & PREFIX_DATA)
4158 dflag ^= 1;
4160 if (!(prefixes & PREFIX_ADR))
4161 aflag = 2;
4162 } else
4163 #endif
4165 switch (b) {
4166 case 0xf3:
4167 prefixes |= PREFIX_REPZ;
4168 goto next_byte;
4169 case 0xf2:
4170 prefixes |= PREFIX_REPNZ;
4171 goto next_byte;
4172 case 0xf0:
4173 prefixes |= PREFIX_LOCK;
4174 goto next_byte;
4175 case 0x2e:
4176 s->override = R_CS;
4177 goto next_byte;
4178 case 0x36:
4179 s->override = R_SS;
4180 goto next_byte;
4181 case 0x3e:
4182 s->override = R_DS;
4183 goto next_byte;
4184 case 0x26:
4185 s->override = R_ES;
4186 goto next_byte;
4187 case 0x64:
4188 s->override = R_FS;
4189 goto next_byte;
4190 case 0x65:
4191 s->override = R_GS;
4192 goto next_byte;
4193 case 0x66:
4194 prefixes |= PREFIX_DATA;
4195 goto next_byte;
4196 case 0x67:
4197 prefixes |= PREFIX_ADR;
4198 goto next_byte;
4199 }
4200 if (prefixes & PREFIX_DATA)
4201 dflag ^= 1;
4202 if (prefixes & PREFIX_ADR)
4203 aflag ^= 1;
4204 }
4206 s->prefix = prefixes;
4207 s->aflag = aflag;
4208 s->dflag = dflag;
4210 /* lock generation */
4211 if (prefixes & PREFIX_LOCK)
4212 gen_helper_lock();
4214 /* now check op code */
4215 reswitch:
4216 switch(b) {
4217 case 0x0f:
4218 /**************************/
4219 /* extended op code */
4220 b = ldub_code(s->pc++) | 0x100;
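/* two byte opcodes are dispatched through the same switch with 0x100
   added, so 0f xx appears below as case 0x1xx */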
4221 goto reswitch;
4223 /**************************/
4224 /* arith & logic */
4225 case 0x00 ... 0x05:
4226 case 0x08 ... 0x0d:
4227 case 0x10 ... 0x15:
4228 case 0x18 ... 0x1d:
4229 case 0x20 ... 0x25:
4230 case 0x28 ... 0x2d:
4231 case 0x30 ... 0x35:
4232 case 0x38 ... 0x3d:
4233 {
4234 int op, f, val;
4235 op = (b >> 3) & 7;
4236 f = (b >> 1) & 3;
4238 if ((b & 1) == 0)
4239 ot = OT_BYTE;
4240 else
4241 ot = dflag + OT_WORD;
4243 switch(f) {
4244 case 0: /* OP Ev, Gv */
4245 modrm = ldub_code(s->pc++);
4246 reg = ((modrm >> 3) & 7) | rex_r;
4247 mod = (modrm >> 6) & 3;
4248 rm = (modrm & 7) | REX_B(s);
4249 if (mod != 3) {
4250 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4251 opreg = OR_TMP0;
4252 } else if (op == OP_XORL && rm == reg) {
4253 xor_zero:
4254 /* xor reg, reg optimisation */
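/* the result is known to be 0 and the flags do not depend on the old
   value, so neither operand needs to be read */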
4255 gen_op_movl_T0_0();
4256 s->cc_op = CC_OP_LOGICB + ot;
4257 gen_op_mov_reg_T0(ot, reg);
4258 gen_op_update1_cc();
4259 break;
4260 } else {
4261 opreg = rm;
4262 }
4263 gen_op_mov_TN_reg(ot, 1, reg);
4264 gen_op(s, op, ot, opreg);
4265 break;
4266 case 1: /* OP Gv, Ev */
4267 modrm = ldub_code(s->pc++);
4268 mod = (modrm >> 6) & 3;
4269 reg = ((modrm >> 3) & 7) | rex_r;
4270 rm = (modrm & 7) | REX_B(s);
4271 if (mod != 3) {
4272 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4273 gen_op_ld_T1_A0(ot + s->mem_index);
4274 } else if (op == OP_XORL && rm == reg) {
4275 goto xor_zero;
4276 } else {
4277 gen_op_mov_TN_reg(ot, 1, rm);
4278 }
4279 gen_op(s, op, ot, reg);
4280 break;
4281 case 2: /* OP A, Iv */
4282 val = insn_get(s, ot);
4283 gen_op_movl_T1_im(val);
4284 gen_op(s, op, ot, OR_EAX);
4285 break;
4286 }
4287 }
4288 break;
4290 case 0x82:
4291 if (CODE64(s))
4292 goto illegal_op;
4293 case 0x80: /* GRP1 */
4294 case 0x81:
4295 case 0x83:
4296 {
4297 int val;
4299 if ((b & 1) == 0)
4300 ot = OT_BYTE;
4301 else
4302 ot = dflag + OT_WORD;
4304 modrm = ldub_code(s->pc++);
4305 mod = (modrm >> 6) & 3;
4306 rm = (modrm & 7) | REX_B(s);
4307 op = (modrm >> 3) & 7;
4309 if (mod != 3) {
4310 if (b == 0x83)
4311 s->rip_offset = 1;
4312 else
4313 s->rip_offset = insn_const_size(ot);
4314 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4315 opreg = OR_TMP0;
4316 } else {
4317 opreg = rm;
4318 }
4320 switch(b) {
4321 default:
4322 case 0x80:
4323 case 0x81:
4324 case 0x82:
4325 val = insn_get(s, ot);
4326 break;
4327 case 0x83:
4328 val = (int8_t)insn_get(s, OT_BYTE);
4329 break;
4330 }
4331 gen_op_movl_T1_im(val);
4332 gen_op(s, op, ot, opreg);
4333 }
4334 break;
4336 /**************************/
4337 /* inc, dec, and other misc arith */
4338 case 0x40 ... 0x47: /* inc Gv */
4339 ot = dflag ? OT_LONG : OT_WORD;
4340 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4341 break;
4342 case 0x48 ... 0x4f: /* dec Gv */
4343 ot = dflag ? OT_LONG : OT_WORD;
4344 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4345 break;
4346 case 0xf6: /* GRP3 */
4347 case 0xf7:
4348 if ((b & 1) == 0)
4349 ot = OT_BYTE;
4350 else
4351 ot = dflag + OT_WORD;
4353 modrm = ldub_code(s->pc++);
4354 mod = (modrm >> 6) & 3;
4355 rm = (modrm & 7) | REX_B(s);
4356 op = (modrm >> 3) & 7;
4357 if (mod != 3) {
4358 if (op == 0)
4359 s->rip_offset = insn_const_size(ot);
4360 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4361 gen_op_ld_T0_A0(ot + s->mem_index);
4362 } else {
4363 gen_op_mov_TN_reg(ot, 0, rm);
4364 }
4366 switch(op) {
4367 case 0: /* test */
4368 val = insn_get(s, ot);
4369 gen_op_movl_T1_im(val);
4370 gen_op_testl_T0_T1_cc();
4371 s->cc_op = CC_OP_LOGICB + ot;
4372 break;
4373 case 2: /* not */
4374 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4375 if (mod != 3) {
4376 gen_op_st_T0_A0(ot + s->mem_index);
4377 } else {
4378 gen_op_mov_reg_T0(ot, rm);
4379 }
4380 break;
4381 case 3: /* neg */
4382 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4383 if (mod != 3) {
4384 gen_op_st_T0_A0(ot + s->mem_index);
4385 } else {
4386 gen_op_mov_reg_T0(ot, rm);
4387 }
4388 gen_op_update_neg_cc();
4389 s->cc_op = CC_OP_SUBB + ot;
4390 break;
4391 case 4: /* mul */
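/* unsigned multiply: CF and OF are set iff the upper half of the product
   is non zero, so the upper half is kept in cpu_cc_src for the flag helpers */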
4392 switch(ot) {
4393 case OT_BYTE:
4394 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4395 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4396 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4397 /* XXX: use 32 bit mul which could be faster */
4398 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4399 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4400 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4401 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4402 s->cc_op = CC_OP_MULB;
4403 break;
4404 case OT_WORD:
4405 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4406 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4407 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4408 /* XXX: use 32 bit mul which could be faster */
4409 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4410 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4411 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4412 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4413 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4414 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4415 s->cc_op = CC_OP_MULW;
4416 break;
4417 default:
4418 case OT_LONG:
4419 #ifdef TARGET_X86_64
4420 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4421 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4422 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4423 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4424 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4425 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4426 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4427 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4428 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4429 #else
4430 {
4431 TCGv_i64 t0, t1;
4432 t0 = tcg_temp_new_i64();
4433 t1 = tcg_temp_new_i64();
4434 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4435 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4436 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4437 tcg_gen_mul_i64(t0, t0, t1);
4438 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4439 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4440 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4441 tcg_gen_shri_i64(t0, t0, 32);
4442 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4443 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4444 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4445 }
4446 #endif
4447 s->cc_op = CC_OP_MULL;
4448 break;
4449 #ifdef TARGET_X86_64
4450 case OT_QUAD:
4451 gen_helper_mulq_EAX_T0(cpu_T[0]);
4452 s->cc_op = CC_OP_MULQ;
4453 break;
4454 #endif
4455 }
4456 break;
4457 case 5: /* imul */
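/* signed multiply: CF and OF are set iff the product does not fit in the
   destination size; cpu_cc_src receives the difference between the result
   and its sign extended lower half, which is non zero exactly then */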
4458 switch(ot) {
4459 case OT_BYTE:
4460 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4461 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4462 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4463 /* XXX: use 32 bit mul which could be faster */
4464 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4465 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4466 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4467 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4468 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4469 s->cc_op = CC_OP_MULB;
4470 break;
4471 case OT_WORD:
4472 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4473 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4474 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4475 /* XXX: use 32 bit mul which could be faster */
4476 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4477 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4478 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4479 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4480 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4481 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4482 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4483 s->cc_op = CC_OP_MULW;
4484 break;
4485 default:
4486 case OT_LONG:
4487 #ifdef TARGET_X86_64
4488 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4489 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4490 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4491 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4492 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4493 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4494 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4495 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4496 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4497 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4498 #else
4499 {
4500 TCGv_i64 t0, t1;
4501 t0 = tcg_temp_new_i64();
4502 t1 = tcg_temp_new_i64();
4503 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4504 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4505 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4506 tcg_gen_mul_i64(t0, t0, t1);
4507 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4508 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4509 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4510 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4511 tcg_gen_shri_i64(t0, t0, 32);
4512 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4513 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4514 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4515 }
4516 #endif
4517 s->cc_op = CC_OP_MULL;
4518 break;
4519 #ifdef TARGET_X86_64
4520 case OT_QUAD:
4521 gen_helper_imulq_EAX_T0(cpu_T[0]);
4522 s->cc_op = CC_OP_MULQ;
4523 break;
4524 #endif
4525 }
4526 break;
4527 case 6: /* div */
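/* the division helpers may raise #DE, so EIP is synced to the start of
   the instruction before each call */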
4528 switch(ot) {
4529 case OT_BYTE:
4530 gen_jmp_im(pc_start - s->cs_base);
4531 gen_helper_divb_AL(cpu_T[0]);
4532 break;
4533 case OT_WORD:
4534 gen_jmp_im(pc_start - s->cs_base);
4535 gen_helper_divw_AX(cpu_T[0]);
4536 break;
4537 default:
4538 case OT_LONG:
4539 gen_jmp_im(pc_start - s->cs_base);
4540 gen_helper_divl_EAX(cpu_T[0]);
4541 break;
4542 #ifdef TARGET_X86_64
4543 case OT_QUAD:
4544 gen_jmp_im(pc_start - s->cs_base);
4545 gen_helper_divq_EAX(cpu_T[0]);
4546 break;
4547 #endif
4548 }
4549 break;
4550 case 7: /* idiv */
4551 switch(ot) {
4552 case OT_BYTE:
4553 gen_jmp_im(pc_start - s->cs_base);
4554 gen_helper_idivb_AL(cpu_T[0]);
4555 break;
4556 case OT_WORD:
4557 gen_jmp_im(pc_start - s->cs_base);
4558 gen_helper_idivw_AX(cpu_T[0]);
4559 break;
4560 default:
4561 case OT_LONG:
4562 gen_jmp_im(pc_start - s->cs_base);
4563 gen_helper_idivl_EAX(cpu_T[0]);
4564 break;
4565 #ifdef TARGET_X86_64
4566 case OT_QUAD:
4567 gen_jmp_im(pc_start - s->cs_base);
4568 gen_helper_idivq_EAX(cpu_T[0]);
4569 break;
4570 #endif
4571 }
4572 break;
4573 default:
4574 goto illegal_op;
4575 }
4576 break;
4578 case 0xfe: /* GRP4 */
4579 case 0xff: /* GRP5 */
4580 if ((b & 1) == 0)
4581 ot = OT_BYTE;
4582 else
4583 ot = dflag + OT_WORD;
4585 modrm = ldub_code(s->pc++);
4586 mod = (modrm >> 6) & 3;
4587 rm = (modrm & 7) | REX_B(s);
4588 op = (modrm >> 3) & 7;
4589 if (op >= 2 && b == 0xfe) {
4590 goto illegal_op;
4591 }
4592 if (CODE64(s)) {
4593 if (op == 2 || op == 4) {
4594 /* operand size for jumps is 64 bit */
4595 ot = OT_QUAD;
4596 } else if (op == 3 || op == 5) {
4597 ot = dflag ? OT_LONG + (rex_w == 1) : OT_WORD;
4598 } else if (op == 6) {
4599 /* default push size is 64 bit */
4600 ot = dflag ? OT_QUAD : OT_WORD;
4601 }
4602 }
4603 if (mod != 3) {
4604 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4605 if (op >= 2 && op != 3 && op != 5)
4606 gen_op_ld_T0_A0(ot + s->mem_index);
4607 } else {
4608 gen_op_mov_TN_reg(ot, 0, rm);
4609 }
4611 switch(op) {
4612 case 0: /* inc Ev */
4613 if (mod != 3)
4614 opreg = OR_TMP0;
4615 else
4616 opreg = rm;
4617 gen_inc(s, ot, opreg, 1);
4618 break;
4619 case 1: /* dec Ev */
4620 if (mod != 3)
4621 opreg = OR_TMP0;
4622 else
4623 opreg = rm;
4624 gen_inc(s, ot, opreg, -1);
4625 break;
4626 case 2: /* call Ev */
4627 /* XXX: optimize if memory (no 'and' is necessary) */
4628 if (s->dflag == 0)
4629 gen_op_andl_T0_ffff();
4630 next_eip = s->pc - s->cs_base;
4631 gen_movtl_T1_im(next_eip);
4632 gen_push_T1(s);
4633 gen_op_jmp_T0();
4634 gen_eob(s);
4635 break;
4636 case 3: /* lcall Ev */
4637 gen_op_ld_T1_A0(ot + s->mem_index);
4638 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4639 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4640 do_lcall:
4641 if (s->pe && !s->vm86) {
4642 if (s->cc_op != CC_OP_DYNAMIC)
4643 gen_op_set_cc_op(s->cc_op);
4644 gen_jmp_im(pc_start - s->cs_base);
4645 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4646 gen_helper_lcall_protected(cpu_tmp2_i32, cpu_T[1],
4647 tcg_const_i32(dflag),
4648 tcg_const_i32(s->pc - pc_start));
4649 } else {
4650 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4651 gen_helper_lcall_real(cpu_tmp2_i32, cpu_T[1],
4652 tcg_const_i32(dflag),
4653 tcg_const_i32(s->pc - s->cs_base));
4654 }
4655 gen_eob(s);
4656 break;
4657 case 4: /* jmp Ev */
4658 if (s->dflag == 0)
4659 gen_op_andl_T0_ffff();
4660 gen_op_jmp_T0();
4661 gen_eob(s);
4662 break;
4663 case 5: /* ljmp Ev */
4664 gen_op_ld_T1_A0(ot + s->mem_index);
4665 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4666 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4667 do_ljmp:
4668 if (s->pe && !s->vm86) {
4669 if (s->cc_op != CC_OP_DYNAMIC)
4670 gen_op_set_cc_op(s->cc_op);
4671 gen_jmp_im(pc_start - s->cs_base);
4672 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4673 gen_helper_ljmp_protected(cpu_tmp2_i32, cpu_T[1],
4674 tcg_const_i32(s->pc - pc_start));
4675 } else {
4676 gen_op_movl_seg_T0_vm(R_CS);
4677 gen_op_movl_T0_T1();
4678 gen_op_jmp_T0();
4679 }
4680 gen_eob(s);
4681 break;
4682 case 6: /* push Ev */
4683 gen_push_T0(s);
4684 break;
4685 default:
4686 goto illegal_op;
4687 }
4688 break;
4690 case 0x84: /* test Ev, Gv */
4691 case 0x85:
4692 if ((b & 1) == 0)
4693 ot = OT_BYTE;
4694 else
4695 ot = dflag + OT_WORD;
4697 modrm = ldub_code(s->pc++);
4698 reg = ((modrm >> 3) & 7) | rex_r;
4700 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4701 gen_op_mov_TN_reg(ot, 1, reg);
4702 gen_op_testl_T0_T1_cc();
4703 s->cc_op = CC_OP_LOGICB + ot;
4704 break;
4706 case 0xa8: /* test eAX, Iv */
4707 case 0xa9:
4708 if ((b & 1) == 0)
4709 ot = OT_BYTE;
4710 else
4711 ot = dflag + OT_WORD;
4712 val = insn_get(s, ot);
4714 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4715 gen_op_movl_T1_im(val);
4716 gen_op_testl_T0_T1_cc();
4717 s->cc_op = CC_OP_LOGICB + ot;
4718 break;
4720 case 0x98: /* CWDE/CBW */
4721 #ifdef TARGET_X86_64
4722 if (dflag == 2) {
4723 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4724 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4725 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4726 } else
4727 #endif
4728 if (dflag == 1) {
4729 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4730 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4731 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4732 } else {
4733 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4734 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4735 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4736 }
4737 break;
4738 case 0x99: /* CDQ/CWD */
4739 #ifdef TARGET_X86_64
4740 if (dflag == 2) {
4741 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4742 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4743 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4744 } else
4745 #endif
4746 if (dflag == 1) {
4747 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4748 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4749 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4750 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4751 } else {
4752 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4753 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4754 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4755 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4756 }
4757 break;
4758 case 0x1af: /* imul Gv, Ev */
4759 case 0x69: /* imul Gv, Ev, I */
4760 case 0x6b:
4761 ot = dflag + OT_WORD;
4762 modrm = ldub_code(s->pc++);
4763 reg = ((modrm >> 3) & 7) | rex_r;
4764 if (b == 0x69)
4765 s->rip_offset = insn_const_size(ot);
4766 else if (b == 0x6b)
4767 s->rip_offset = 1;
4768 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4769 if (b == 0x69) {
4770 val = insn_get(s, ot);
4771 gen_op_movl_T1_im(val);
4772 } else if (b == 0x6b) {
4773 val = (int8_t)insn_get(s, OT_BYTE);
4774 gen_op_movl_T1_im(val);
4775 } else {
4776 gen_op_mov_TN_reg(ot, 1, reg);
4777 }
4779 #ifdef TARGET_X86_64
4780 if (ot == OT_QUAD) {
4781 gen_helper_imulq_T0_T1(cpu_T[0], cpu_T[0], cpu_T[1]);
4782 } else
4783 #endif
4784 if (ot == OT_LONG) {
4785 #ifdef TARGET_X86_64
4786 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4787 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4788 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4789 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4790 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4791 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4792 #else
4793 {
4794 TCGv_i64 t0, t1;
4795 t0 = tcg_temp_new_i64();
4796 t1 = tcg_temp_new_i64();
4797 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4798 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4799 tcg_gen_mul_i64(t0, t0, t1);
4800 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4801 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4802 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4803 tcg_gen_shri_i64(t0, t0, 32);
4804 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4805 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4806 }
4807 #endif
4808 } else {
4809 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4810 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4811 /* XXX: use 32 bit mul which could be faster */
4812 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4813 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4814 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4815 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4816 }
4817 gen_op_mov_reg_T0(ot, reg);
4818 s->cc_op = CC_OP_MULB + ot;
4819 break;
4820 case 0x1c0:
4821 case 0x1c1: /* xadd Ev, Gv */
4822 if ((b & 1) == 0)
4823 ot = OT_BYTE;
4824 else
4825 ot = dflag + OT_WORD;
4826 modrm = ldub_code(s->pc++);
4827 reg = ((modrm >> 3) & 7) | rex_r;
4828 mod = (modrm >> 6) & 3;
4829 if (mod == 3) {
4830 rm = (modrm & 7) | REX_B(s);
4831 gen_op_mov_TN_reg(ot, 0, reg);
4832 gen_op_mov_TN_reg(ot, 1, rm);
4833 gen_op_addl_T0_T1();
4834 gen_op_mov_reg_T1(ot, reg);
4835 gen_op_mov_reg_T0(ot, rm);
4836 } else {
4837 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4838 gen_op_mov_TN_reg(ot, 0, reg);
4839 gen_op_ld_T1_A0(ot + s->mem_index);
4840 gen_op_addl_T0_T1();
4841 gen_op_st_T0_A0(ot + s->mem_index);
4842 gen_op_mov_reg_T1(ot, reg);
4843 }
4844 gen_op_update2_cc();
4845 s->cc_op = CC_OP_ADDB + ot;
4846 break;
4847 case 0x1b0:
4848 case 0x1b1: /* cmpxchg Ev, Gv */
4849 {
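/* compare EAX with the destination: if equal the source is stored,
   otherwise the destination is loaded into EAX; with a memory operand
   the store is performed on both paths, as on real hardware */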
4850 int label1, label2;
4851 TCGv t0, t1, t2, a0;
4853 if ((b & 1) == 0)
4854 ot = OT_BYTE;
4855 else
4856 ot = dflag + OT_WORD;
4857 modrm = ldub_code(s->pc++);
4858 reg = ((modrm >> 3) & 7) | rex_r;
4859 mod = (modrm >> 6) & 3;
4860 t0 = tcg_temp_local_new();
4861 t1 = tcg_temp_local_new();
4862 t2 = tcg_temp_local_new();
4863 a0 = tcg_temp_local_new();
4864 gen_op_mov_v_reg(ot, t1, reg);
4865 if (mod == 3) {
4866 rm = (modrm & 7) | REX_B(s);
4867 gen_op_mov_v_reg(ot, t0, rm);
4868 } else {
4869 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4870 tcg_gen_mov_tl(a0, cpu_A0);
4871 gen_op_ld_v(ot + s->mem_index, t0, a0);
4872 rm = 0; /* avoid warning */
4873 }
4874 label1 = gen_new_label();
4875 tcg_gen_sub_tl(t2, cpu_regs[R_EAX], t0);
4876 gen_extu(ot, t2);
4877 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4878 if (mod == 3) {
4879 label2 = gen_new_label();
4880 gen_op_mov_reg_v(ot, R_EAX, t0);
4881 tcg_gen_br(label2);
4882 gen_set_label(label1);
4883 gen_op_mov_reg_v(ot, rm, t1);
4884 gen_set_label(label2);
4885 } else {
4886 tcg_gen_mov_tl(t1, t0);
4887 gen_op_mov_reg_v(ot, R_EAX, t0);
4888 gen_set_label(label1);
4889 /* always store */
4890 gen_op_st_v(ot + s->mem_index, t1, a0);
4891 }
4892 tcg_gen_mov_tl(cpu_cc_src, t0);
4893 tcg_gen_mov_tl(cpu_cc_dst, t2);
4894 s->cc_op = CC_OP_SUBB + ot;
4895 tcg_temp_free(t0);
4896 tcg_temp_free(t1);
4897 tcg_temp_free(t2);
4898 tcg_temp_free(a0);
4899 }
4900 break;
4901 case 0x1c7: /* cmpxchg8b */
4902 modrm = ldub_code(s->pc++);
4903 mod = (modrm >> 6) & 3;
4904 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4905 goto illegal_op;
4906 #ifdef TARGET_X86_64
4907 if (dflag == 2) {
4908 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4909 goto illegal_op;
4910 gen_jmp_im(pc_start - s->cs_base);
4911 if (s->cc_op != CC_OP_DYNAMIC)
4912 gen_op_set_cc_op(s->cc_op);
4913 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4914 gen_helper_cmpxchg16b(cpu_A0);
4915 } else
4916 #endif
4917 {
4918 if (!(s->cpuid_features & CPUID_CX8))
4919 goto illegal_op;
4920 gen_jmp_im(pc_start - s->cs_base);
4921 if (s->cc_op != CC_OP_DYNAMIC)
4922 gen_op_set_cc_op(s->cc_op);
4923 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4924 gen_helper_cmpxchg8b(cpu_A0);
4925 }
4926 s->cc_op = CC_OP_EFLAGS;
4927 break;
4929 /**************************/
4930 /* push/pop */
4931 case 0x50 ... 0x57: /* push */
4932 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4933 gen_push_T0(s);
4934 break;
4935 case 0x58 ... 0x5f: /* pop */
4936 if (CODE64(s)) {
4937 ot = dflag ? OT_QUAD : OT_WORD;
4938 } else {
4939 ot = dflag + OT_WORD;
4940 }
4941 gen_pop_T0(s);
4942 /* NOTE: order is important for pop %sp */
4943 gen_pop_update(s);
4944 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4945 break;
4946 case 0x60: /* pusha */
4947 if (CODE64(s))
4948 goto illegal_op;
4949 gen_pusha(s);
4950 break;
4951 case 0x61: /* popa */
4952 if (CODE64(s))
4953 goto illegal_op;
4954 gen_popa(s);
4955 break;
4956 case 0x68: /* push Iv */
4957 case 0x6a:
4958 if (CODE64(s)) {
4959 ot = dflag ? OT_QUAD : OT_WORD;
4960 } else {
4961 ot = dflag + OT_WORD;
4962 }
4963 if (b == 0x68)
4964 val = insn_get(s, ot);
4965 else
4966 val = (int8_t)insn_get(s, OT_BYTE);
4967 gen_op_movl_T0_im(val);
4968 gen_push_T0(s);
4969 break;
4970 case 0x8f: /* pop Ev */
4971 if (CODE64(s)) {
4972 ot = dflag ? OT_QUAD : OT_WORD;
4973 } else {
4974 ot = dflag + OT_WORD;
4975 }
4976 modrm = ldub_code(s->pc++);
4977 mod = (modrm >> 6) & 3;
4978 gen_pop_T0(s);
4979 if (mod == 3) {
4980 /* NOTE: order is important for pop %sp */
4981 gen_pop_update(s);
4982 rm = (modrm & 7) | REX_B(s);
4983 gen_op_mov_reg_T0(ot, rm);
4984 } else {
4985 /* NOTE: order is important too for MMU exceptions */
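/* a non zero popl_esp_hack makes the address computation see the stack
   pointer value after the pop when ESP is used as a base */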
4986 s->popl_esp_hack = 1 << ot;
4987 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4988 s->popl_esp_hack = 0;
4989 gen_pop_update(s);
4990 }
4991 break;
4992 case 0xc8: /* enter */
4993 {
4994 int level;
4995 val = lduw_code(s->pc);
4996 s->pc += 2;
4997 level = ldub_code(s->pc++);
4998 gen_enter(s, val, level);
4999 }
5000 break;
5001 case 0xc9: /* leave */
5002 /* XXX: exception not precise (ESP is updated before potential exception) */
5003 if (CODE64(s)) {
5004 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5005 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5006 } else if (s->ss32) {
5007 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5008 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5009 } else {
5010 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5011 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5012 }
5013 gen_pop_T0(s);
5014 if (CODE64(s)) {
5015 ot = dflag ? OT_QUAD : OT_WORD;
5016 } else {
5017 ot = dflag + OT_WORD;
5018 }
5019 gen_op_mov_reg_T0(ot, R_EBP);
5020 gen_pop_update(s);
5021 break;
5022 case 0x06: /* push es */
5023 case 0x0e: /* push cs */
5024 case 0x16: /* push ss */
5025 case 0x1e: /* push ds */
5026 if (CODE64(s))
5027 goto illegal_op;
5028 gen_op_movl_T0_seg(b >> 3);
5029 gen_push_T0(s);
5030 break;
5031 case 0x1a0: /* push fs */
5032 case 0x1a8: /* push gs */
5033 gen_op_movl_T0_seg((b >> 3) & 7);
5034 gen_push_T0(s);
5035 break;
5036 case 0x07: /* pop es */
5037 case 0x17: /* pop ss */
5038 case 0x1f: /* pop ds */
5039 if (CODE64(s))
5040 goto illegal_op;
5041 reg = b >> 3;
5042 gen_pop_T0(s);
5043 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5044 gen_pop_update(s);
5045 if (reg == R_SS) {
5046 /* if reg == SS, inhibit interrupts/trace. */
5047 /* If several instructions disable interrupts, only the
5048 _first_ does it */
5049 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5050 gen_helper_set_inhibit_irq();
5051 s->tf = 0;
5052 }
5053 if (s->is_jmp) {
5054 gen_jmp_im(s->pc - s->cs_base);
5055 gen_eob(s);
5056 }
5057 break;
5058 case 0x1a1: /* pop fs */
5059 case 0x1a9: /* pop gs */
5060 gen_pop_T0(s);
5061 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5062 gen_pop_update(s);
5063 if (s->is_jmp) {
5064 gen_jmp_im(s->pc - s->cs_base);
5065 gen_eob(s);
5066 }
5067 break;
5069 /**************************/
5070 /* mov */
5071 case 0x88:
5072 case 0x89: /* mov Gv, Ev */
5073 if ((b & 1) == 0)
5074 ot = OT_BYTE;
5075 else
5076 ot = dflag + OT_WORD;
5077 modrm = ldub_code(s->pc++);
5078 reg = ((modrm >> 3) & 7) | rex_r;
5080 /* generate a generic store */
5081 gen_ldst_modrm(s, modrm, ot, reg, 1);
5082 break;
5083 case 0xc6:
5084 case 0xc7: /* mov Ev, Iv */
5085 if ((b & 1) == 0)
5086 ot = OT_BYTE;
5087 else
5088 ot = dflag + OT_WORD;
5089 modrm = ldub_code(s->pc++);
5090 mod = (modrm >> 6) & 3;
5091 if (mod != 3) {
5092 s->rip_offset = insn_const_size(ot);
5093 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5094 }
5095 val = insn_get(s, ot);
5096 gen_op_movl_T0_im(val);
5097 if (mod != 3)
5098 gen_op_st_T0_A0(ot + s->mem_index);
5099 else
5100 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5101 break;
5102 case 0x8a:
5103 case 0x8b: /* mov Ev, Gv */
5104 if ((b & 1) == 0)
5105 ot = OT_BYTE;
5106 else
5107 ot = OT_WORD + dflag;
5108 modrm = ldub_code(s->pc++);
5109 reg = ((modrm >> 3) & 7) | rex_r;
5111 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5112 gen_op_mov_reg_T0(ot, reg);
5113 break;
5114 case 0x8e: /* mov seg, Gv */
5115 modrm = ldub_code(s->pc++);
5116 reg = (modrm >> 3) & 7;
5117 if (reg >= 6 || reg == R_CS)
5118 goto illegal_op;
5119 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5120 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5121 if (reg == R_SS) {
5122 /* if reg == SS, inhibit interrupts/trace */
5123 /* If several instructions disable interrupts, only the
5124 _first_ does it */
5125 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5126 gen_helper_set_inhibit_irq();
5127 s->tf = 0;
5128 }
5129 if (s->is_jmp) {
5130 gen_jmp_im(s->pc - s->cs_base);
5131 gen_eob(s);
5132 }
5133 break;
5134 case 0x8c: /* mov Gv, seg */
5135 modrm = ldub_code(s->pc++);
5136 reg = (modrm >> 3) & 7;
5137 mod = (modrm >> 6) & 3;
5138 if (reg >= 6)
5139 goto illegal_op;
5140 gen_op_movl_T0_seg(reg);
5141 if (mod == 3)
5142 ot = OT_WORD + dflag;
5143 else
5144 ot = OT_WORD;
5145 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5146 break;
5148 case 0x1b6: /* movzbS Gv, Eb */
5149 case 0x1b7: /* movzwS Gv, Eb */
5150 case 0x1be: /* movsbS Gv, Eb */
5151 case 0x1bf: /* movswS Gv, Eb */
5152 {
5153 int d_ot;
5154 /* d_ot is the size of destination */
5155 d_ot = dflag + OT_WORD;
5156 /* ot is the size of source */
5157 ot = (b & 1) + OT_BYTE;
5158 modrm = ldub_code(s->pc++);
5159 reg = ((modrm >> 3) & 7) | rex_r;
5160 mod = (modrm >> 6) & 3;
5161 rm = (modrm & 7) | REX_B(s);
5163 if (mod == 3) {
5164 gen_op_mov_TN_reg(ot, 0, rm);
5165 switch(ot | (b & 8)) {
5166 case OT_BYTE:
5167 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5168 break;
5169 case OT_BYTE | 8:
5170 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5171 break;
5172 case OT_WORD:
5173 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5174 break;
5175 default:
5176 case OT_WORD | 8:
5177 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5178 break;
5179 }
5180 gen_op_mov_reg_T0(d_ot, reg);
5181 } else {
5182 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5183 if (b & 8) {
5184 gen_op_lds_T0_A0(ot + s->mem_index);
5185 } else {
5186 gen_op_ldu_T0_A0(ot + s->mem_index);
5187 }
5188 gen_op_mov_reg_T0(d_ot, reg);
5189 }
5190 }
5191 break;
5193 case 0x8d: /* lea */
5194 ot = dflag + OT_WORD;
5195 modrm = ldub_code(s->pc++);
5196 mod = (modrm >> 6) & 3;
5197 if (mod == 3)
5198 goto illegal_op;
5199 reg = ((modrm >> 3) & 7) | rex_r;
5200 /* we must ensure that no segment is added */
5201 s->override = -1;
5202 val = s->addseg;
5203 s->addseg = 0;
5204 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5205 s->addseg = val;
5206 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5207 break;
5209 case 0xa0: /* mov EAX, Ov */
5210 case 0xa1:
5211 case 0xa2: /* mov Ov, EAX */
5212 case 0xa3:
5213 {
5214 target_ulong offset_addr;
5216 if ((b & 1) == 0)
5217 ot = OT_BYTE;
5218 else
5219 ot = dflag + OT_WORD;
5220 #ifdef TARGET_X86_64
5221 if (s->aflag == 2) {
5222 offset_addr = ldq_code(s->pc);
5223 s->pc += 8;
5224 gen_op_movq_A0_im(offset_addr);
5225 } else
5226 #endif
5227 {
5228 if (s->aflag) {
5229 offset_addr = insn_get(s, OT_LONG);
5230 } else {
5231 offset_addr = insn_get(s, OT_WORD);
5232 }
5233 gen_op_movl_A0_im(offset_addr);
5234 }
5235 gen_add_A0_ds_seg(s);
5236 if ((b & 2) == 0) {
5237 gen_op_ld_T0_A0(ot + s->mem_index);
5238 gen_op_mov_reg_T0(ot, R_EAX);
5239 } else {
5240 gen_op_mov_TN_reg(ot, 0, R_EAX);
5241 gen_op_st_T0_A0(ot + s->mem_index);
5242 }
5243 }
5244 break;
5245 case 0xd7: /* xlat */
5246 #ifdef TARGET_X86_64
5247 if (s->aflag == 2) {
5248 gen_op_movq_A0_reg(R_EBX);
5249 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5250 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5251 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5252 } else
5253 #endif
5254 {
5255 gen_op_movl_A0_reg(R_EBX);
5256 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5257 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5258 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5259 if (s->aflag == 0)
5260 gen_op_andl_A0_ffff();
5261 else
5262 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5263 }
5264 gen_add_A0_ds_seg(s);
5265 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5266 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5267 break;
5268 case 0xb0 ... 0xb7: /* mov R, Ib */
5269 val = insn_get(s, OT_BYTE);
5270 gen_op_movl_T0_im(val);
5271 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5272 break;
5273 case 0xb8 ... 0xbf: /* mov R, Iv */
5274 #ifdef TARGET_X86_64
5275 if (dflag == 2) {
5276 uint64_t tmp;
5277 /* 64 bit case */
5278 tmp = ldq_code(s->pc);
5279 s->pc += 8;
5280 reg = (b & 7) | REX_B(s);
5281 gen_movtl_T0_im(tmp);
5282 gen_op_mov_reg_T0(OT_QUAD, reg);
5283 } else
5284 #endif
5285 {
5286 ot = dflag ? OT_LONG : OT_WORD;
5287 val = insn_get(s, ot);
5288 reg = (b & 7) | REX_B(s);
5289 gen_op_movl_T0_im(val);
5290 gen_op_mov_reg_T0(ot, reg);
5291 }
5292 break;
5294 case 0x91 ... 0x97: /* xchg R, EAX */
5295 ot = dflag + OT_WORD;
5296 reg = (b & 7) | REX_B(s);
5297 rm = R_EAX;
5298 goto do_xchg_reg;
5299 case 0x86:
5300 case 0x87: /* xchg Ev, Gv */
5301 if ((b & 1) == 0)
5302 ot = OT_BYTE;
5303 else
5304 ot = dflag + OT_WORD;
5305 modrm = ldub_code(s->pc++);
5306 reg = ((modrm >> 3) & 7) | rex_r;
5307 mod = (modrm >> 6) & 3;
5308 if (mod == 3) {
5309 rm = (modrm & 7) | REX_B(s);
5310 do_xchg_reg:
5311 gen_op_mov_TN_reg(ot, 0, reg);
5312 gen_op_mov_TN_reg(ot, 1, rm);
5313 gen_op_mov_reg_T0(ot, rm);
5314 gen_op_mov_reg_T1(ot, reg);
5315 } else {
5316 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5317 gen_op_mov_TN_reg(ot, 0, reg);
5318 /* for xchg, lock is implicit */
5319 if (!(prefixes & PREFIX_LOCK))
5320 gen_helper_lock();
5321 gen_op_ld_T1_A0(ot + s->mem_index);
5322 gen_op_st_T0_A0(ot + s->mem_index);
5323 if (!(prefixes & PREFIX_LOCK))
5324 gen_helper_unlock();
5325 gen_op_mov_reg_T1(ot, reg);
5326 }
5327 break;
5328 case 0xc4: /* les Gv */
5329 if (CODE64(s))
5330 goto illegal_op;
5331 op = R_ES;
5332 goto do_lxx;
5333 case 0xc5: /* lds Gv */
5334 if (CODE64(s))
5335 goto illegal_op;
5336 op = R_DS;
5337 goto do_lxx;
5338 case 0x1b2: /* lss Gv */
5339 op = R_SS;
5340 goto do_lxx;
5341 case 0x1b4: /* lfs Gv */
5342 op = R_FS;
5343 goto do_lxx;
5344 case 0x1b5: /* lgs Gv */
5345 op = R_GS;
5346 do_lxx:
5347 ot = dflag ? OT_LONG : OT_WORD;
5348 modrm = ldub_code(s->pc++);
5349 reg = ((modrm >> 3) & 7) | rex_r;
5350 mod = (modrm >> 6) & 3;
5351 if (mod == 3)
5352 goto illegal_op;
5353 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5354 gen_op_ld_T1_A0(ot + s->mem_index);
5355 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5356 /* load the segment first to handle exceptions properly */
5357 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5358 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5359 /* then put the data */
5360 gen_op_mov_reg_T1(ot, reg);
5361 if (s->is_jmp) {
5362 gen_jmp_im(s->pc - s->cs_base);
5363 gen_eob(s);
5364 }
5365 break;
5367 /************************/
5368 /* shifts */
5369 case 0xc0:
5370 case 0xc1:
5371 /* shift Ev,Ib */
5372 shift = 2;
5373 grp2:
5374 {
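/* shift selects the count operand: 0 = CL, 1 = constant 1, 2 = immediate byte */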
5375 if ((b & 1) == 0)
5376 ot = OT_BYTE;
5377 else
5378 ot = dflag + OT_WORD;
5380 modrm = ldub_code(s->pc++);
5381 mod = (modrm >> 6) & 3;
5382 op = (modrm >> 3) & 7;
5384 if (mod != 3) {
5385 if (shift == 2) {
5386 s->rip_offset = 1;
5387 }
5388 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5389 opreg = OR_TMP0;
5390 } else {
5391 opreg = (modrm & 7) | REX_B(s);
5392 }
5394 /* simpler op */
5395 if (shift == 0) {
5396 gen_shift(s, op, ot, opreg, OR_ECX);
5397 } else {
5398 if (shift == 2) {
5399 shift = ldub_code(s->pc++);
5400 }
5401 gen_shifti(s, op, ot, opreg, shift);
5402 }
5403 }
5404 break;
5405 case 0xd0:
5406 case 0xd1:
5407 /* shift Ev,1 */
5408 shift = 1;
5409 goto grp2;
5410 case 0xd2:
5411 case 0xd3:
5412 /* shift Ev,cl */
5413 shift = 0;
5414 goto grp2;
5416 case 0x1a4: /* shld imm */
5417 op = 0;
5418 shift = 1;
5419 goto do_shiftd;
5420 case 0x1a5: /* shld cl */
5421 op = 0;
5422 shift = 0;
5423 goto do_shiftd;
5424 case 0x1ac: /* shrd imm */
5425 op = 1;
5426 shift = 1;
5427 goto do_shiftd;
5428 case 0x1ad: /* shrd cl */
5429 op = 1;
5430 shift = 0;
5431 do_shiftd:
5432 ot = dflag + OT_WORD;
5433 modrm = ldub_code(s->pc++);
5434 mod = (modrm >> 6) & 3;
5435 rm = (modrm & 7) | REX_B(s);
5436 reg = ((modrm >> 3) & 7) | rex_r;
5437 if (mod != 3) {
5438 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5439 opreg = OR_TMP0;
5440 } else {
5441 opreg = rm;
5442 }
5443 gen_op_mov_TN_reg(ot, 1, reg);
5445 if (shift) {
5446 val = ldub_code(s->pc++);
5447 tcg_gen_movi_tl(cpu_T3, val);
5448 } else {
5449 tcg_gen_mov_tl(cpu_T3, cpu_regs[R_ECX]);
5450 }
5451 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5452 break;
5454 /************************/
5455 /* floats */
5456 case 0xd8 ... 0xdf:
5457 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5458 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5459 /* XXX: what to do if illegal op ? */
5460 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5461 break;
5462 }
5463 modrm = ldub_code(s->pc++);
5464 mod = (modrm >> 6) & 3;
5465 rm = modrm & 7;
5466 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
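/* op packs the whole FPU opcode: bits 5-3 are the low three bits of the
   d8..df byte, bits 2-0 the ModRM reg field */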
5467 if (mod != 3) {
5468 /* memory op */
5469 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5470 switch(op) {
5471 case 0x00 ... 0x07: /* fxxxs */
5472 case 0x10 ... 0x17: /* fixxxl */
5473 case 0x20 ... 0x27: /* fxxxl */
5474 case 0x30 ... 0x37: /* fixxx */
5475 {
5476 int op1;
5477 op1 = op & 7;
5479 switch(op >> 4) {
5480 case 0:
5481 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5482 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5483 gen_helper_flds_FT0(cpu_tmp2_i32);
5484 break;
5485 case 1:
5486 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5487 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5488 gen_helper_fildl_FT0(cpu_tmp2_i32);
5489 break;
5490 case 2:
5491 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5492 (s->mem_index >> 2) - 1);
5493 gen_helper_fldl_FT0(cpu_tmp1_i64);
5494 break;
5495 case 3:
5496 default:
5497 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5498 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5499 gen_helper_fildl_FT0(cpu_tmp2_i32);
5500 break;
5501 }
5503 gen_helper_fp_arith_ST0_FT0(op1);
5504 if (op1 == 3) {
5505 /* fcomp needs pop */
5506 gen_helper_fpop();
5507 }
5508 }
5509 break;
5510 case 0x08: /* flds */
5511 case 0x0a: /* fsts */
5512 case 0x0b: /* fstps */
5513 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5514 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5515 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5516 switch(op & 7) {
5517 case 0:
5518 switch(op >> 4) {
5519 case 0:
5520 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5521 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5522 gen_helper_flds_ST0(cpu_tmp2_i32);
5523 break;
5524 case 1:
5525 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5526 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5527 gen_helper_fildl_ST0(cpu_tmp2_i32);
5528 break;
5529 case 2:
5530 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5531 (s->mem_index >> 2) - 1);
5532 gen_helper_fldl_ST0(cpu_tmp1_i64);
5533 break;
5534 case 3:
5535 default:
5536 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5537 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5538 gen_helper_fildl_ST0(cpu_tmp2_i32);
5539 break;
5540 }
5541 break;
5542 case 1:
5543 /* XXX: the corresponding CPUID bit must be tested ! */
5544 switch(op >> 4) {
5545 case 1:
5546 gen_helper_fisttl_ST0(cpu_tmp2_i32);
5547 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5548 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5549 break;
5550 case 2:
5551 gen_helper_fisttll_ST0(cpu_tmp1_i64);
5552 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5553 (s->mem_index >> 2) - 1);
5554 break;
5555 case 3:
5556 default:
5557 gen_helper_fistt_ST0(cpu_tmp2_i32);
5558 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5559 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5560 break;
5561 }
5562 gen_helper_fpop();
5563 break;
5564 default:
5565 switch(op >> 4) {
5566 case 0:
5567 gen_helper_fsts_ST0(cpu_tmp2_i32);
5568 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5569 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5570 break;
5571 case 1:
5572 gen_helper_fistl_ST0(cpu_tmp2_i32);
5573 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5574 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5575 break;
5576 case 2:
5577 gen_helper_fstl_ST0(cpu_tmp1_i64);
5578 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5579 (s->mem_index >> 2) - 1);
5580 break;
5581 case 3:
5582 default:
5583 gen_helper_fist_ST0(cpu_tmp2_i32);
5584 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5585 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5586 break;
5587 }
5588 if ((op & 7) == 3)
5589 gen_helper_fpop();
5590 break;
5591 }
5592 break;
5593 case 0x0c: /* fldenv mem */
5594 if (s->cc_op != CC_OP_DYNAMIC)
5595 gen_op_set_cc_op(s->cc_op);
5596 gen_jmp_im(pc_start - s->cs_base);
5597 gen_helper_fldenv(
5598 cpu_A0, tcg_const_i32(s->dflag));
5599 break;
5600 case 0x0d: /* fldcw mem */
5601 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5602 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5603 gen_helper_fldcw(cpu_tmp2_i32);
5604 break;
5605 case 0x0e: /* fnstenv mem */
5606 if (s->cc_op != CC_OP_DYNAMIC)
5607 gen_op_set_cc_op(s->cc_op);
5608 gen_jmp_im(pc_start - s->cs_base);
5609 gen_helper_fstenv(cpu_A0, tcg_const_i32(s->dflag));
5610 break;
5611 case 0x0f: /* fnstcw mem */
5612 gen_helper_fnstcw(cpu_tmp2_i32);
5613 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5614 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5615 break;
5616 case 0x1d: /* fldt mem */
5617 if (s->cc_op != CC_OP_DYNAMIC)
5618 gen_op_set_cc_op(s->cc_op);
5619 gen_jmp_im(pc_start - s->cs_base);
5620 gen_helper_fldt_ST0(cpu_A0);
5621 break;
5622 case 0x1f: /* fstpt mem */
5623 if (s->cc_op != CC_OP_DYNAMIC)
5624 gen_op_set_cc_op(s->cc_op);
5625 gen_jmp_im(pc_start - s->cs_base);
5626 gen_helper_fstt_ST0(cpu_A0);
5627 gen_helper_fpop();
5628 break;
5629 case 0x2c: /* frstor mem */
5630 if (s->cc_op != CC_OP_DYNAMIC)
5631 gen_op_set_cc_op(s->cc_op);
5632 gen_jmp_im(pc_start - s->cs_base);
5633 gen_helper_frstor(cpu_A0, tcg_const_i32(s->dflag));
5634 break;
5635 case 0x2e: /* fnsave mem */
5636 if (s->cc_op != CC_OP_DYNAMIC)
5637 gen_op_set_cc_op(s->cc_op);
5638 gen_jmp_im(pc_start - s->cs_base);
5639 gen_helper_fsave(cpu_A0, tcg_const_i32(s->dflag));
5640 break;
5641 case 0x2f: /* fnstsw mem */
5642 gen_helper_fnstsw(cpu_tmp2_i32);
5643 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5644 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5645 break;
5646 case 0x3c: /* fbld */
5647 if (s->cc_op != CC_OP_DYNAMIC)
5648 gen_op_set_cc_op(s->cc_op);
5649 gen_jmp_im(pc_start - s->cs_base);
5650 gen_helper_fbld_ST0(cpu_A0);
5651 break;
5652 case 0x3e: /* fbstp */
5653 if (s->cc_op != CC_OP_DYNAMIC)
5654 gen_op_set_cc_op(s->cc_op);
5655 gen_jmp_im(pc_start - s->cs_base);
5656 gen_helper_fbst_ST0(cpu_A0);
5657 gen_helper_fpop();
5658 break;
5659 case 0x3d: /* fildll */
5660 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5661 (s->mem_index >> 2) - 1);
5662 gen_helper_fildll_ST0(cpu_tmp1_i64);
5663 break;
5664 case 0x3f: /* fistpll */
5665 gen_helper_fistll_ST0(cpu_tmp1_i64);
5666 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5667 (s->mem_index >> 2) - 1);
5668 gen_helper_fpop();
5669 break;
5670 default:
5671 goto illegal_op;
5672 }
5673 } else {
5674 /* register float ops */
5675 opreg = rm;
5677 switch(op) {
5678 case 0x08: /* fld sti */
5679 gen_helper_fpush();
5680 gen_helper_fmov_ST0_STN(tcg_const_i32((opreg + 1) & 7));
5681 break;
5682 case 0x09: /* fxchg sti */
5683 case 0x29: /* fxchg4 sti, undocumented op */
5684 case 0x39: /* fxchg7 sti, undocumented op */
5685 gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg));
5686 break;
5687 case 0x0a: /* grp d9/2 */
5688 switch(rm) {
5689 case 0: /* fnop */
5690 /* check exceptions (FreeBSD FPU probe) */
5691 if (s->cc_op != CC_OP_DYNAMIC)
5692 gen_op_set_cc_op(s->cc_op);
5693 gen_jmp_im(pc_start - s->cs_base);
5694 gen_helper_fwait();
5695 break;
5696 default:
5697 goto illegal_op;
5698 }
5699 break;
5700 case 0x0c: /* grp d9/4 */
5701 switch(rm) {
5702 case 0: /* fchs */
5703 gen_helper_fchs_ST0();
5704 break;
5705 case 1: /* fabs */
5706 gen_helper_fabs_ST0();
5707 break;
5708 case 4: /* ftst */
5709 gen_helper_fldz_FT0();
5710 gen_helper_fcom_ST0_FT0();
5711 break;
5712 case 5: /* fxam */
5713 gen_helper_fxam_ST0();
5714 break;
5715 default:
5716 goto illegal_op;
5717 }
5718 break;
5719 case 0x0d: /* grp d9/5 */
5720 {
5721 switch(rm) {
5722 case 0:
5723 gen_helper_fpush();
5724 gen_helper_fld1_ST0();
5725 break;
5726 case 1:
5727 gen_helper_fpush();
5728 gen_helper_fldl2t_ST0();
5729 break;
5730 case 2:
5731 gen_helper_fpush();
5732 gen_helper_fldl2e_ST0();
5733 break;
5734 case 3:
5735 gen_helper_fpush();
5736 gen_helper_fldpi_ST0();
5737 break;
5738 case 4:
5739 gen_helper_fpush();
5740 gen_helper_fldlg2_ST0();
5741 break;
5742 case 5:
5743 gen_helper_fpush();
5744 gen_helper_fldln2_ST0();
5745 break;
5746 case 6:
5747 gen_helper_fpush();
5748 gen_helper_fldz_ST0();
5749 break;
5750 default:
5751 goto illegal_op;
5752 }
5753 }
5754 break;
5755 case 0x0e: /* grp d9/6 */
5756 switch(rm) {
5757 case 0: /* f2xm1 */
5758 gen_helper_f2xm1();
5759 break;
5760 case 1: /* fyl2x */
5761 gen_helper_fyl2x();
5762 break;
5763 case 2: /* fptan */
5764 gen_helper_fptan();
5765 break;
5766 case 3: /* fpatan */
5767 gen_helper_fpatan();
5768 break;
5769 case 4: /* fxtract */
5770 gen_helper_fxtract();
5771 break;
5772 case 5: /* fprem1 */
5773 gen_helper_fprem1();
5774 break;
5775 case 6: /* fdecstp */
5776 gen_helper_fdecstp();
5777 break;
5778 default:
5779 case 7: /* fincstp */
5780 gen_helper_fincstp();
5781 break;
5782 }
5783 break;
5784 case 0x0f: /* grp d9/7 */
5785 switch(rm) {
5786 case 0: /* fprem */
5787 gen_helper_fprem();
5788 break;
5789 case 1: /* fyl2xp1 */
5790 gen_helper_fyl2xp1();
5791 break;
5792 case 2: /* fsqrt */
5793 gen_helper_fsqrt();
5794 break;
5795 case 3: /* fsincos */
5796 gen_helper_fsincos();
5797 break;
5798 case 5: /* fscale */
5799 gen_helper_fscale();
5800 break;
5801 case 4: /* frndint */
5802 gen_helper_frndint();
5803 break;
5804 case 6: /* fsin */
5805 gen_helper_fsin();
5806 break;
5807 default:
5808 case 7: /* fcos */
5809 gen_helper_fcos();
5810 break;
5811 }
5812 break;
5813 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5814 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5815 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5816 {
5817 int op1;
5819 op1 = op & 7;
5820 if (op >= 0x20) {
5821 gen_helper_fp_arith_STN_ST0(op1, opreg);
5822 if (op >= 0x30)
5823 gen_helper_fpop();
5824 } else {
5825 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5826 gen_helper_fp_arith_ST0_FT0(op1);
5827 }
5828 }
5829 break;
5830 case 0x02: /* fcom */
5831 case 0x22: /* fcom2, undocumented op */
5832 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5833 gen_helper_fcom_ST0_FT0();
5834 break;
5835 case 0x03: /* fcomp */
5836 case 0x23: /* fcomp3, undocumented op */
5837 case 0x32: /* fcomp5, undocumented op */
5838 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5839 gen_helper_fcom_ST0_FT0();
5840 gen_helper_fpop();
5841 break;
5842 case 0x15: /* da/5 */
5843 switch(rm) {
5844 case 1: /* fucompp */
5845 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5846 gen_helper_fucom_ST0_FT0();
5847 gen_helper_fpop();
5848 gen_helper_fpop();
5849 break;
5850 default:
5851 goto illegal_op;
5852 }
5853 break;
5854 case 0x1c:
5855 switch(rm) {
5856 case 0: /* feni (287 only, just do nop here) */
5857 break;
5858 case 1: /* fdisi (287 only, just do nop here) */
5859 break;
5860 case 2: /* fclex */
5861 gen_helper_fclex();
5862 break;
5863 case 3: /* fninit */
5864 gen_helper_fninit();
5865 break;
5866 case 4: /* fsetpm (287 only, just do nop here) */
5867 break;
5868 default:
5869 goto illegal_op;
5870 }
5871 break;
5872 case 0x1d: /* fucomi */
5873 if (s->cc_op != CC_OP_DYNAMIC)
5874 gen_op_set_cc_op(s->cc_op);
5875 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5876 gen_helper_fucomi_ST0_FT0();
5877 s->cc_op = CC_OP_EFLAGS;
5878 break;
5879 case 0x1e: /* fcomi */
5880 if (s->cc_op != CC_OP_DYNAMIC)
5881 gen_op_set_cc_op(s->cc_op);
5882 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5883 gen_helper_fcomi_ST0_FT0();
5884 s->cc_op = CC_OP_EFLAGS;
5885 break;
5886 case 0x28: /* ffree sti */
5887 gen_helper_ffree_STN(tcg_const_i32(opreg));
5888 break;
5889 case 0x2a: /* fst sti */
5890 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5891 break;
5892 case 0x2b: /* fstp sti */
5893 case 0x0b: /* fstp1 sti, undocumented op */
5894 case 0x3a: /* fstp8 sti, undocumented op */
5895 case 0x3b: /* fstp9 sti, undocumented op */
5896 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5897 gen_helper_fpop();
5898 break;
5899 case 0x2c: /* fucom st(i) */
5900 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5901 gen_helper_fucom_ST0_FT0();
5902 break;
5903 case 0x2d: /* fucomp st(i) */
5904 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5905 gen_helper_fucom_ST0_FT0();
5906 gen_helper_fpop();
5907 break;
5908 case 0x33: /* de/3 */
5909 switch(rm) {
5910 case 1: /* fcompp */
5911 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5912 gen_helper_fcom_ST0_FT0();
5913 gen_helper_fpop();
5914 gen_helper_fpop();
5915 break;
5916 default:
5917 goto illegal_op;
5918 }
5919 break;
5920 case 0x38: /* ffreep sti, undocumented op */
5921 gen_helper_ffree_STN(tcg_const_i32(opreg));
5922 gen_helper_fpop();
5923 break;
5924 case 0x3c: /* df/4 */
5925 switch(rm) {
5926 case 0:
5927 gen_helper_fnstsw(cpu_tmp2_i32);
5928 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5929 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5930 break;
5931 default:
5932 goto illegal_op;
5933 }
5934 break;
5935 case 0x3d: /* fucomip */
5936 if (s->cc_op != CC_OP_DYNAMIC)
5937 gen_op_set_cc_op(s->cc_op);
5938 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5939 gen_helper_fucomi_ST0_FT0();
5940 gen_helper_fpop();
5941 s->cc_op = CC_OP_EFLAGS;
5942 break;
5943 case 0x3e: /* fcomip */
5944 if (s->cc_op != CC_OP_DYNAMIC)
5945 gen_op_set_cc_op(s->cc_op);
5946 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5947 gen_helper_fcomi_ST0_FT0();
5948 gen_helper_fpop();
5949 s->cc_op = CC_OP_EFLAGS;
5950 break;
5951 case 0x10 ... 0x13: /* fcmovxx */
5952 case 0x18 ... 0x1b:
5953 {
5954 int op1, l1;
5955 static const uint8_t fcmov_cc[8] = {
5956 (JCC_B << 1),
5957 (JCC_Z << 1),
5958 (JCC_BE << 1),
5959 (JCC_P << 1),
5960 };
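/* gen_jcc1 branches over the fmov, so op1 must be the inverted fcmov
   condition: bit 0 of a jcc code selects the negated test, hence the ^ 1 */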
5961 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
5962 l1 = gen_new_label();
5963 gen_jcc1(s, s->cc_op, op1, l1);
5964 gen_helper_fmov_ST0_STN(tcg_const_i32(opreg));
5965 gen_set_label(l1);
5966 }
5967 break;
5968 default:
5969 goto illegal_op;
5970 }
5971 }
5972 break;
5973 /************************/
5974 /* string ops */
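/* the rep forms emit one iteration that decrements (E)CX and then jumps
   back to the instruction itself, re-executing it until (E)CX reaches 0 */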
5976 case 0xa4: /* movsS */
5977 case 0xa5:
5978 if ((b & 1) == 0)
5979 ot = OT_BYTE;
5980 else
5981 ot = dflag + OT_WORD;
5983 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5984 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5985 } else {
5986 gen_movs(s, ot);
5987 }
5988 break;
5990 case 0xaa: /* stosS */
5991 case 0xab:
5992 if ((b & 1) == 0)
5993 ot = OT_BYTE;
5994 else
5995 ot = dflag + OT_WORD;
5997 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5998 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5999 } else {
6000 gen_stos(s, ot);
6001 }
6002 break;
6003 case 0xac: /* lodsS */
6004 case 0xad:
6005 if ((b & 1) == 0)
6006 ot = OT_BYTE;
6007 else
6008 ot = dflag + OT_WORD;
6009 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6010 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6011 } else {
6012 gen_lods(s, ot);
6013 }
6014 break;
6015 case 0xae: /* scasS */
6016 case 0xaf:
6017 if ((b & 1) == 0)
6018 ot = OT_BYTE;
6019 else
6020 ot = dflag + OT_WORD;
6021 if (prefixes & PREFIX_REPNZ) {
6022 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6023 } else if (prefixes & PREFIX_REPZ) {
6024 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6025 } else {
6026 gen_scas(s, ot);
6027 s->cc_op = CC_OP_SUBB + ot;
6028 }
6029 break;
6031 case 0xa6: /* cmpsS */
6032 case 0xa7:
6033 if ((b & 1) == 0)
6034 ot = OT_BYTE;
6035 else
6036 ot = dflag + OT_WORD;
6037 if (prefixes & PREFIX_REPNZ) {
6038 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6039 } else if (prefixes & PREFIX_REPZ) {
6040 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6041 } else {
6042 gen_cmps(s, ot);
6043 s->cc_op = CC_OP_SUBB + ot;
6044 }
6045 break;
6046 case 0x6c: /* insS */
6047 case 0x6d:
6048 if ((b & 1) == 0)
6049 ot = OT_BYTE;
6050 else
6051 ot = dflag ? OT_LONG : OT_WORD;
6052 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6053 gen_op_andl_T0_ffff();
6054 gen_check_io(s, ot, pc_start - s->cs_base,
6055 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6056 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6057 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6058 } else {
6059 gen_ins(s, ot);
6060 if (use_icount) {
6061 gen_jmp(s, s->pc - s->cs_base);
6062 }
6063 }
6064 break;
6065 case 0x6e: /* outsS */
6066 case 0x6f:
6067 if ((b & 1) == 0)
6068 ot = OT_BYTE;
6069 else
6070 ot = dflag ? OT_LONG : OT_WORD;
6071 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6072 gen_op_andl_T0_ffff();
6073 gen_check_io(s, ot, pc_start - s->cs_base,
6074 svm_is_rep(prefixes) | 4);
6075 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6076 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6077 } else {
6078 gen_outs(s, ot);
6079 if (use_icount) {
6080 gen_jmp(s, s->pc - s->cs_base);
6081 }
6082 }
6083 break;
6085 /************************/
6086 /* port I/O */
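/* with icount enabled the actual access is bracketed by gen_io_start()/
   gen_io_end() and the TB is ended right after, keeping the count exact */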
6088 case 0xe4:
6089 case 0xe5:
6090 if ((b & 1) == 0)
6091 ot = OT_BYTE;
6092 else
6093 ot = dflag ? OT_LONG : OT_WORD;
6094 val = ldub_code(s->pc++);
6095 gen_op_movl_T0_im(val);
6096 gen_check_io(s, ot, pc_start - s->cs_base,
6097 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6098 if (use_icount)
6099 gen_io_start();
6100 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6101 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6102 gen_op_mov_reg_T1(ot, R_EAX);
6103 if (use_icount) {
6104 gen_io_end();
6105 gen_jmp(s, s->pc - s->cs_base);
6106 }
6107 break;
6108 case 0xe6:
6109 case 0xe7:
6110 if ((b & 1) == 0)
6111 ot = OT_BYTE;
6112 else
6113 ot = dflag ? OT_LONG : OT_WORD;
6114 val = ldub_code(s->pc++);
6115 gen_op_movl_T0_im(val);
6116 gen_check_io(s, ot, pc_start - s->cs_base,
6117 svm_is_rep(prefixes));
6118 gen_op_mov_TN_reg(ot, 1, R_EAX);
6120 if (use_icount)
6121 gen_io_start();
6122 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6123 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6124 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6125 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6126 if (use_icount) {
6127 gen_io_end();
6128 gen_jmp(s, s->pc - s->cs_base);
6129 }
6130 break;
6131 case 0xec:
6132 case 0xed:
6133 if ((b & 1) == 0)
6134 ot = OT_BYTE;
6135 else
6136 ot = dflag ? OT_LONG : OT_WORD;
6137 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6138 gen_op_andl_T0_ffff();
6139 gen_check_io(s, ot, pc_start - s->cs_base,
6140 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6141 if (use_icount)
6142 gen_io_start();
6143 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6144 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6145 gen_op_mov_reg_T1(ot, R_EAX);
6146 if (use_icount) {
6147 gen_io_end();
6148 gen_jmp(s, s->pc - s->cs_base);
6149 }
6150 break;
6151 case 0xee:
6152 case 0xef:
6153 if ((b & 1) == 0)
6154 ot = OT_BYTE;
6155 else
6156 ot = dflag ? OT_LONG : OT_WORD;
6157 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6158 gen_op_andl_T0_ffff();
6159 gen_check_io(s, ot, pc_start - s->cs_base,
6160 svm_is_rep(prefixes));
6161 gen_op_mov_TN_reg(ot, 1, R_EAX);
6163 if (use_icount)
6164 gen_io_start();
6165 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6166 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6167 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6168 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6169 if (use_icount) {
6170 gen_io_end();
6171 gen_jmp(s, s->pc - s->cs_base);
6172 }
6173 break;
6175 /************************/
6176 /* control */
6177 case 0xc2: /* ret im */
6178 val = ldsw_code(s->pc);
6179 s->pc += 2;
6180 gen_pop_T0(s);
6181 if (CODE64(s) && s->dflag)
6182 s->dflag = 2;
6183 gen_stack_update(s, val + (2 << s->dflag));
6184 if (s->dflag == 0)
6185 gen_op_andl_T0_ffff();
6186 gen_op_jmp_T0();
6187 gen_eob(s);
6188 break;
6189 case 0xc3: /* ret */
6190 gen_pop_T0(s);
6191 gen_pop_update(s);
6192 if (s->dflag == 0)
6193 gen_op_andl_T0_ffff();
6194 gen_op_jmp_T0();
6195 gen_eob(s);
6196 break;
6197 case 0xca: /* lret im */
6198 val = ldsw_code(s->pc);
6199 s->pc += 2;
6200 do_lret:
6201 if (s->pe && !s->vm86) {
6202 if (s->cc_op != CC_OP_DYNAMIC)
6203 gen_op_set_cc_op(s->cc_op);
6204 gen_jmp_im(pc_start - s->cs_base);
6205 gen_helper_lret_protected(tcg_const_i32(s->dflag),
6206 tcg_const_i32(val));
6207 } else {
6208 gen_stack_A0(s);
6209 /* pop offset */
6210 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6211 if (s->dflag == 0)
6212 gen_op_andl_T0_ffff();
6213 /* NOTE: keeping EIP updated is not a problem in case of
6214 exception */
6215 gen_op_jmp_T0();
6216 /* pop selector */
6217 gen_op_addl_A0_im(2 << s->dflag);
6218 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6219 gen_op_movl_seg_T0_vm(R_CS);
6220 /* add stack offset */
6221 gen_stack_update(s, val + (4 << s->dflag));
6222 }
6223 gen_eob(s);
6224 break;
6225 case 0xcb: /* lret */
6226 val = 0;
6227 goto do_lret;
6228 case 0xcf: /* iret */
6229 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6230 if (!s->pe) {
6231 /* real mode */
6232 gen_helper_iret_real(tcg_const_i32(s->dflag));
6233 s->cc_op = CC_OP_EFLAGS;
6234 } else if (s->vm86) {
6235 if (s->iopl != 3) {
6236 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6237 } else {
6238 gen_helper_iret_real(tcg_const_i32(s->dflag));
6239 s->cc_op = CC_OP_EFLAGS;
6240 }
6241 } else {
6242 if (s->cc_op != CC_OP_DYNAMIC)
6243 gen_op_set_cc_op(s->cc_op);
6244 gen_jmp_im(pc_start - s->cs_base);
6245 gen_helper_iret_protected(tcg_const_i32(s->dflag),
6246 tcg_const_i32(s->pc - s->cs_base));
6247 s->cc_op = CC_OP_EFLAGS;
6248 }
6249 gen_eob(s);
6250 break;
6251 case 0xe8: /* call im */
6252 {
6253 if (dflag)
6254 tval = (int32_t)insn_get(s, OT_LONG);
6255 else
6256 tval = (int16_t)insn_get(s, OT_WORD);
6257 next_eip = s->pc - s->cs_base;
6258 tval += next_eip;
6259 if (s->dflag == 0)
6260 tval &= 0xffff;
6261 else if(!CODE64(s))
6262 tval &= 0xffffffff;
6263 gen_movtl_T0_im(next_eip);
6264 gen_push_T0(s);
6265 gen_jmp(s, tval);
6266 }
6267 break;
6268 case 0x9a: /* lcall im */
6269 {
6270 unsigned int selector, offset;
6272 if (CODE64(s))
6273 goto illegal_op;
6274 ot = dflag ? OT_LONG : OT_WORD;
6275 offset = insn_get(s, ot);
6276 selector = insn_get(s, OT_WORD);
6278 gen_op_movl_T0_im(selector);
6279 gen_op_movl_T1_imu(offset);
6280 }
6281 goto do_lcall;
6282 case 0xe9: /* jmp im */
6283 if (dflag)
6284 tval = (int32_t)insn_get(s, OT_LONG);
6285 else
6286 tval = (int16_t)insn_get(s, OT_WORD);
6287 tval += s->pc - s->cs_base;
6288 if (s->dflag == 0)
6289 tval &= 0xffff;
6290 else if(!CODE64(s))
6291 tval &= 0xffffffff;
6292 gen_jmp(s, tval);
6293 break;
6294 case 0xea: /* ljmp im */
6295 {
6296 unsigned int selector, offset;
6298 if (CODE64(s))
6299 goto illegal_op;
6300 ot = dflag ? OT_LONG : OT_WORD;
6301 offset = insn_get(s, ot);
6302 selector = insn_get(s, OT_WORD);
6304 gen_op_movl_T0_im(selector);
6305 gen_op_movl_T1_imu(offset);
6306 }
6307 goto do_ljmp;
6308 case 0xeb: /* jmp Jb */
6309 tval = (int8_t)insn_get(s, OT_BYTE);
6310 tval += s->pc - s->cs_base;
6311 if (s->dflag == 0)
6312 tval &= 0xffff;
6313 gen_jmp(s, tval);
6314 break;
6315 case 0x70 ... 0x7f: /* jcc Jb */
6316 tval = (int8_t)insn_get(s, OT_BYTE);
6317 goto do_jcc;
6318 case 0x180 ... 0x18f: /* jcc Jv */
6319 if (dflag) {
6320 tval = (int32_t)insn_get(s, OT_LONG);
6321 } else {
6322 tval = (int16_t)insn_get(s, OT_WORD);
6323 }
6324 do_jcc:
6325 next_eip = s->pc - s->cs_base;
6326 tval += next_eip;
6327 if (s->dflag == 0)
6328 tval &= 0xffff;
6329 gen_jcc(s, b, tval, next_eip);
6330 break;
6332 case 0x190 ... 0x19f: /* setcc Gv */
6333 modrm = ldub_code(s->pc++);
6334 gen_setcc(s, b);
6335 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6336 break;
6337 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6338 {
6339 int l1;
6340 TCGv t0;
6342 ot = dflag + OT_WORD;
6343 modrm = ldub_code(s->pc++);
6344 reg = ((modrm >> 3) & 7) | rex_r;
6345 mod = (modrm >> 6) & 3;
6346 t0 = tcg_temp_local_new();
6347 if (mod != 3) {
6348 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6349 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6350 } else {
6351 rm = (modrm & 7) | REX_B(s);
6352 gen_op_mov_v_reg(ot, t0, rm);
6353 }
6354 #ifdef TARGET_X86_64
6355 if (ot == OT_LONG) {
6356 /* XXX: specific Intel behaviour ? */
6357 l1 = gen_new_label();
6358 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6359 tcg_gen_mov_tl(cpu_regs[reg], t0);
6360 gen_set_label(l1);
6361 tcg_gen_ext32u_tl(cpu_regs[reg], cpu_regs[reg]);
6362 } else
6363 #endif
6365 l1 = gen_new_label();
6366 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6367 gen_op_mov_reg_v(ot, reg, t0);
6368 gen_set_label(l1);
6370 tcg_temp_free(t0);
6372 break;
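/* For orientation (not translator code): the cmov paths above reduce to
   the sketch below. In 64-bit mode a 32-bit destination is zero-extended
   even when the condition is false, which is what the unconditional
   ext32u in the TARGET_X86_64 branch models (the XXX above notes this
   may be Intel-specific). The names cond/dst/src/mode_is_64bit are
   hypothetical, for exposition only. */
#if 0
if (cond)
    dst = src;
if (mode_is_64bit && ot == OT_LONG)
    dst &= 0xffffffffUL;    /* upper half cleared unconditionally */
#endif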
6374 /************************/
6375 /* flags */
6376 case 0x9c: /* pushf */
6377 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6378 if (s->vm86 && s->iopl != 3) {
6379 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6380 } else {
6381 if (s->cc_op != CC_OP_DYNAMIC)
6382 gen_op_set_cc_op(s->cc_op);
6383 gen_helper_read_eflags(cpu_T[0]);
6384 gen_push_T0(s);
6386 break;
6387 case 0x9d: /* popf */
6388 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6389 if (s->vm86 && s->iopl != 3) {
6390 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6391 } else {
6392 gen_pop_T0(s);
6393 if (s->cpl == 0) {
6394 if (s->dflag) {
6395 gen_helper_write_eflags(cpu_T[0],
6396 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6397 } else {
6398 gen_helper_write_eflags(cpu_T[0],
6399 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6400 }
6401 } else {
6402 if (s->cpl <= s->iopl) {
6403 if (s->dflag) {
6404 gen_helper_write_eflags(cpu_T[0],
6405 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6406 } else {
6407 gen_helper_write_eflags(cpu_T[0],
6408 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6409 }
6410 } else {
6411 if (s->dflag) {
6412 gen_helper_write_eflags(cpu_T[0],
6413 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6414 } else {
6415 gen_helper_write_eflags(cpu_T[0],
6416 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6417 }
6418 }
6419 }
6420 gen_pop_update(s);
6421 s->cc_op = CC_OP_EFLAGS;
6422 /* abort translation because TF flag may change */
6423 gen_jmp_im(s->pc - s->cs_base);
6424 gen_eob(s);
6425 }
6426 break;
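/* Summary of the popf masks above, per privilege case: CPL 0 may update
   TF/AC/ID/NT/IF/IOPL, CPL <= IOPL may update TF/AC/ID/NT/IF but not
   IOPL, and otherwise neither IF nor IOPL is writable; the 16-bit forms
   (dflag == 0) apply the same masks truncated to the low 16 bits. */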
6427 case 0x9e: /* sahf */
6428 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6429 goto illegal_op;
6430 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6431 if (s->cc_op != CC_OP_DYNAMIC)
6432 gen_op_set_cc_op(s->cc_op);
6433 gen_compute_eflags(cpu_cc_src);
6434 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6435 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6436 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6437 s->cc_op = CC_OP_EFLAGS;
6438 break;
6439 case 0x9f: /* lahf */
6440 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6441 goto illegal_op;
6442 if (s->cc_op != CC_OP_DYNAMIC)
6443 gen_op_set_cc_op(s->cc_op);
6444 gen_compute_eflags(cpu_T[0]);
6445 /* Note: gen_compute_eflags() only gives the condition codes */
6446 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6447 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6448 break;
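/* For reference: lahf loads AH with SF:ZF:0:AF:0:PF:1:CF, which is why
   bit 1 (the always-one flag bit) is or'ed into cpu_T[0] above, while
   sahf (0x9e) keeps only S/Z/A/P/C from AH and preserves O in cc_src. */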
6449 case 0xf5: /* cmc */
6450 if (s->cc_op != CC_OP_DYNAMIC)
6451 gen_op_set_cc_op(s->cc_op);
6452 gen_compute_eflags(cpu_cc_src);
6453 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6454 s->cc_op = CC_OP_EFLAGS;
6455 break;
6456 case 0xf8: /* clc */
6457 if (s->cc_op != CC_OP_DYNAMIC)
6458 gen_op_set_cc_op(s->cc_op);
6459 gen_compute_eflags(cpu_cc_src);
6460 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6461 s->cc_op = CC_OP_EFLAGS;
6462 break;
6463 case 0xf9: /* stc */
6464 if (s->cc_op != CC_OP_DYNAMIC)
6465 gen_op_set_cc_op(s->cc_op);
6466 gen_compute_eflags(cpu_cc_src);
6467 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6468 s->cc_op = CC_OP_EFLAGS;
6469 break;
6470 case 0xfc: /* cld */
6471 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6472 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6473 break;
6474 case 0xfd: /* std */
6475 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6476 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6477 break;
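/* df is kept in the env as +1/-1 rather than as an EFLAGS bit so the
   string operations can advance the index registers by a signed stride.
   A rough sketch of how such a stride could be applied (illustrative
   only; the real string paths build the equivalent TCG ops): */
#if 0
env->regs[R_ESI] += env->df << ot;   /* ot = 0/1/2 for b/w/l elements */
#endif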
6479 /************************/
6480 /* bit operations */
6481 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6482 ot = dflag + OT_WORD;
6483 modrm = ldub_code(s->pc++);
6484 op = (modrm >> 3) & 7;
6485 mod = (modrm >> 6) & 3;
6486 rm = (modrm & 7) | REX_B(s);
6487 if (mod != 3) {
6488 s->rip_offset = 1;
6489 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6490 gen_op_ld_T0_A0(ot + s->mem_index);
6491 } else {
6492 gen_op_mov_TN_reg(ot, 0, rm);
6493 }
6494 /* load shift */
6495 val = ldub_code(s->pc++);
6496 gen_op_movl_T1_im(val);
6497 if (op < 4)
6498 goto illegal_op;
6499 op -= 4;
6500 goto bt_op;
6501 case 0x1a3: /* bt Gv, Ev */
6502 op = 0;
6503 goto do_btx;
6504 case 0x1ab: /* bts */
6505 op = 1;
6506 goto do_btx;
6507 case 0x1b3: /* btr */
6508 op = 2;
6509 goto do_btx;
6510 case 0x1bb: /* btc */
6511 op = 3;
6512 do_btx:
6513 ot = dflag + OT_WORD;
6514 modrm = ldub_code(s->pc++);
6515 reg = ((modrm >> 3) & 7) | rex_r;
6516 mod = (modrm >> 6) & 3;
6517 rm = (modrm & 7) | REX_B(s);
6518 gen_op_mov_TN_reg(OT_LONG, 1, reg);
6519 if (mod != 3) {
6520 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6521 /* specific case: we need to add a displacement */
6522 gen_exts(ot, cpu_T[1]);
6523 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6524 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6525 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
6526 gen_op_ld_T0_A0(ot + s->mem_index);
6527 } else {
6528 gen_op_mov_TN_reg(ot, 0, rm);
6529 }
6530 bt_op:
6531 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6532 switch(op) {
6533 case 0:
6534 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6535 tcg_gen_movi_tl(cpu_cc_dst, 0);
6536 break;
6537 case 1:
6538 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6539 tcg_gen_movi_tl(cpu_tmp0, 1);
6540 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6541 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6542 break;
6543 case 2:
6544 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6545 tcg_gen_movi_tl(cpu_tmp0, 1);
6546 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6547 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6548 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6549 break;
6550 default:
6551 case 3:
6552 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6553 tcg_gen_movi_tl(cpu_tmp0, 1);
6554 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6555 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6556 break;
6557 }
6558 s->cc_op = CC_OP_SARB + ot;
6559 if (op != 0) {
6560 if (mod != 3)
6561 gen_op_st_T0_A0(ot + s->mem_index);
6562 else
6563 gen_op_mov_reg_T0(ot, rm);
6564 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6565 tcg_gen_movi_tl(cpu_cc_dst, 0);
6566 }
6567 break;
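/* Illustration (not translator code): the register form of bt/bts/btr/btc
   above reduces to the sketch below; bt_sketch, bt_width (16/32/64) and
   cf are hypothetical names for exposition. The memory form additionally
   adds the bit offset's sign-extended word index to the address first. */
#if 0
static unsigned long long bt_sketch(unsigned long long val, unsigned bit,
                                    int op, int bt_width, int *cf)
{
    unsigned long long mask;
    bit &= bt_width - 1;            /* same masking as tcg_gen_andi_tl above */
    mask = 1ULL << bit;
    *cf = (val >> bit) & 1;         /* CF receives the selected bit */
    switch (op) {
    case 1: val |= mask; break;     /* bts */
    case 2: val &= ~mask; break;    /* btr */
    case 3: val ^= mask; break;     /* btc */
    }
    return val;
}
#endif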
6568 case 0x1bc: /* bsf */
6569 case 0x1bd: /* bsr */
6571 int label1;
6572 TCGv t0;
6574 ot = dflag + OT_WORD;
6575 modrm = ldub_code(s->pc++);
6576 reg = ((modrm >> 3) & 7) | rex_r;
6577 gen_ldst_modrm(s,modrm, ot, OR_TMP0, 0);
6578 gen_extu(ot, cpu_T[0]);
6579 t0 = tcg_temp_local_new();
6580 tcg_gen_mov_tl(t0, cpu_T[0]);
6581 if ((b & 1) && (prefixes & PREFIX_REPZ) &&
6582 (s->cpuid_ext3_features & CPUID_EXT3_ABM)) {
6583 switch(ot) {
6584 case OT_WORD: gen_helper_lzcnt(cpu_T[0], t0,
6585 tcg_const_i32(16)); break;
6586 case OT_LONG: gen_helper_lzcnt(cpu_T[0], t0,
6587 tcg_const_i32(32)); break;
6588 case OT_QUAD: gen_helper_lzcnt(cpu_T[0], t0,
6589 tcg_const_i32(64)); break;
6590 }
6591 gen_op_mov_reg_T0(ot, reg);
6592 } else {
6593 label1 = gen_new_label();
6594 tcg_gen_movi_tl(cpu_cc_dst, 0);
6595 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6596 if (b & 1) {
6597 gen_helper_bsr(cpu_T[0], t0);
6598 } else {
6599 gen_helper_bsf(cpu_T[0], t0);
6600 }
6601 gen_op_mov_reg_T0(ot, reg);
6602 tcg_gen_movi_tl(cpu_cc_dst, 1);
6603 gen_set_label(label1);
6604 tcg_gen_discard_tl(cpu_cc_src);
6605 s->cc_op = CC_OP_LOGICB + ot;
6606 }
6607 tcg_temp_free(t0);
6608 }
6609 break;
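/* Rough equivalences for the paths above (illustrative only): bsf yields
   the index of the lowest set bit, bsr the index of the highest; both
   leave the destination unchanged and set ZF when the source is zero,
   which is what the cc_dst = 0 / brcondi / cc_dst = 1 scheme models.
   lzcnt, by contrast, is defined for a zero input (result = operand
   size). In host-C terms, for a hypothetical nonzero unsigned long x: */
#if 0
int bsf_result = __builtin_ctzl(x);                        /* lowest set bit  */
int bsr_result = (8 * sizeof(x) - 1) - __builtin_clzl(x);  /* highest set bit */
#endif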
6610 /************************/
6611 /* bcd */
6612 case 0x27: /* daa */
6613 if (CODE64(s))
6614 goto illegal_op;
6615 if (s->cc_op != CC_OP_DYNAMIC)
6616 gen_op_set_cc_op(s->cc_op);
6617 gen_helper_daa();
6618 s->cc_op = CC_OP_EFLAGS;
6619 break;
6620 case 0x2f: /* das */
6621 if (CODE64(s))
6622 goto illegal_op;
6623 if (s->cc_op != CC_OP_DYNAMIC)
6624 gen_op_set_cc_op(s->cc_op);
6625 gen_helper_das();
6626 s->cc_op = CC_OP_EFLAGS;
6627 break;
6628 case 0x37: /* aaa */
6629 if (CODE64(s))
6630 goto illegal_op;
6631 if (s->cc_op != CC_OP_DYNAMIC)
6632 gen_op_set_cc_op(s->cc_op);
6633 gen_helper_aaa();
6634 s->cc_op = CC_OP_EFLAGS;
6635 break;
6636 case 0x3f: /* aas */
6637 if (CODE64(s))
6638 goto illegal_op;
6639 if (s->cc_op != CC_OP_DYNAMIC)
6640 gen_op_set_cc_op(s->cc_op);
6641 gen_helper_aas();
6642 s->cc_op = CC_OP_EFLAGS;
6643 break;
6644 case 0xd4: /* aam */
6645 if (CODE64(s))
6646 goto illegal_op;
6647 val = ldub_code(s->pc++);
6648 if (val == 0) {
6649 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6650 } else {
6651 gen_helper_aam(tcg_const_i32(val));
6652 s->cc_op = CC_OP_LOGICB;
6654 break;
6655 case 0xd5: /* aad */
6656 if (CODE64(s))
6657 goto illegal_op;
6658 val = ldub_code(s->pc++);
6659 gen_helper_aad(tcg_const_i32(val));
6660 s->cc_op = CC_OP_LOGICB;
6661 break;
6662 /************************/
6663 /* misc */
6664 case 0x90: /* nop */
6665 /* XXX: xchg + rex handling */
6666 /* XXX: correct lock test for all insn */
6667 if (prefixes & PREFIX_LOCK)
6668 goto illegal_op;
6669 if (prefixes & PREFIX_REPZ) {
6670 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6672 break;
6673 case 0x9b: /* fwait */
6674 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6675 (HF_MP_MASK | HF_TS_MASK)) {
6676 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6677 } else {
6678 if (s->cc_op != CC_OP_DYNAMIC)
6679 gen_op_set_cc_op(s->cc_op);
6680 gen_jmp_im(pc_start - s->cs_base);
6681 gen_helper_fwait();
6683 break;
6684 case 0xcc: /* int3 */
6685 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6686 break;
6687 case 0xcd: /* int N */
6688 val = ldub_code(s->pc++);
6689 if (s->vm86 && s->iopl != 3) {
6690 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6691 } else {
6692 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6694 break;
6695 case 0xce: /* into */
6696 if (CODE64(s))
6697 goto illegal_op;
6698 if (s->cc_op != CC_OP_DYNAMIC)
6699 gen_op_set_cc_op(s->cc_op);
6700 gen_jmp_im(pc_start - s->cs_base);
6701 gen_helper_into(tcg_const_i32(s->pc - pc_start));
6702 break;
6703 #ifdef WANT_ICEBP
6704 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6705 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6706 #if 1
6707 gen_debug(s, pc_start - s->cs_base);
6708 #else
6709 /* start debug */
6710 tb_flush(cpu_single_env);
6711 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6712 #endif
6713 break;
6714 #endif
6715 case 0xfa: /* cli */
6716 if (!s->vm86) {
6717 if (s->cpl <= s->iopl) {
6718 gen_helper_cli();
6719 } else {
6720 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6721 }
6722 } else {
6723 if (s->iopl == 3) {
6724 gen_helper_cli();
6725 } else {
6726 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6727 }
6728 }
6729 break;
6730 case 0xfb: /* sti */
6731 if (!s->vm86) {
6732 if (s->cpl <= s->iopl) {
6733 gen_sti:
6734 gen_helper_sti();
6735 /* interrupts are enabled only after the insn following sti */
6736 /* if several sti insns occur in a row, only the
6737 _first_ one sets the inhibit flag */
6738 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6739 gen_helper_set_inhibit_irq();
6740 /* give a chance to handle pending irqs */
6741 gen_jmp_im(s->pc - s->cs_base);
6742 gen_eob(s);
6743 } else {
6744 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6745 }
6746 } else {
6747 if (s->iopl == 3) {
6748 goto gen_sti;
6749 } else {
6750 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6751 }
6752 }
6753 break;
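/* Note on the interrupt shadow handled above: set_inhibit_irq makes the
   cpu skip the interrupt check for exactly one insn, and the immediate
   gen_jmp_im/gen_eob ends the block so the following insn starts a new
   TB in which pending irqs can then be taken. */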
6754 case 0x62: /* bound */
6755 if (CODE64(s))
6756 goto illegal_op;
6757 ot = dflag ? OT_LONG : OT_WORD;
6758 modrm = ldub_code(s->pc++);
6759 reg = (modrm >> 3) & 7;
6760 mod = (modrm >> 6) & 3;
6761 if (mod == 3)
6762 goto illegal_op;
6763 gen_op_mov_TN_reg(ot, 0, reg);
6764 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6765 gen_jmp_im(pc_start - s->cs_base);
6766 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6767 if (ot == OT_WORD)
6768 gen_helper_boundw(cpu_A0, cpu_tmp2_i32);
6769 else
6770 gen_helper_boundl(cpu_A0, cpu_tmp2_i32);
6771 break;
6772 case 0x1c8 ... 0x1cf: /* bswap reg */
6773 reg = (b & 7) | REX_B(s);
6774 #ifdef TARGET_X86_64
6775 if (dflag == 2) {
6776 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6777 tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
6778 gen_op_mov_reg_T0(OT_QUAD, reg);
6779 } else
6780 #endif
6782 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6783 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
6784 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
6785 gen_op_mov_reg_T0(OT_LONG, reg);
6787 break;
6788 case 0xd6: /* salc */
6789 if (CODE64(s))
6790 goto illegal_op;
6791 if (s->cc_op != CC_OP_DYNAMIC)
6792 gen_op_set_cc_op(s->cc_op);
6793 gen_compute_eflags_c(cpu_T[0]);
6794 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6795 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6796 break;
6797 case 0xe0: /* loopnz */
6798 case 0xe1: /* loopz */
6799 case 0xe2: /* loop */
6800 case 0xe3: /* jecxz */
6802 int l1, l2, l3;
6804 tval = (int8_t)insn_get(s, OT_BYTE);
6805 next_eip = s->pc - s->cs_base;
6806 tval += next_eip;
6807 if (s->dflag == 0)
6808 tval &= 0xffff;
6810 l1 = gen_new_label();
6811 l2 = gen_new_label();
6812 l3 = gen_new_label();
6813 b &= 3;
6814 switch(b) {
6815 case 0: /* loopnz */
6816 case 1: /* loopz */
6817 if (s->cc_op != CC_OP_DYNAMIC)
6818 gen_op_set_cc_op(s->cc_op);
6819 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6820 gen_op_jz_ecx(s->aflag, l3);
6821 gen_compute_eflags(cpu_tmp0);
6822 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6823 if (b == 0) {
6824 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6825 } else {
6826 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6828 break;
6829 case 2: /* loop */
6830 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6831 gen_op_jnz_ecx(s->aflag, l1);
6832 break;
6833 default:
6834 case 3: /* jcxz */
6835 gen_op_jz_ecx(s->aflag, l1);
6836 break;
6839 gen_set_label(l3);
6840 gen_jmp_im(next_eip);
6841 tcg_gen_br(l2);
6843 gen_set_label(l1);
6844 gen_jmp_im(tval);
6845 gen_set_label(l2);
6846 gen_eob(s);
6847 }
6848 break;
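/* Illustration of the label scheme above (not translator code): all four
   opcodes test or decrement ECX (masked to the address size) and branch.
   A C rendering; count, zf and taken are hypothetical stand-ins: */
#if 0
target_ulong count = env->regs[R_ECX];   /* masked to the aflag width */
int zf = 0, taken;                        /* zf: the current ZF value */
if ((b & 3) != 3)
    count--;                              /* jecxz does not decrement */
switch (b & 3) {
case 0: taken = count != 0 && !zf; break; /* loopnz */
case 1: taken = count != 0 && zf;  break; /* loopz  */
case 2: taken = count != 0;        break; /* loop   */
default: taken = count == 0;       break; /* jecxz  */
}
#endif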
6849 case 0x130: /* wrmsr */
6850 case 0x132: /* rdmsr */
6851 if (s->cpl != 0) {
6852 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6853 } else {
6854 if (s->cc_op != CC_OP_DYNAMIC)
6855 gen_op_set_cc_op(s->cc_op);
6856 gen_jmp_im(pc_start - s->cs_base);
6857 if (b & 2) {
6858 gen_helper_rdmsr();
6859 } else {
6860 gen_helper_wrmsr();
6863 break;
6864 case 0x131: /* rdtsc */
6865 if (s->cc_op != CC_OP_DYNAMIC)
6866 gen_op_set_cc_op(s->cc_op);
6867 gen_jmp_im(pc_start - s->cs_base);
6868 if (use_icount)
6869 gen_io_start();
6870 gen_helper_rdtsc();
6871 if (use_icount) {
6872 gen_io_end();
6873 gen_jmp(s, s->pc - s->cs_base);
6875 break;
6876 case 0x133: /* rdpmc */
6877 if (s->cc_op != CC_OP_DYNAMIC)
6878 gen_op_set_cc_op(s->cc_op);
6879 gen_jmp_im(pc_start - s->cs_base);
6880 gen_helper_rdpmc();
6881 break;
6882 case 0x134: /* sysenter */
6884 /* for Intel CPUs, SYSENTER remains valid in 64-bit mode */
6884 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6885 goto illegal_op;
6886 if (!s->pe) {
6887 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6888 } else {
6889 if (s->cc_op != CC_OP_DYNAMIC) {
6890 gen_op_set_cc_op(s->cc_op);
6891 s->cc_op = CC_OP_DYNAMIC;
6893 gen_jmp_im(pc_start - s->cs_base);
6894 gen_helper_sysenter();
6895 gen_eob(s);
6897 break;
6898 case 0x135: /* sysexit */
6900 /* for Intel CPUs, SYSEXIT remains valid in 64-bit mode */
6900 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6901 goto illegal_op;
6902 if (!s->pe) {
6903 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6904 } else {
6905 if (s->cc_op != CC_OP_DYNAMIC) {
6906 gen_op_set_cc_op(s->cc_op);
6907 s->cc_op = CC_OP_DYNAMIC;
6909 gen_jmp_im(pc_start - s->cs_base);
6910 gen_helper_sysexit(tcg_const_i32(dflag));
6911 gen_eob(s);
6913 break;
6914 #ifdef TARGET_X86_64
6915 case 0x105: /* syscall */
6916 /* XXX: is it usable in real mode ? */
6917 if (s->cc_op != CC_OP_DYNAMIC) {
6918 gen_op_set_cc_op(s->cc_op);
6919 s->cc_op = CC_OP_DYNAMIC;
6921 gen_jmp_im(pc_start - s->cs_base);
6922 gen_helper_syscall(tcg_const_i32(s->pc - pc_start));
6923 gen_eob(s);
6924 break;
6925 case 0x107: /* sysret */
6926 if (!s->pe) {
6927 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6928 } else {
6929 if (s->cc_op != CC_OP_DYNAMIC) {
6930 gen_op_set_cc_op(s->cc_op);
6931 s->cc_op = CC_OP_DYNAMIC;
6933 gen_jmp_im(pc_start - s->cs_base);
6934 gen_helper_sysret(tcg_const_i32(s->dflag));
6935 /* condition codes are modified only in long mode */
6936 if (s->lma)
6937 s->cc_op = CC_OP_EFLAGS;
6938 gen_eob(s);
6940 break;
6941 #endif
6942 case 0x1a2: /* cpuid */
6943 if (s->cc_op != CC_OP_DYNAMIC)
6944 gen_op_set_cc_op(s->cc_op);
6945 gen_jmp_im(pc_start - s->cs_base);
6946 gen_helper_cpuid();
6947 break;
6948 case 0xf4: /* hlt */
6949 if (s->cpl != 0) {
6950 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6951 } else {
6952 if (s->cc_op != CC_OP_DYNAMIC)
6953 gen_op_set_cc_op(s->cc_op);
6954 gen_jmp_im(pc_start - s->cs_base);
6955 gen_helper_hlt(tcg_const_i32(s->pc - pc_start));
6956 s->is_jmp = 3;
6958 break;
6959 case 0x100:
6960 modrm = ldub_code(s->pc++);
6961 mod = (modrm >> 6) & 3;
6962 op = (modrm >> 3) & 7;
6963 switch(op) {
6964 case 0: /* sldt */
6965 if (!s->pe || s->vm86)
6966 goto illegal_op;
6967 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6968 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6969 ot = OT_WORD;
6970 if (mod == 3)
6971 ot += s->dflag;
6972 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6973 break;
6974 case 2: /* lldt */
6975 if (!s->pe || s->vm86)
6976 goto illegal_op;
6977 if (s->cpl != 0) {
6978 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6979 } else {
6980 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6981 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6982 gen_jmp_im(pc_start - s->cs_base);
6983 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6984 gen_helper_lldt(cpu_tmp2_i32);
6986 break;
6987 case 1: /* str */
6988 if (!s->pe || s->vm86)
6989 goto illegal_op;
6990 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6991 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6992 ot = OT_WORD;
6993 if (mod == 3)
6994 ot += s->dflag;
6995 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6996 break;
6997 case 3: /* ltr */
6998 if (!s->pe || s->vm86)
6999 goto illegal_op;
7000 if (s->cpl != 0) {
7001 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7002 } else {
7003 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7004 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7005 gen_jmp_im(pc_start - s->cs_base);
7006 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7007 gen_helper_ltr(cpu_tmp2_i32);
7009 break;
7010 case 4: /* verr */
7011 case 5: /* verw */
7012 if (!s->pe || s->vm86)
7013 goto illegal_op;
7014 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7015 if (s->cc_op != CC_OP_DYNAMIC)
7016 gen_op_set_cc_op(s->cc_op);
7017 if (op == 4)
7018 gen_helper_verr(cpu_T[0]);
7019 else
7020 gen_helper_verw(cpu_T[0]);
7021 s->cc_op = CC_OP_EFLAGS;
7022 break;
7023 default:
7024 goto illegal_op;
7026 break;
7027 case 0x101:
7028 modrm = ldub_code(s->pc++);
7029 mod = (modrm >> 6) & 3;
7030 op = (modrm >> 3) & 7;
7031 rm = modrm & 7;
7032 switch(op) {
7033 case 0: /* sgdt */
7034 if (mod == 3)
7035 goto illegal_op;
7036 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7037 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7038 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7039 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7040 gen_add_A0_im(s, 2);
7041 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7042 if (!s->dflag)
7043 gen_op_andl_T0_im(0xffffff);
7044 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7045 break;
7046 case 1:
7047 if (mod == 3) {
7048 switch (rm) {
7049 case 0: /* monitor */
7050 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7051 s->cpl != 0)
7052 goto illegal_op;
7053 if (s->cc_op != CC_OP_DYNAMIC)
7054 gen_op_set_cc_op(s->cc_op);
7055 gen_jmp_im(pc_start - s->cs_base);
7056 #ifdef TARGET_X86_64
7057 if (s->aflag == 2) {
7058 gen_op_movq_A0_reg(R_EAX);
7059 } else
7060 #endif
7062 gen_op_movl_A0_reg(R_EAX);
7063 if (s->aflag == 0)
7064 gen_op_andl_A0_ffff();
7066 gen_add_A0_ds_seg(s);
7067 gen_helper_monitor(cpu_A0);
7068 break;
7069 case 1: /* mwait */
7070 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7071 s->cpl != 0)
7072 goto illegal_op;
7073 if (s->cc_op != CC_OP_DYNAMIC) {
7074 gen_op_set_cc_op(s->cc_op);
7075 s->cc_op = CC_OP_DYNAMIC;
7077 gen_jmp_im(pc_start - s->cs_base);
7078 gen_helper_mwait(tcg_const_i32(s->pc - pc_start));
7079 gen_eob(s);
7080 break;
7081 default:
7082 goto illegal_op;
7084 } else { /* sidt */
7085 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7086 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7087 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7088 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7089 gen_add_A0_im(s, 2);
7090 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7091 if (!s->dflag)
7092 gen_op_andl_T0_im(0xffffff);
7093 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7095 break;
7096 case 2: /* lgdt */
7097 case 3: /* lidt */
7098 if (mod == 3) {
7099 if (s->cc_op != CC_OP_DYNAMIC)
7100 gen_op_set_cc_op(s->cc_op);
7101 gen_jmp_im(pc_start - s->cs_base);
7102 switch(rm) {
7103 case 0: /* VMRUN */
7104 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7105 goto illegal_op;
7106 if (s->cpl != 0) {
7107 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7108 break;
7109 } else {
7110 gen_helper_vmrun(tcg_const_i32(s->aflag),
7111 tcg_const_i32(s->pc - pc_start));
7112 tcg_gen_exit_tb(0);
7113 s->is_jmp = 3;
7115 break;
7116 case 1: /* VMMCALL */
7117 if (!(s->flags & HF_SVME_MASK))
7118 goto illegal_op;
7119 gen_helper_vmmcall();
7120 break;
7121 case 2: /* VMLOAD */
7122 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7123 goto illegal_op;
7124 if (s->cpl != 0) {
7125 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7126 break;
7127 } else {
7128 gen_helper_vmload(tcg_const_i32(s->aflag));
7130 break;
7131 case 3: /* VMSAVE */
7132 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7133 goto illegal_op;
7134 if (s->cpl != 0) {
7135 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7136 break;
7137 } else {
7138 gen_helper_vmsave(tcg_const_i32(s->aflag));
7140 break;
7141 case 4: /* STGI */
7142 if ((!(s->flags & HF_SVME_MASK) &&
7143 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7144 !s->pe)
7145 goto illegal_op;
7146 if (s->cpl != 0) {
7147 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7148 break;
7149 } else {
7150 gen_helper_stgi();
7152 break;
7153 case 5: /* CLGI */
7154 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7155 goto illegal_op;
7156 if (s->cpl != 0) {
7157 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7158 break;
7159 } else {
7160 gen_helper_clgi();
7162 break;
7163 case 6: /* SKINIT */
7164 if ((!(s->flags & HF_SVME_MASK) &&
7165 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7166 !s->pe)
7167 goto illegal_op;
7168 gen_helper_skinit();
7169 break;
7170 case 7: /* INVLPGA */
7171 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7172 goto illegal_op;
7173 if (s->cpl != 0) {
7174 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7175 break;
7176 } else {
7177 gen_helper_invlpga(tcg_const_i32(s->aflag));
7179 break;
7180 default:
7181 goto illegal_op;
7183 } else if (s->cpl != 0) {
7184 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7185 } else {
7186 gen_svm_check_intercept(s, pc_start,
7187 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7188 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7189 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7190 gen_add_A0_im(s, 2);
7191 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7192 if (!s->dflag)
7193 gen_op_andl_T0_im(0xffffff);
7194 if (op == 2) {
7195 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7196 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7197 } else {
7198 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7199 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7200 }
7201 }
7202 break;
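/* The lgdt/lidt load above (like the sgdt/sidt stores earlier) uses the
   classic descriptor-table image: a 16-bit limit followed by the base,
   with the base truncated to 24 bits when dflag == 0. A packed-struct
   sketch, illustrative only: */
#if 0
struct dt_image {
    uint16_t limit;
    target_ulong base;   /* only the low 24 bits when dflag == 0 */
} __attribute__((packed));
#endif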
7203 case 4: /* smsw */
7204 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7205 #if defined TARGET_X86_64 && defined HOST_WORDS_BIGENDIAN
7206 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
7207 #else
7208 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7209 #endif
7210 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7211 break;
7212 case 6: /* lmsw */
7213 if (s->cpl != 0) {
7214 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7215 } else {
7216 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7217 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7218 gen_helper_lmsw(cpu_T[0]);
7219 gen_jmp_im(s->pc - s->cs_base);
7220 gen_eob(s);
7222 break;
7223 case 7:
7224 if (mod != 3) { /* invlpg */
7225 if (s->cpl != 0) {
7226 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7227 } else {
7228 if (s->cc_op != CC_OP_DYNAMIC)
7229 gen_op_set_cc_op(s->cc_op);
7230 gen_jmp_im(pc_start - s->cs_base);
7231 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7232 gen_helper_invlpg(cpu_A0);
7233 gen_jmp_im(s->pc - s->cs_base);
7234 gen_eob(s);
7236 } else {
7237 switch (rm) {
7238 case 0: /* swapgs */
7239 #ifdef TARGET_X86_64
7240 if (CODE64(s)) {
7241 if (s->cpl != 0) {
7242 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7243 } else {
7244 tcg_gen_ld_tl(cpu_T[0], cpu_env,
7245 offsetof(CPUX86State,segs[R_GS].base));
7246 tcg_gen_ld_tl(cpu_T[1], cpu_env,
7247 offsetof(CPUX86State,kernelgsbase));
7248 tcg_gen_st_tl(cpu_T[1], cpu_env,
7249 offsetof(CPUX86State,segs[R_GS].base));
7250 tcg_gen_st_tl(cpu_T[0], cpu_env,
7251 offsetof(CPUX86State,kernelgsbase));
7253 } else
7254 #endif
7256 goto illegal_op;
7258 break;
7259 case 1: /* rdtscp */
7260 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7261 goto illegal_op;
7262 if (s->cc_op != CC_OP_DYNAMIC)
7263 gen_op_set_cc_op(s->cc_op);
7264 gen_jmp_im(pc_start - s->cs_base);
7265 if (use_icount)
7266 gen_io_start();
7267 gen_helper_rdtscp();
7268 if (use_icount) {
7269 gen_io_end();
7270 gen_jmp(s, s->pc - s->cs_base);
7272 break;
7273 default:
7274 goto illegal_op;
7277 break;
7278 default:
7279 goto illegal_op;
7281 break;
7282 case 0x108: /* invd */
7283 case 0x109: /* wbinvd */
7284 if (s->cpl != 0) {
7285 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7286 } else {
7287 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7288 /* nothing to do */
7290 break;
7291 case 0x63: /* arpl or movslS (x86_64) */
7292 #ifdef TARGET_X86_64
7293 if (CODE64(s)) {
7294 int d_ot;
7295 /* d_ot is the size of the destination */
7296 d_ot = dflag + OT_WORD;
7298 modrm = ldub_code(s->pc++);
7299 reg = ((modrm >> 3) & 7) | rex_r;
7300 mod = (modrm >> 6) & 3;
7301 rm = (modrm & 7) | REX_B(s);
7303 if (mod == 3) {
7304 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7305 /* sign extend */
7306 if (d_ot == OT_QUAD)
7307 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7308 gen_op_mov_reg_T0(d_ot, reg);
7309 } else {
7310 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7311 if (d_ot == OT_QUAD) {
7312 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7313 } else {
7314 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7316 gen_op_mov_reg_T0(d_ot, reg);
7318 } else
7319 #endif
7321 int label1;
7322 TCGv t0, t1, t2, a0;
7324 if (!s->pe || s->vm86)
7325 goto illegal_op;
7326 t0 = tcg_temp_local_new();
7327 t1 = tcg_temp_local_new();
7328 t2 = tcg_temp_local_new();
7329 ot = OT_WORD;
7330 modrm = ldub_code(s->pc++);
7331 reg = (modrm >> 3) & 7;
7332 mod = (modrm >> 6) & 3;
7333 rm = modrm & 7;
7334 if (mod != 3) {
7335 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7336 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7337 a0 = tcg_temp_local_new();
7338 tcg_gen_mov_tl(a0, cpu_A0);
7339 } else {
7340 gen_op_mov_v_reg(ot, t0, rm);
7341 TCGV_UNUSED(a0);
7343 gen_op_mov_v_reg(ot, t1, reg);
7344 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7345 tcg_gen_andi_tl(t1, t1, 3);
7346 tcg_gen_movi_tl(t2, 0);
7347 label1 = gen_new_label();
7348 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7349 tcg_gen_andi_tl(t0, t0, ~3);
7350 tcg_gen_or_tl(t0, t0, t1);
7351 tcg_gen_movi_tl(t2, CC_Z);
7352 gen_set_label(label1);
7353 if (mod != 3) {
7354 gen_op_st_v(ot + s->mem_index, t0, a0);
7355 tcg_temp_free(a0);
7356 } else {
7357 gen_op_mov_reg_v(ot, rm, t0);
7359 if (s->cc_op != CC_OP_DYNAMIC)
7360 gen_op_set_cc_op(s->cc_op);
7361 gen_compute_eflags(cpu_cc_src);
7362 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7363 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7364 s->cc_op = CC_OP_EFLAGS;
7365 tcg_temp_free(t0);
7366 tcg_temp_free(t1);
7367 tcg_temp_free(t2);
7368 }
7369 break;
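/* The compare-and-adjust sequence above is arpl's definition: if the
   destination selector's RPL (low 2 bits) is below the source's, raise
   it to the source's RPL and set ZF, otherwise clear ZF. In C terms,
   with hypothetical dst/src/zf: */
#if 0
if ((dst & 3) < (src & 3)) {
    dst = (dst & ~3) | (src & 3);
    zf = 1;
} else {
    zf = 0;
}
#endif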
7370 case 0x102: /* lar */
7371 case 0x103: /* lsl */
7373 int label1;
7374 TCGv t0;
7375 if (!s->pe || s->vm86)
7376 goto illegal_op;
7377 ot = dflag ? OT_LONG : OT_WORD;
7378 modrm = ldub_code(s->pc++);
7379 reg = ((modrm >> 3) & 7) | rex_r;
7380 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7381 t0 = tcg_temp_local_new();
7382 if (s->cc_op != CC_OP_DYNAMIC)
7383 gen_op_set_cc_op(s->cc_op);
7384 if (b == 0x102)
7385 gen_helper_lar(t0, cpu_T[0]);
7386 else
7387 gen_helper_lsl(t0, cpu_T[0]);
7388 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7389 label1 = gen_new_label();
7390 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7391 gen_op_mov_reg_v(ot, reg, t0);
7392 gen_set_label(label1);
7393 s->cc_op = CC_OP_EFLAGS;
7394 tcg_temp_free(t0);
7396 break;
7397 case 0x118:
7398 modrm = ldub_code(s->pc++);
7399 mod = (modrm >> 6) & 3;
7400 op = (modrm >> 3) & 7;
7401 switch(op) {
7402 case 0: /* prefetchnta */
7403 case 1: /* prefetcht0 */
7404 case 2: /* prefetcht1 */
7405 case 3: /* prefetcht2 */
7406 if (mod == 3)
7407 goto illegal_op;
7408 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7409 /* nothing more to do */
7410 break;
7411 default: /* nop (multi byte) */
7412 gen_nop_modrm(s, modrm);
7413 break;
7415 break;
7416 case 0x119 ... 0x11f: /* nop (multi byte) */
7417 modrm = ldub_code(s->pc++);
7418 gen_nop_modrm(s, modrm);
7419 break;
7420 case 0x120: /* mov reg, crN */
7421 case 0x122: /* mov crN, reg */
7422 if (s->cpl != 0) {
7423 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7424 } else {
7425 modrm = ldub_code(s->pc++);
7426 if ((modrm & 0xc0) != 0xc0)
7427 goto illegal_op;
7428 rm = (modrm & 7) | REX_B(s);
7429 reg = ((modrm >> 3) & 7) | rex_r;
7430 if (CODE64(s))
7431 ot = OT_QUAD;
7432 else
7433 ot = OT_LONG;
7434 if ((prefixes & PREFIX_LOCK) && (reg == 0) &&
7435 (s->cpuid_ext3_features & CPUID_EXT3_CR8LEG)) {
7436 reg = 8;
7438 switch(reg) {
7439 case 0:
7440 case 2:
7441 case 3:
7442 case 4:
7443 case 8:
7444 if (s->cc_op != CC_OP_DYNAMIC)
7445 gen_op_set_cc_op(s->cc_op);
7446 gen_jmp_im(pc_start - s->cs_base);
7447 if (b & 2) {
7448 gen_op_mov_TN_reg(ot, 0, rm);
7449 gen_helper_write_crN(tcg_const_i32(reg), cpu_T[0]);
7450 gen_jmp_im(s->pc - s->cs_base);
7451 gen_eob(s);
7452 } else {
7453 gen_helper_read_crN(cpu_T[0], tcg_const_i32(reg));
7454 gen_op_mov_reg_T0(ot, rm);
7456 break;
7457 default:
7458 goto illegal_op;
7461 break;
7462 case 0x121: /* mov reg, drN */
7463 case 0x123: /* mov drN, reg */
7464 if (s->cpl != 0) {
7465 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7466 } else {
7467 modrm = ldub_code(s->pc++);
7468 if ((modrm & 0xc0) != 0xc0)
7469 goto illegal_op;
7470 rm = (modrm & 7) | REX_B(s);
7471 reg = ((modrm >> 3) & 7) | rex_r;
7472 if (CODE64(s))
7473 ot = OT_QUAD;
7474 else
7475 ot = OT_LONG;
7476 /* XXX: do it dynamically with CR4.DE bit */
7477 if (reg == 4 || reg == 5 || reg >= 8)
7478 goto illegal_op;
7479 if (b & 2) {
7480 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7481 gen_op_mov_TN_reg(ot, 0, rm);
7482 gen_helper_movl_drN_T0(tcg_const_i32(reg), cpu_T[0]);
7483 gen_jmp_im(s->pc - s->cs_base);
7484 gen_eob(s);
7485 } else {
7486 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7487 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7488 gen_op_mov_reg_T0(ot, rm);
7491 break;
7492 case 0x106: /* clts */
7493 if (s->cpl != 0) {
7494 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7495 } else {
7496 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7497 gen_helper_clts();
7498 /* abort block because static cpu state changed */
7499 gen_jmp_im(s->pc - s->cs_base);
7500 gen_eob(s);
7502 break;
7503 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
7504 case 0x1c3: /* MOVNTI reg, mem */
7505 if (!(s->cpuid_features & CPUID_SSE2))
7506 goto illegal_op;
7507 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
7508 modrm = ldub_code(s->pc++);
7509 mod = (modrm >> 6) & 3;
7510 if (mod == 3)
7511 goto illegal_op;
7512 reg = ((modrm >> 3) & 7) | rex_r;
7513 /* generate a generic store */
7514 gen_ldst_modrm(s, modrm, ot, reg, 1);
7515 break;
7516 case 0x1ae:
7517 modrm = ldub_code(s->pc++);
7518 mod = (modrm >> 6) & 3;
7519 op = (modrm >> 3) & 7;
7520 switch(op) {
7521 case 0: /* fxsave */
7522 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7523 (s->prefix & PREFIX_LOCK))
7524 goto illegal_op;
7525 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
7526 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7527 break;
7529 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7530 if (s->cc_op != CC_OP_DYNAMIC)
7531 gen_op_set_cc_op(s->cc_op);
7532 gen_jmp_im(pc_start - s->cs_base);
7533 gen_helper_fxsave(cpu_A0, tcg_const_i32((s->dflag == 2)));
7534 break;
7535 case 1: /* fxrstor */
7536 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7537 (s->prefix & PREFIX_LOCK))
7538 goto illegal_op;
7539 if ((s->flags & HF_EM_MASK) || (s->flags & HF_TS_MASK)) {
7540 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7541 break;
7543 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7544 if (s->cc_op != CC_OP_DYNAMIC)
7545 gen_op_set_cc_op(s->cc_op);
7546 gen_jmp_im(pc_start - s->cs_base);
7547 gen_helper_fxrstor(cpu_A0, tcg_const_i32((s->dflag == 2)));
7548 break;
7549 case 2: /* ldmxcsr */
7550 case 3: /* stmxcsr */
7551 if (s->flags & HF_TS_MASK) {
7552 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7553 break;
7555 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
7556 mod == 3)
7557 goto illegal_op;
7558 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7559 if (op == 2) {
7560 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7561 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7562 } else {
7563 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7564 gen_op_st_T0_A0(OT_LONG + s->mem_index);
7566 break;
7567 case 5: /* lfence */
7568 case 6: /* mfence */
7569 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
7570 goto illegal_op;
7571 break;
7572 case 7: /* sfence / clflush */
7573 if ((modrm & 0xc7) == 0xc0) {
7574 /* sfence */
7575 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7576 if (!(s->cpuid_features & CPUID_SSE))
7577 goto illegal_op;
7578 } else {
7579 /* clflush */
7580 if (!(s->cpuid_features & CPUID_CLFLUSH))
7581 goto illegal_op;
7582 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7584 break;
7585 default:
7586 goto illegal_op;
7588 break;
7589 case 0x10d: /* 3DNow! prefetch(w) */
7590 modrm = ldub_code(s->pc++);
7591 mod = (modrm >> 6) & 3;
7592 if (mod == 3)
7593 goto illegal_op;
7594 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7595 /* ignore for now */
7596 break;
7597 case 0x1aa: /* rsm */
7598 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7599 if (!(s->flags & HF_SMM_MASK))
7600 goto illegal_op;
7601 if (s->cc_op != CC_OP_DYNAMIC) {
7602 gen_op_set_cc_op(s->cc_op);
7603 s->cc_op = CC_OP_DYNAMIC;
7605 gen_jmp_im(s->pc - s->cs_base);
7606 gen_helper_rsm();
7607 gen_eob(s);
7608 break;
7609 case 0x1b8: /* SSE4.2 popcnt */
7610 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
7611 PREFIX_REPZ)
7612 goto illegal_op;
7613 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
7614 goto illegal_op;
7616 modrm = ldub_code(s->pc++);
7617 reg = ((modrm >> 3) & 7);
7619 if (s->prefix & PREFIX_DATA)
7620 ot = OT_WORD;
7621 else if (s->dflag != 2)
7622 ot = OT_LONG;
7623 else
7624 ot = OT_QUAD;
7626 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7627 gen_helper_popcnt(cpu_T[0], cpu_T[0], tcg_const_i32(ot));
7628 gen_op_mov_reg_T0(ot, reg);
7630 s->cc_op = CC_OP_EFLAGS;
7631 break;
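/* popcnt above counts the set bits of the ot-sized source; the helper
   also sets the flags so that ZF reflects a zero source while the other
   arithmetic flags are cleared, hence CC_OP_EFLAGS. Host-C equivalent
   for a hypothetical 64-bit operand value: */
#if 0
int result = __builtin_popcountll(value);
#endif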
7632 case 0x10e ... 0x10f:
7633 /* 3DNow! instructions: ignore prefixes and fall through to gen_sse */
7634 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7635 case 0x110 ... 0x117:
7636 case 0x128 ... 0x12f:
7637 case 0x138 ... 0x13a:
7638 case 0x150 ... 0x179:
7639 case 0x17c ... 0x17f:
7640 case 0x1c2:
7641 case 0x1c4 ... 0x1c6:
7642 case 0x1d0 ... 0x1fe:
7643 gen_sse(s, b, pc_start, rex_r);
7644 break;
7645 default:
7646 goto illegal_op;
7648 /* lock generation */
7649 if (s->prefix & PREFIX_LOCK)
7650 gen_helper_unlock();
7651 return s->pc;
7652 illegal_op:
7653 if (s->prefix & PREFIX_LOCK)
7654 gen_helper_unlock();
7655 /* XXX: ensure that no lock was generated */
7656 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7657 return s->pc;
7658 }
7660 void optimize_flags_init(void)
7661 {
7662 #if TCG_TARGET_REG_BITS == 32
7663 assert(sizeof(CCTable) == (1 << 3));
7664 #else
7665 assert(sizeof(CCTable) == (1 << 4));
7666 #endif
7667 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
7668 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
7669 offsetof(CPUState, cc_op), "cc_op");
7670 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
7671 "cc_src");
7672 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
7673 "cc_dst");
7674 cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
7675 "cc_tmp");
7677 #ifdef TARGET_X86_64
7678 cpu_regs[R_EAX] = tcg_global_mem_new_i64(TCG_AREG0,
7679 offsetof(CPUState, regs[R_EAX]), "rax");
7680 cpu_regs[R_ECX] = tcg_global_mem_new_i64(TCG_AREG0,
7681 offsetof(CPUState, regs[R_ECX]), "rcx");
7682 cpu_regs[R_EDX] = tcg_global_mem_new_i64(TCG_AREG0,
7683 offsetof(CPUState, regs[R_EDX]), "rdx");
7684 cpu_regs[R_EBX] = tcg_global_mem_new_i64(TCG_AREG0,
7685 offsetof(CPUState, regs[R_EBX]), "rbx");
7686 cpu_regs[R_ESP] = tcg_global_mem_new_i64(TCG_AREG0,
7687 offsetof(CPUState, regs[R_ESP]), "rsp");
7688 cpu_regs[R_EBP] = tcg_global_mem_new_i64(TCG_AREG0,
7689 offsetof(CPUState, regs[R_EBP]), "rbp");
7690 cpu_regs[R_ESI] = tcg_global_mem_new_i64(TCG_AREG0,
7691 offsetof(CPUState, regs[R_ESI]), "rsi");
7692 cpu_regs[R_EDI] = tcg_global_mem_new_i64(TCG_AREG0,
7693 offsetof(CPUState, regs[R_EDI]), "rdi");
7694 cpu_regs[8] = tcg_global_mem_new_i64(TCG_AREG0,
7695 offsetof(CPUState, regs[8]), "r8");
7696 cpu_regs[9] = tcg_global_mem_new_i64(TCG_AREG0,
7697 offsetof(CPUState, regs[9]), "r9");
7698 cpu_regs[10] = tcg_global_mem_new_i64(TCG_AREG0,
7699 offsetof(CPUState, regs[10]), "r10");
7700 cpu_regs[11] = tcg_global_mem_new_i64(TCG_AREG0,
7701 offsetof(CPUState, regs[11]), "r11");
7702 cpu_regs[12] = tcg_global_mem_new_i64(TCG_AREG0,
7703 offsetof(CPUState, regs[12]), "r12");
7704 cpu_regs[13] = tcg_global_mem_new_i64(TCG_AREG0,
7705 offsetof(CPUState, regs[13]), "r13");
7706 cpu_regs[14] = tcg_global_mem_new_i64(TCG_AREG0,
7707 offsetof(CPUState, regs[14]), "r14");
7708 cpu_regs[15] = tcg_global_mem_new_i64(TCG_AREG0,
7709 offsetof(CPUState, regs[15]), "r15");
7710 #else
7711 cpu_regs[R_EAX] = tcg_global_mem_new_i32(TCG_AREG0,
7712 offsetof(CPUState, regs[R_EAX]), "eax");
7713 cpu_regs[R_ECX] = tcg_global_mem_new_i32(TCG_AREG0,
7714 offsetof(CPUState, regs[R_ECX]), "ecx");
7715 cpu_regs[R_EDX] = tcg_global_mem_new_i32(TCG_AREG0,
7716 offsetof(CPUState, regs[R_EDX]), "edx");
7717 cpu_regs[R_EBX] = tcg_global_mem_new_i32(TCG_AREG0,
7718 offsetof(CPUState, regs[R_EBX]), "ebx");
7719 cpu_regs[R_ESP] = tcg_global_mem_new_i32(TCG_AREG0,
7720 offsetof(CPUState, regs[R_ESP]), "esp");
7721 cpu_regs[R_EBP] = tcg_global_mem_new_i32(TCG_AREG0,
7722 offsetof(CPUState, regs[R_EBP]), "ebp");
7723 cpu_regs[R_ESI] = tcg_global_mem_new_i32(TCG_AREG0,
7724 offsetof(CPUState, regs[R_ESI]), "esi");
7725 cpu_regs[R_EDI] = tcg_global_mem_new_i32(TCG_AREG0,
7726 offsetof(CPUState, regs[R_EDI]), "edi");
7727 #endif
7729 /* register helpers */
7730 #define GEN_HELPER 2
7731 #include "helper.h"
7732 }
7734 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7735 basic block 'tb'. If search_pc is TRUE, also generate PC
7736 information for each intermediate instruction. */
7737 static inline void gen_intermediate_code_internal(CPUState *env,
7738 TranslationBlock *tb,
7739 int search_pc)
7740 {
7741 DisasContext dc1, *dc = &dc1;
7742 target_ulong pc_ptr;
7743 uint16_t *gen_opc_end;
7744 CPUBreakpoint *bp;
7745 int j, lj, cflags;
7746 uint64_t flags;
7747 target_ulong pc_start;
7748 target_ulong cs_base;
7749 int num_insns;
7750 int max_insns;
7752 /* generate intermediate code */
7753 pc_start = tb->pc;
7754 cs_base = tb->cs_base;
7755 flags = tb->flags;
7756 cflags = tb->cflags;
7758 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7759 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7760 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7761 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7762 dc->f_st = 0;
7763 dc->vm86 = (flags >> VM_SHIFT) & 1;
7764 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7765 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7766 dc->tf = (flags >> TF_SHIFT) & 1;
7767 dc->singlestep_enabled = env->singlestep_enabled;
7768 dc->cc_op = CC_OP_DYNAMIC;
7769 dc->cs_base = cs_base;
7770 dc->tb = tb;
7771 dc->popl_esp_hack = 0;
7772 /* select memory access functions */
7773 dc->mem_index = 0;
7774 if (flags & HF_SOFTMMU_MASK) {
7775 if (dc->cpl == 3)
7776 dc->mem_index = 2 * 4;
7777 else
7778 dc->mem_index = 1 * 4;
7780 dc->cpuid_features = env->cpuid_features;
7781 dc->cpuid_ext_features = env->cpuid_ext_features;
7782 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7783 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7784 #ifdef TARGET_X86_64
7785 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7786 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7787 #endif
7788 dc->flags = flags;
7789 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7790 (flags & HF_INHIBIT_IRQ_MASK)
7791 #ifndef CONFIG_SOFTMMU
7792 || (flags & HF_SOFTMMU_MASK)
7793 #endif
7794 );
7795 #if 0
7796 /* check addseg logic */
7797 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7798 printf("ERROR addseg\n");
7799 #endif
7801 cpu_T[0] = tcg_temp_new();
7802 cpu_T[1] = tcg_temp_new();
7803 cpu_A0 = tcg_temp_new();
7804 cpu_T3 = tcg_temp_new();
7806 cpu_tmp0 = tcg_temp_new();
7807 cpu_tmp1_i64 = tcg_temp_new_i64();
7808 cpu_tmp2_i32 = tcg_temp_new_i32();
7809 cpu_tmp3_i32 = tcg_temp_new_i32();
7810 cpu_tmp4 = tcg_temp_new();
7811 cpu_tmp5 = tcg_temp_new();
7812 cpu_ptr0 = tcg_temp_new_ptr();
7813 cpu_ptr1 = tcg_temp_new_ptr();
7815 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7817 dc->is_jmp = DISAS_NEXT;
7818 pc_ptr = pc_start;
7819 lj = -1;
7820 num_insns = 0;
7821 max_insns = tb->cflags & CF_COUNT_MASK;
7822 if (max_insns == 0)
7823 max_insns = CF_COUNT_MASK;
7825 gen_icount_start();
7826 for(;;) {
7827 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
7828 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
7829 if (bp->pc == pc_ptr &&
7830 !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
7831 gen_debug(dc, pc_ptr - dc->cs_base);
7832 break;
7836 if (search_pc) {
7837 j = gen_opc_ptr - gen_opc_buf;
7838 if (lj < j) {
7839 lj++;
7840 while (lj < j)
7841 gen_opc_instr_start[lj++] = 0;
7843 gen_opc_pc[lj] = pc_ptr;
7844 gen_opc_cc_op[lj] = dc->cc_op;
7845 gen_opc_instr_start[lj] = 1;
7846 gen_opc_icount[lj] = num_insns;
7848 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
7849 gen_io_start();
7851 pc_ptr = disas_insn(dc, pc_ptr);
7852 num_insns++;
7853 /* stop translation if indicated */
7854 if (dc->is_jmp)
7855 break;
7856 /* in single step mode, we generate only one instruction and
7857 generate an exception */
7858 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7859 the flag and abort the translation to give the irqs a
7860 chance to happen */
7861 if (dc->tf || dc->singlestep_enabled ||
7862 (flags & HF_INHIBIT_IRQ_MASK)) {
7863 gen_jmp_im(pc_ptr - dc->cs_base);
7864 gen_eob(dc);
7865 break;
7866 }
7867 /* if the translation is getting too long, stop generation as well */
7868 if (gen_opc_ptr >= gen_opc_end ||
7869 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
7870 num_insns >= max_insns) {
7871 gen_jmp_im(pc_ptr - dc->cs_base);
7872 gen_eob(dc);
7873 break;
7875 if (singlestep) {
7876 gen_jmp_im(pc_ptr - dc->cs_base);
7877 gen_eob(dc);
7878 break;
7881 if (tb->cflags & CF_LAST_IO)
7882 gen_io_end();
7883 gen_icount_end(tb, num_insns);
7884 *gen_opc_ptr = INDEX_op_end;
7885 /* make sure the last values are filled in */
7886 if (search_pc) {
7887 j = gen_opc_ptr - gen_opc_buf;
7888 lj++;
7889 while (lj <= j)
7890 gen_opc_instr_start[lj++] = 0;
7893 #ifdef DEBUG_DISAS
7894 log_cpu_state_mask(CPU_LOG_TB_CPU, env, X86_DUMP_CCOP);
7895 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
7896 int disas_flags;
7897 qemu_log("----------------\n");
7898 qemu_log("IN: %s\n", lookup_symbol(pc_start));
7899 #ifdef TARGET_X86_64
7900 if (dc->code64)
7901 disas_flags = 2;
7902 else
7903 #endif
7904 disas_flags = !dc->code32;
7905 log_target_disas(pc_start, pc_ptr - pc_start, disas_flags);
7906 qemu_log("\n");
7908 #endif
7910 if (!search_pc) {
7911 tb->size = pc_ptr - pc_start;
7912 tb->icount = num_insns;
7916 void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7917 {
7918 gen_intermediate_code_internal(env, tb, 0);
7919 }
7921 void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7922 {
7923 gen_intermediate_code_internal(env, tb, 1);
7924 }
7926 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7927 unsigned long searched_pc, int pc_pos, void *puc)
7928 {
7929 int cc_op;
7930 #ifdef DEBUG_DISAS
7931 if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
7932 int i;
7933 qemu_log("RESTORE:\n");
7934 for(i = 0;i <= pc_pos; i++) {
7935 if (gen_opc_instr_start[i]) {
7936 qemu_log("0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7939 qemu_log("spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7940 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7941 (uint32_t)tb->cs_base);
7943 #endif
7944 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7945 cc_op = gen_opc_cc_op[pc_pos];
7946 if (cc_op != CC_OP_DYNAMIC)
7947 env->cc_op = cc_op;
7948 }