2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/* Register name table, allocation order and calling-convention register
   lists for the i386 TCG backend.
   NOTE(review): the array initializer contents were elided by the
   extraction; only the declarations survive here. */
26 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
/* Order in which the register allocator prefers to hand out registers. */
38 static const int tcg_target_reg_alloc_order
[] = {
/* regparm(3)-style input argument registers: EAX, EDX, ECX in that order. */
48 static const int tcg_target_call_iarg_regs
[3] = { TCG_REG_EAX
, TCG_REG_EDX
, TCG_REG_ECX
};
/* Call results: EAX first, EDX second (high half of 64-bit results). */
49 static const int tcg_target_call_oarg_regs
[2] = { TCG_REG_EAX
, TCG_REG_EDX
};
/* Host code address of the common epilogue; INDEX_op_exit_tb emits a jump
   here, and it is assigned in tcg_target_qemu_prologue. */
51 static uint8_t *tb_ret_addr
;
/* Patch one relocation in already-emitted host code.
   'type' selects the relocation kind; the switch labels were elided by the
   extraction, but the file emits R_386_PC8 and R_386_PC32 relocations below
   (see tcg_out_jxx), so the three stores presumably correspond to an
   absolute 32-bit, a PC-relative 32-bit, and a PC-relative 8-bit
   relocation -- TODO confirm against the full source. */
53 static void patch_reloc(uint8_t *code_ptr
, int type
,
54 tcg_target_long value
, tcg_target_long addend
)
/* absolute 32-bit: store the value as-is */
59 *(uint32_t *)code_ptr
= value
;
/* PC-relative 32-bit: displacement = target - patch location */
62 *(uint32_t *)code_ptr
= value
- (long)code_ptr
;
/* PC-relative 8-bit: compute displacement, then range-check it ... */
65 value
-= (long)code_ptr
;
66 if (value
!= (int8_t)value
) {
/* ... and store the single displacement byte */
69 *(uint8_t *)code_ptr
= value
;
76 /* maximum number of register used for input function arguments */
/* Returns how many of EAX/EDX/ECX carry call arguments for the given call
   type: 0 for the standard stack convention, N for the REGPARM_N
   conventions.  (The switch header and the STD return were elided by the
   extraction.) */
77 static inline int tcg_target_get_call_iarg_regs_count(int flags
)
/* keep only the calling-convention bits of the flags word */
79 flags
&= TCG_CALL_TYPE_MASK
;
/* stack-based convention: no register arguments */
81 case TCG_CALL_TYPE_STD
:
/* REGPARM_1/2/REGPARM are presumably consecutive enum values, so the
   register count is the distance from REGPARM_1 plus one -- confirm. */
83 case TCG_CALL_TYPE_REGPARM_1
:
84 case TCG_CALL_TYPE_REGPARM_2
:
85 case TCG_CALL_TYPE_REGPARM
:
86 return flags
- TCG_CALL_TYPE_REGPARM_1
+ 1;
92 /* parse target specific constraints */
93 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
100 ct
->ct
|= TCG_CT_REG
;
101 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EAX
);
104 ct
->ct
|= TCG_CT_REG
;
105 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EBX
);
108 ct
->ct
|= TCG_CT_REG
;
109 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_ECX
);
112 ct
->ct
|= TCG_CT_REG
;
113 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EDX
);
116 ct
->ct
|= TCG_CT_REG
;
117 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_ESI
);
120 ct
->ct
|= TCG_CT_REG
;
121 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EDI
);
124 ct
->ct
|= TCG_CT_REG
;
125 tcg_regset_set32(ct
->u
.regs
, 0, 0xf);
128 ct
->ct
|= TCG_CT_REG
;
129 tcg_regset_set32(ct
->u
.regs
, 0, 0xff);
132 /* qemu_ld/st address constraint */
134 ct
->ct
|= TCG_CT_REG
;
135 tcg_regset_set32(ct
->u
.regs
, 0, 0xff);
136 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_EAX
);
137 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_EDX
);
147 /* test if a constant matches the constraint */
/* NOTE(review): the load of 'ct' from arg_ct->ct and the return paths were
   elided by the extraction; presumably any constant matches when
   TCG_CT_CONST is set -- confirm against the full source. */
148 static inline int tcg_target_const_match(tcg_target_long val
,
149 const TCGArgConstraint
*arg_ct
)
153 if (ct
& TCG_CT_CONST
)
/* Opcode constants for the subset of the i386 instruction set used by this
   backend.  P_EXT flags a two-byte opcode that must be emitted with the
   0x0f escape prefix (see tcg_out_opc).
   Fix: P_EXT was accidentally #defined twice (identical redefinition is
   legal C but redundant and confusing); the duplicate has been dropped. */
#define P_EXT          0x100            /* 0x0f opcode prefix */

#define OPC_BSWAP      (0xc8 | P_EXT)
#define OPC_MOVB_EvGv  (0x88)           /* stores, more or less */
#define OPC_MOVL_EvGv  (0x89)           /* stores, more or less */
#define OPC_MOVL_GvEv  (0x8b)           /* loads, more or less */
#define OPC_SHIFT_1    (0xd1)           /* shift by one */
#define OPC_SHIFT_Ib   (0xc1)           /* shift by immediate byte */
#define OPC_SHIFT_cl   (0xd3)           /* shift by %cl */
/* Map each TCG comparison condition to the matching x86 Jcc condition
   nibble.  The table value is combined with 0x70 (short jump), 0x0f/0x80
   (near jump) or 0x0f/0x90 (setcc) when emitted below. */
204 static const uint8_t tcg_cond_to_jcc
[10] = {
205 [TCG_COND_EQ
] = JCC_JE
,
206 [TCG_COND_NE
] = JCC_JNE
,
207 [TCG_COND_LT
] = JCC_JL
,
208 [TCG_COND_GE
] = JCC_JGE
,
209 [TCG_COND_LE
] = JCC_JLE
,
210 [TCG_COND_GT
] = JCC_JG
,
/* unsigned comparisons use the below/above family */
211 [TCG_COND_LTU
] = JCC_JB
,
212 [TCG_COND_GEU
] = JCC_JAE
,
213 [TCG_COND_LEU
] = JCC_JBE
,
214 [TCG_COND_GTU
] = JCC_JA
,
217 static inline void tcg_out_opc(TCGContext
*s
, int opc
)
224 static inline void tcg_out_modrm(TCGContext
*s
, int opc
, int r
, int rm
)
227 tcg_out8(s
, 0xc0 | (r
<< 3) | rm
);
230 /* rm == -1 means no register index */
231 static inline void tcg_out_modrm_offset(TCGContext
*s
, int opc
, int r
, int rm
,
236 tcg_out8(s
, 0x05 | (r
<< 3));
237 tcg_out32(s
, offset
);
238 } else if (offset
== 0 && rm
!= TCG_REG_EBP
) {
239 if (rm
== TCG_REG_ESP
) {
240 tcg_out8(s
, 0x04 | (r
<< 3));
243 tcg_out8(s
, 0x00 | (r
<< 3) | rm
);
245 } else if ((int8_t)offset
== offset
) {
246 if (rm
== TCG_REG_ESP
) {
247 tcg_out8(s
, 0x44 | (r
<< 3));
250 tcg_out8(s
, 0x40 | (r
<< 3) | rm
);
254 if (rm
== TCG_REG_ESP
) {
255 tcg_out8(s
, 0x84 | (r
<< 3));
258 tcg_out8(s
, 0x80 | (r
<< 3) | rm
);
260 tcg_out32(s
, offset
);
264 static inline void tcg_out_mov(TCGContext
*s
, int ret
, int arg
)
267 tcg_out_modrm(s
, OPC_MOVL_GvEv
, ret
, arg
);
271 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
272 int ret
, int32_t arg
)
276 tcg_out_modrm(s
, 0x01 | (ARITH_XOR
<< 3), ret
, ret
);
278 tcg_out8(s
, 0xb8 + ret
);
283 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, int ret
,
284 int arg1
, tcg_target_long arg2
)
286 tcg_out_modrm_offset(s
, OPC_MOVL_GvEv
, ret
, arg1
, arg2
);
289 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, int arg
,
290 int arg1
, tcg_target_long arg2
)
292 tcg_out_modrm_offset(s
, OPC_MOVL_EvGv
, arg
, arg1
, arg2
);
295 static void tcg_out_shifti(TCGContext
*s
, int subopc
, int reg
, int count
)
298 tcg_out_modrm(s
, OPC_SHIFT_1
, subopc
, reg
);
300 tcg_out_modrm(s
, OPC_SHIFT_Ib
, subopc
, reg
);
305 static inline void tcg_out_bswap32(TCGContext
*s
, int reg
)
307 tcg_out_opc(s
, OPC_BSWAP
+ reg
);
310 static inline void tcg_out_rolw_8(TCGContext
*s
, int reg
)
313 tcg_out_shifti(s
, SHIFT_ROL
, reg
, 8);
316 static inline void tgen_arithi(TCGContext
*s
, int c
, int r0
, int32_t val
, int cf
)
318 if (!cf
&& ((c
== ARITH_ADD
&& val
== 1) || (c
== ARITH_SUB
&& val
== -1))) {
320 tcg_out_opc(s
, 0x40 + r0
);
321 } else if (!cf
&& ((c
== ARITH_ADD
&& val
== -1) || (c
== ARITH_SUB
&& val
== 1))) {
323 tcg_out_opc(s
, 0x48 + r0
);
324 } else if (val
== (int8_t)val
) {
325 tcg_out_modrm(s
, 0x83, c
, r0
);
327 } else if (c
== ARITH_AND
&& val
== 0xffu
&& r0
< 4) {
329 tcg_out_modrm(s
, 0xb6 | P_EXT
, r0
, r0
);
330 } else if (c
== ARITH_AND
&& val
== 0xffffu
) {
332 tcg_out_modrm(s
, 0xb7 | P_EXT
, r0
, r0
);
334 tcg_out_modrm(s
, 0x81, c
, r0
);
339 static void tcg_out_addi(TCGContext
*s
, int reg
, tcg_target_long val
)
342 tgen_arithi(s
, ARITH_ADD
, reg
, val
, 0);
345 /* Use SMALL != 0 to force a short forward branch. */
346 static void tcg_out_jxx(TCGContext
*s
, int opc
, int label_index
, int small
)
349 TCGLabel
*l
= &s
->labels
[label_index
];
352 val
= l
->u
.value
- (tcg_target_long
)s
->code_ptr
;
354 if ((int8_t)val1
== val1
) {
358 tcg_out8(s
, 0x70 + opc
);
367 tcg_out32(s
, val
- 5);
370 tcg_out8(s
, 0x80 + opc
);
371 tcg_out32(s
, val
- 6);
378 tcg_out8(s
, 0x70 + opc
);
380 tcg_out_reloc(s
, s
->code_ptr
, R_386_PC8
, label_index
, -1);
387 tcg_out8(s
, 0x80 + opc
);
389 tcg_out_reloc(s
, s
->code_ptr
, R_386_PC32
, label_index
, -4);
394 static void tcg_out_cmp(TCGContext
*s
, TCGArg arg1
, TCGArg arg2
,
400 tcg_out_modrm(s
, 0x85, arg1
, arg1
);
402 tgen_arithi(s
, ARITH_CMP
, arg1
, arg2
, 0);
405 tcg_out_modrm(s
, 0x01 | (ARITH_CMP
<< 3), arg2
, arg1
);
409 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
,
410 TCGArg arg1
, TCGArg arg2
, int const_arg2
,
411 int label_index
, int small
)
413 tcg_out_cmp(s
, arg1
, arg2
, const_arg2
);
414 tcg_out_jxx(s
, tcg_cond_to_jcc
[cond
], label_index
, small
);
417 /* XXX: we implement it at the target level to avoid having to
418 handle cross basic blocks temporaries */
419 static void tcg_out_brcond2(TCGContext
*s
, const TCGArg
*args
,
420 const int *const_args
, int small
)
423 label_next
= gen_new_label();
426 tcg_out_brcond(s
, TCG_COND_NE
, args
[0], args
[2], const_args
[2],
428 tcg_out_brcond(s
, TCG_COND_EQ
, args
[1], args
[3], const_args
[3],
432 tcg_out_brcond(s
, TCG_COND_NE
, args
[0], args
[2], const_args
[2],
434 tcg_out_brcond(s
, TCG_COND_NE
, args
[1], args
[3], const_args
[3],
438 tcg_out_brcond(s
, TCG_COND_LT
, args
[1], args
[3], const_args
[3],
440 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
441 tcg_out_brcond(s
, TCG_COND_LTU
, args
[0], args
[2], const_args
[2],
445 tcg_out_brcond(s
, TCG_COND_LT
, args
[1], args
[3], const_args
[3],
447 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
448 tcg_out_brcond(s
, TCG_COND_LEU
, args
[0], args
[2], const_args
[2],
452 tcg_out_brcond(s
, TCG_COND_GT
, args
[1], args
[3], const_args
[3],
454 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
455 tcg_out_brcond(s
, TCG_COND_GTU
, args
[0], args
[2], const_args
[2],
459 tcg_out_brcond(s
, TCG_COND_GT
, args
[1], args
[3], const_args
[3],
461 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
462 tcg_out_brcond(s
, TCG_COND_GEU
, args
[0], args
[2], const_args
[2],
466 tcg_out_brcond(s
, TCG_COND_LTU
, args
[1], args
[3], const_args
[3],
468 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
469 tcg_out_brcond(s
, TCG_COND_LTU
, args
[0], args
[2], const_args
[2],
473 tcg_out_brcond(s
, TCG_COND_LTU
, args
[1], args
[3], const_args
[3],
475 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
476 tcg_out_brcond(s
, TCG_COND_LEU
, args
[0], args
[2], const_args
[2],
480 tcg_out_brcond(s
, TCG_COND_GTU
, args
[1], args
[3], const_args
[3],
482 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
483 tcg_out_brcond(s
, TCG_COND_GTU
, args
[0], args
[2], const_args
[2],
487 tcg_out_brcond(s
, TCG_COND_GTU
, args
[1], args
[3], const_args
[3],
489 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
490 tcg_out_brcond(s
, TCG_COND_GEU
, args
[0], args
[2], const_args
[2],
496 tcg_out_label(s
, label_next
, (tcg_target_long
)s
->code_ptr
);
499 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, TCGArg dest
,
500 TCGArg arg1
, TCGArg arg2
, int const_arg2
)
502 tcg_out_cmp(s
, arg1
, arg2
, const_arg2
);
504 tcg_out_modrm(s
, 0x90 | tcg_cond_to_jcc
[cond
] | P_EXT
, 0, dest
);
505 tgen_arithi(s
, ARITH_AND
, dest
, 0xff, 0);
508 static void tcg_out_setcond2(TCGContext
*s
, const TCGArg
*args
,
509 const int *const_args
)
512 int label_true
, label_over
;
514 memcpy(new_args
, args
+1, 5*sizeof(TCGArg
));
516 if (args
[0] == args
[1] || args
[0] == args
[2]
517 || (!const_args
[3] && args
[0] == args
[3])
518 || (!const_args
[4] && args
[0] == args
[4])) {
519 /* When the destination overlaps with one of the argument
520 registers, don't do anything tricky. */
521 label_true
= gen_new_label();
522 label_over
= gen_new_label();
524 new_args
[5] = label_true
;
525 tcg_out_brcond2(s
, new_args
, const_args
+1, 1);
527 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], 0);
528 tcg_out_jxx(s
, JCC_JMP
, label_over
, 1);
529 tcg_out_label(s
, label_true
, (tcg_target_long
)s
->code_ptr
);
531 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], 1);
532 tcg_out_label(s
, label_over
, (tcg_target_long
)s
->code_ptr
);
534 /* When the destination does not overlap one of the arguments,
535 clear the destination first, jump if cond false, and emit an
536 increment in the true case. This results in smaller code. */
538 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], 0);
540 label_over
= gen_new_label();
541 new_args
[4] = tcg_invert_cond(new_args
[4]);
542 new_args
[5] = label_over
;
543 tcg_out_brcond2(s
, new_args
, const_args
+1, 1);
545 tgen_arithi(s
, ARITH_ADD
, args
[0], 1, 0);
546 tcg_out_label(s
, label_over
, (tcg_target_long
)s
->code_ptr
);
550 #if defined(CONFIG_SOFTMMU)
552 #include "../../softmmu_defs.h"
554 static void *qemu_ld_helpers
[4] = {
561 static void *qemu_st_helpers
[4] = {
569 #ifndef CONFIG_USER_ONLY
573 /* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
574 EAX. It will be useful once fixed registers globals are less
576 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
579 int addr_reg
, data_reg
, data_reg2
, r0
, r1
, mem_index
, s_bits
, bswap
;
580 #if defined(CONFIG_SOFTMMU)
581 uint8_t *label1_ptr
, *label2_ptr
;
583 #if TARGET_LONG_BITS == 64
584 #if defined(CONFIG_SOFTMMU)
596 #if TARGET_LONG_BITS == 64
605 #if defined(CONFIG_SOFTMMU)
606 tcg_out_mov(s
, r1
, addr_reg
);
608 tcg_out_mov(s
, r0
, addr_reg
);
610 tcg_out_shifti(s
, SHIFT_SHR
, r1
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
612 tcg_out_modrm(s
, 0x81, 4, r0
); /* andl $x, r0 */
613 tcg_out32(s
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
615 tcg_out_modrm(s
, 0x81, 4, r1
); /* andl $x, r1 */
616 tcg_out32(s
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
618 tcg_out_opc(s
, 0x8d); /* lea offset(r1, %ebp), r1 */
619 tcg_out8(s
, 0x80 | (r1
<< 3) | 0x04);
620 tcg_out8(s
, (5 << 3) | r1
);
621 tcg_out32(s
, offsetof(CPUState
, tlb_table
[mem_index
][0].addr_read
));
624 tcg_out_modrm_offset(s
, 0x3b, r0
, r1
, 0);
626 tcg_out_mov(s
, r0
, addr_reg
);
628 #if TARGET_LONG_BITS == 32
630 tcg_out8(s
, 0x70 + JCC_JE
);
631 label1_ptr
= s
->code_ptr
;
635 tcg_out8(s
, 0x70 + JCC_JNE
);
636 label3_ptr
= s
->code_ptr
;
639 /* cmp 4(r1), addr_reg2 */
640 tcg_out_modrm_offset(s
, 0x3b, addr_reg2
, r1
, 4);
643 tcg_out8(s
, 0x70 + JCC_JE
);
644 label1_ptr
= s
->code_ptr
;
648 *label3_ptr
= s
->code_ptr
- label3_ptr
- 1;
651 /* XXX: move that code at the end of the TB */
652 #if TARGET_LONG_BITS == 32
653 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_EDX
, mem_index
);
655 tcg_out_mov(s
, TCG_REG_EDX
, addr_reg2
);
656 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_ECX
, mem_index
);
659 tcg_out32(s
, (tcg_target_long
)qemu_ld_helpers
[s_bits
] -
660 (tcg_target_long
)s
->code_ptr
- 4);
665 tcg_out_modrm(s
, 0xbe | P_EXT
, data_reg
, TCG_REG_EAX
);
669 tcg_out_modrm(s
, 0xbf | P_EXT
, data_reg
, TCG_REG_EAX
);
673 tcg_out_modrm(s
, 0xb6 | P_EXT
, data_reg
, TCG_REG_EAX
);
677 tcg_out_modrm(s
, 0xb7 | P_EXT
, data_reg
, TCG_REG_EAX
);
681 tcg_out_mov(s
, data_reg
, TCG_REG_EAX
);
684 if (data_reg
== TCG_REG_EDX
) {
685 tcg_out_opc(s
, 0x90 + TCG_REG_EDX
); /* xchg %edx, %eax */
686 tcg_out_mov(s
, data_reg2
, TCG_REG_EAX
);
688 tcg_out_mov(s
, data_reg
, TCG_REG_EAX
);
689 tcg_out_mov(s
, data_reg2
, TCG_REG_EDX
);
696 label2_ptr
= s
->code_ptr
;
700 *label1_ptr
= s
->code_ptr
- label1_ptr
- 1;
703 tcg_out_modrm_offset(s
, 0x03, r0
, r1
, offsetof(CPUTLBEntry
, addend
) -
704 offsetof(CPUTLBEntry
, addr_read
));
709 #ifdef TARGET_WORDS_BIGENDIAN
717 tcg_out_modrm_offset(s
, 0xb6 | P_EXT
, data_reg
, r0
, GUEST_BASE
);
721 tcg_out_modrm_offset(s
, 0xbe | P_EXT
, data_reg
, r0
, GUEST_BASE
);
725 tcg_out_modrm_offset(s
, 0xb7 | P_EXT
, data_reg
, r0
, GUEST_BASE
);
727 tcg_out_rolw_8(s
, data_reg
);
732 tcg_out_modrm_offset(s
, 0xbf | P_EXT
, data_reg
, r0
, GUEST_BASE
);
734 tcg_out_rolw_8(s
, data_reg
);
736 /* movswl data_reg, data_reg */
737 tcg_out_modrm(s
, 0xbf | P_EXT
, data_reg
, data_reg
);
741 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
743 tcg_out_bswap32(s
, data_reg
);
747 /* XXX: could be nicer */
748 if (r0
== data_reg
) {
752 tcg_out_mov(s
, r1
, r0
);
756 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
757 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg2
, r0
, GUEST_BASE
+ 4);
759 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
+ 4);
760 tcg_out_bswap32(s
, data_reg
);
762 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg2
, r0
, GUEST_BASE
);
763 tcg_out_bswap32(s
, data_reg2
);
770 #if defined(CONFIG_SOFTMMU)
772 *label2_ptr
= s
->code_ptr
- label2_ptr
- 1;
777 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
780 int addr_reg
, data_reg
, data_reg2
, r0
, r1
, mem_index
, s_bits
, bswap
;
781 #if defined(CONFIG_SOFTMMU)
782 uint8_t *label1_ptr
, *label2_ptr
;
784 #if TARGET_LONG_BITS == 64
785 #if defined(CONFIG_SOFTMMU)
797 #if TARGET_LONG_BITS == 64
807 #if defined(CONFIG_SOFTMMU)
808 tcg_out_mov(s
, r1
, addr_reg
);
810 tcg_out_mov(s
, r0
, addr_reg
);
812 tcg_out_shifti(s
, SHIFT_SHR
, r1
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
814 tcg_out_modrm(s
, 0x81, 4, r0
); /* andl $x, r0 */
815 tcg_out32(s
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
817 tcg_out_modrm(s
, 0x81, 4, r1
); /* andl $x, r1 */
818 tcg_out32(s
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
820 tcg_out_opc(s
, 0x8d); /* lea offset(r1, %ebp), r1 */
821 tcg_out8(s
, 0x80 | (r1
<< 3) | 0x04);
822 tcg_out8(s
, (5 << 3) | r1
);
823 tcg_out32(s
, offsetof(CPUState
, tlb_table
[mem_index
][0].addr_write
));
826 tcg_out_modrm_offset(s
, 0x3b, r0
, r1
, 0);
828 tcg_out_mov(s
, r0
, addr_reg
);
830 #if TARGET_LONG_BITS == 32
832 tcg_out8(s
, 0x70 + JCC_JE
);
833 label1_ptr
= s
->code_ptr
;
837 tcg_out8(s
, 0x70 + JCC_JNE
);
838 label3_ptr
= s
->code_ptr
;
841 /* cmp 4(r1), addr_reg2 */
842 tcg_out_modrm_offset(s
, 0x3b, addr_reg2
, r1
, 4);
845 tcg_out8(s
, 0x70 + JCC_JE
);
846 label1_ptr
= s
->code_ptr
;
850 *label3_ptr
= s
->code_ptr
- label3_ptr
- 1;
853 /* XXX: move that code at the end of the TB */
854 #if TARGET_LONG_BITS == 32
856 tcg_out_mov(s
, TCG_REG_EDX
, data_reg
);
857 tcg_out_mov(s
, TCG_REG_ECX
, data_reg2
);
858 tcg_out8(s
, 0x6a); /* push Ib */
859 tcg_out8(s
, mem_index
);
861 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
862 (tcg_target_long
)s
->code_ptr
- 4);
863 tcg_out_addi(s
, TCG_REG_ESP
, 4);
868 tcg_out_modrm(s
, 0xb6 | P_EXT
, TCG_REG_EDX
, data_reg
);
872 tcg_out_modrm(s
, 0xb7 | P_EXT
, TCG_REG_EDX
, data_reg
);
875 tcg_out_mov(s
, TCG_REG_EDX
, data_reg
);
878 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_ECX
, mem_index
);
880 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
881 (tcg_target_long
)s
->code_ptr
- 4);
885 tcg_out_mov(s
, TCG_REG_EDX
, addr_reg2
);
886 tcg_out8(s
, 0x6a); /* push Ib */
887 tcg_out8(s
, mem_index
);
888 tcg_out_opc(s
, 0x50 + data_reg2
); /* push */
889 tcg_out_opc(s
, 0x50 + data_reg
); /* push */
891 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
892 (tcg_target_long
)s
->code_ptr
- 4);
893 tcg_out_addi(s
, TCG_REG_ESP
, 12);
895 tcg_out_mov(s
, TCG_REG_EDX
, addr_reg2
);
899 tcg_out_modrm(s
, 0xb6 | P_EXT
, TCG_REG_ECX
, data_reg
);
903 tcg_out_modrm(s
, 0xb7 | P_EXT
, TCG_REG_ECX
, data_reg
);
906 tcg_out_mov(s
, TCG_REG_ECX
, data_reg
);
909 tcg_out8(s
, 0x6a); /* push Ib */
910 tcg_out8(s
, mem_index
);
912 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
913 (tcg_target_long
)s
->code_ptr
- 4);
914 tcg_out_addi(s
, TCG_REG_ESP
, 4);
920 label2_ptr
= s
->code_ptr
;
924 *label1_ptr
= s
->code_ptr
- label1_ptr
- 1;
927 tcg_out_modrm_offset(s
, 0x03, r0
, r1
, offsetof(CPUTLBEntry
, addend
) -
928 offsetof(CPUTLBEntry
, addr_write
));
933 #ifdef TARGET_WORDS_BIGENDIAN
940 tcg_out_modrm_offset(s
, OPC_MOVB_EvGv
, data_reg
, r0
, GUEST_BASE
);
944 tcg_out_mov(s
, r1
, data_reg
);
945 tcg_out_rolw_8(s
, r1
);
950 tcg_out_modrm_offset(s
, OPC_MOVL_EvGv
, data_reg
, r0
, GUEST_BASE
);
954 tcg_out_mov(s
, r1
, data_reg
);
955 tcg_out_bswap32(s
, r1
);
958 tcg_out_st(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
962 tcg_out_mov(s
, r1
, data_reg2
);
963 tcg_out_bswap32(s
, r1
);
964 tcg_out_st(s
, TCG_TYPE_I32
, r1
, r0
, GUEST_BASE
);
965 tcg_out_mov(s
, r1
, data_reg
);
966 tcg_out_bswap32(s
, r1
);
967 tcg_out_st(s
, TCG_TYPE_I32
, r1
, r0
, GUEST_BASE
+ 4);
969 tcg_out_st(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
970 tcg_out_st(s
, TCG_TYPE_I32
, data_reg2
, r0
, GUEST_BASE
+ 4);
977 #if defined(CONFIG_SOFTMMU)
979 *label2_ptr
= s
->code_ptr
- label2_ptr
- 1;
983 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
984 const TCGArg
*args
, const int *const_args
)
989 case INDEX_op_exit_tb
:
990 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_EAX
, args
[0]);
991 tcg_out8(s
, 0xe9); /* jmp tb_ret_addr */
992 tcg_out32(s
, tb_ret_addr
- s
->code_ptr
- 4);
994 case INDEX_op_goto_tb
:
995 if (s
->tb_jmp_offset
) {
996 /* direct jump method */
997 tcg_out8(s
, 0xe9); /* jmp im */
998 s
->tb_jmp_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1001 /* indirect jump method */
1003 tcg_out_modrm_offset(s
, 0xff, 4, -1,
1004 (tcg_target_long
)(s
->tb_next
+ args
[0]));
1006 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1009 if (const_args
[0]) {
1011 tcg_out32(s
, args
[0] - (tcg_target_long
)s
->code_ptr
- 4);
1013 tcg_out_modrm(s
, 0xff, 2, args
[0]);
1017 if (const_args
[0]) {
1019 tcg_out32(s
, args
[0] - (tcg_target_long
)s
->code_ptr
- 4);
1021 tcg_out_modrm(s
, 0xff, 4, args
[0]);
1025 tcg_out_jxx(s
, JCC_JMP
, args
[0], 0);
1027 case INDEX_op_movi_i32
:
1028 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1030 case INDEX_op_ld8u_i32
:
1032 tcg_out_modrm_offset(s
, 0xb6 | P_EXT
, args
[0], args
[1], args
[2]);
1034 case INDEX_op_ld8s_i32
:
1036 tcg_out_modrm_offset(s
, 0xbe | P_EXT
, args
[0], args
[1], args
[2]);
1038 case INDEX_op_ld16u_i32
:
1040 tcg_out_modrm_offset(s
, 0xb7 | P_EXT
, args
[0], args
[1], args
[2]);
1042 case INDEX_op_ld16s_i32
:
1044 tcg_out_modrm_offset(s
, 0xbf | P_EXT
, args
[0], args
[1], args
[2]);
1046 case INDEX_op_ld_i32
:
1047 tcg_out_ld(s
, TCG_TYPE_I32
, args
[0], args
[1], args
[2]);
1049 case INDEX_op_st8_i32
:
1051 tcg_out_modrm_offset(s
, OPC_MOVB_EvGv
, args
[0], args
[1], args
[2]);
1053 case INDEX_op_st16_i32
:
1056 tcg_out_modrm_offset(s
, OPC_MOVL_EvGv
, args
[0], args
[1], args
[2]);
1058 case INDEX_op_st_i32
:
1059 tcg_out_st(s
, TCG_TYPE_I32
, args
[0], args
[1], args
[2]);
1061 case INDEX_op_sub_i32
:
1064 case INDEX_op_and_i32
:
1067 case INDEX_op_or_i32
:
1070 case INDEX_op_xor_i32
:
1073 case INDEX_op_add_i32
:
1076 if (const_args
[2]) {
1077 tgen_arithi(s
, c
, args
[0], args
[2], 0);
1079 tcg_out_modrm(s
, 0x01 | (c
<< 3), args
[2], args
[0]);
1082 case INDEX_op_mul_i32
:
1083 if (const_args
[2]) {
1086 if (val
== (int8_t)val
) {
1087 tcg_out_modrm(s
, 0x6b, args
[0], args
[0]);
1090 tcg_out_modrm(s
, 0x69, args
[0], args
[0]);
1094 tcg_out_modrm(s
, 0xaf | P_EXT
, args
[0], args
[2]);
1097 case INDEX_op_mulu2_i32
:
1098 tcg_out_modrm(s
, 0xf7, 4, args
[3]);
1100 case INDEX_op_div2_i32
:
1101 tcg_out_modrm(s
, 0xf7, 7, args
[4]);
1103 case INDEX_op_divu2_i32
:
1104 tcg_out_modrm(s
, 0xf7, 6, args
[4]);
1106 case INDEX_op_shl_i32
:
1109 if (const_args
[2]) {
1110 tcg_out_shifti(s
, c
, args
[0], args
[2]);
1112 tcg_out_modrm(s
, OPC_SHIFT_cl
, c
, args
[0]);
1115 case INDEX_op_shr_i32
:
1118 case INDEX_op_sar_i32
:
1121 case INDEX_op_rotl_i32
:
1124 case INDEX_op_rotr_i32
:
1128 case INDEX_op_add2_i32
:
1130 tgen_arithi(s
, ARITH_ADD
, args
[0], args
[4], 1);
1132 tcg_out_modrm(s
, 0x01 | (ARITH_ADD
<< 3), args
[4], args
[0]);
1134 tgen_arithi(s
, ARITH_ADC
, args
[1], args
[5], 1);
1136 tcg_out_modrm(s
, 0x01 | (ARITH_ADC
<< 3), args
[5], args
[1]);
1138 case INDEX_op_sub2_i32
:
1140 tgen_arithi(s
, ARITH_SUB
, args
[0], args
[4], 1);
1142 tcg_out_modrm(s
, 0x01 | (ARITH_SUB
<< 3), args
[4], args
[0]);
1144 tgen_arithi(s
, ARITH_SBB
, args
[1], args
[5], 1);
1146 tcg_out_modrm(s
, 0x01 | (ARITH_SBB
<< 3), args
[5], args
[1]);
1148 case INDEX_op_brcond_i32
:
1149 tcg_out_brcond(s
, args
[2], args
[0], args
[1], const_args
[1],
1152 case INDEX_op_brcond2_i32
:
1153 tcg_out_brcond2(s
, args
, const_args
, 0);
1156 case INDEX_op_bswap16_i32
:
1157 tcg_out_rolw_8(s
, args
[0]);
1159 case INDEX_op_bswap32_i32
:
1160 tcg_out_bswap32(s
, args
[0]);
1163 case INDEX_op_neg_i32
:
1164 tcg_out_modrm(s
, 0xf7, 3, args
[0]);
1167 case INDEX_op_not_i32
:
1168 tcg_out_modrm(s
, 0xf7, 2, args
[0]);
1171 case INDEX_op_ext8s_i32
:
1172 tcg_out_modrm(s
, 0xbe | P_EXT
, args
[0], args
[1]);
1174 case INDEX_op_ext16s_i32
:
1175 tcg_out_modrm(s
, 0xbf | P_EXT
, args
[0], args
[1]);
1177 case INDEX_op_ext8u_i32
:
1178 tcg_out_modrm(s
, 0xb6 | P_EXT
, args
[0], args
[1]);
1180 case INDEX_op_ext16u_i32
:
1181 tcg_out_modrm(s
, 0xb7 | P_EXT
, args
[0], args
[1]);
1184 case INDEX_op_setcond_i32
:
1185 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2], const_args
[2]);
1187 case INDEX_op_setcond2_i32
:
1188 tcg_out_setcond2(s
, args
, const_args
);
1191 case INDEX_op_qemu_ld8u
:
1192 tcg_out_qemu_ld(s
, args
, 0);
1194 case INDEX_op_qemu_ld8s
:
1195 tcg_out_qemu_ld(s
, args
, 0 | 4);
1197 case INDEX_op_qemu_ld16u
:
1198 tcg_out_qemu_ld(s
, args
, 1);
1200 case INDEX_op_qemu_ld16s
:
1201 tcg_out_qemu_ld(s
, args
, 1 | 4);
1203 case INDEX_op_qemu_ld32
:
1204 tcg_out_qemu_ld(s
, args
, 2);
1206 case INDEX_op_qemu_ld64
:
1207 tcg_out_qemu_ld(s
, args
, 3);
1210 case INDEX_op_qemu_st8
:
1211 tcg_out_qemu_st(s
, args
, 0);
1213 case INDEX_op_qemu_st16
:
1214 tcg_out_qemu_st(s
, args
, 1);
1216 case INDEX_op_qemu_st32
:
1217 tcg_out_qemu_st(s
, args
, 2);
1219 case INDEX_op_qemu_st64
:
1220 tcg_out_qemu_st(s
, args
, 3);
1228 static const TCGTargetOpDef x86_op_defs
[] = {
1229 { INDEX_op_exit_tb
, { } },
1230 { INDEX_op_goto_tb
, { } },
1231 { INDEX_op_call
, { "ri" } },
1232 { INDEX_op_jmp
, { "ri" } },
1233 { INDEX_op_br
, { } },
1234 { INDEX_op_mov_i32
, { "r", "r" } },
1235 { INDEX_op_movi_i32
, { "r" } },
1236 { INDEX_op_ld8u_i32
, { "r", "r" } },
1237 { INDEX_op_ld8s_i32
, { "r", "r" } },
1238 { INDEX_op_ld16u_i32
, { "r", "r" } },
1239 { INDEX_op_ld16s_i32
, { "r", "r" } },
1240 { INDEX_op_ld_i32
, { "r", "r" } },
1241 { INDEX_op_st8_i32
, { "q", "r" } },
1242 { INDEX_op_st16_i32
, { "r", "r" } },
1243 { INDEX_op_st_i32
, { "r", "r" } },
1245 { INDEX_op_add_i32
, { "r", "0", "ri" } },
1246 { INDEX_op_sub_i32
, { "r", "0", "ri" } },
1247 { INDEX_op_mul_i32
, { "r", "0", "ri" } },
1248 { INDEX_op_mulu2_i32
, { "a", "d", "a", "r" } },
1249 { INDEX_op_div2_i32
, { "a", "d", "0", "1", "r" } },
1250 { INDEX_op_divu2_i32
, { "a", "d", "0", "1", "r" } },
1251 { INDEX_op_and_i32
, { "r", "0", "ri" } },
1252 { INDEX_op_or_i32
, { "r", "0", "ri" } },
1253 { INDEX_op_xor_i32
, { "r", "0", "ri" } },
1255 { INDEX_op_shl_i32
, { "r", "0", "ci" } },
1256 { INDEX_op_shr_i32
, { "r", "0", "ci" } },
1257 { INDEX_op_sar_i32
, { "r", "0", "ci" } },
1258 { INDEX_op_rotl_i32
, { "r", "0", "ci" } },
1259 { INDEX_op_rotr_i32
, { "r", "0", "ci" } },
1261 { INDEX_op_brcond_i32
, { "r", "ri" } },
1263 { INDEX_op_add2_i32
, { "r", "r", "0", "1", "ri", "ri" } },
1264 { INDEX_op_sub2_i32
, { "r", "r", "0", "1", "ri", "ri" } },
1265 { INDEX_op_brcond2_i32
, { "r", "r", "ri", "ri" } },
1267 { INDEX_op_bswap16_i32
, { "r", "0" } },
1268 { INDEX_op_bswap32_i32
, { "r", "0" } },
1270 { INDEX_op_neg_i32
, { "r", "0" } },
1272 { INDEX_op_not_i32
, { "r", "0" } },
1274 { INDEX_op_ext8s_i32
, { "r", "q" } },
1275 { INDEX_op_ext16s_i32
, { "r", "r" } },
1276 { INDEX_op_ext8u_i32
, { "r", "q"} },
1277 { INDEX_op_ext16u_i32
, { "r", "r"} },
1279 { INDEX_op_setcond_i32
, { "q", "r", "ri" } },
1280 { INDEX_op_setcond2_i32
, { "r", "r", "r", "ri", "ri" } },
1282 #if TARGET_LONG_BITS == 32
1283 { INDEX_op_qemu_ld8u
, { "r", "L" } },
1284 { INDEX_op_qemu_ld8s
, { "r", "L" } },
1285 { INDEX_op_qemu_ld16u
, { "r", "L" } },
1286 { INDEX_op_qemu_ld16s
, { "r", "L" } },
1287 { INDEX_op_qemu_ld32
, { "r", "L" } },
1288 { INDEX_op_qemu_ld64
, { "r", "r", "L" } },
1290 { INDEX_op_qemu_st8
, { "cb", "L" } },
1291 { INDEX_op_qemu_st16
, { "L", "L" } },
1292 { INDEX_op_qemu_st32
, { "L", "L" } },
1293 { INDEX_op_qemu_st64
, { "L", "L", "L" } },
1295 { INDEX_op_qemu_ld8u
, { "r", "L", "L" } },
1296 { INDEX_op_qemu_ld8s
, { "r", "L", "L" } },
1297 { INDEX_op_qemu_ld16u
, { "r", "L", "L" } },
1298 { INDEX_op_qemu_ld16s
, { "r", "L", "L" } },
1299 { INDEX_op_qemu_ld32
, { "r", "L", "L" } },
1300 { INDEX_op_qemu_ld64
, { "r", "r", "L", "L" } },
1302 { INDEX_op_qemu_st8
, { "cb", "L", "L" } },
1303 { INDEX_op_qemu_st16
, { "L", "L", "L" } },
1304 { INDEX_op_qemu_st32
, { "L", "L", "L" } },
1305 { INDEX_op_qemu_st64
, { "L", "L", "L", "L" } },
1310 static int tcg_target_callee_save_regs
[] = {
1311 /* TCG_REG_EBP, */ /* currently used for the global env, so no
1318 static inline void tcg_out_push(TCGContext
*s
, int reg
)
1320 tcg_out_opc(s
, 0x50 + reg
);
1323 static inline void tcg_out_pop(TCGContext
*s
, int reg
)
1325 tcg_out_opc(s
, 0x58 + reg
);
1328 /* Generate global QEMU prologue and epilogue code */
1329 void tcg_target_qemu_prologue(TCGContext
*s
)
1331 int i
, frame_size
, push_size
, stack_addend
;
1334 /* save all callee saved registers */
1335 for(i
= 0; i
< ARRAY_SIZE(tcg_target_callee_save_regs
); i
++) {
1336 tcg_out_push(s
, tcg_target_callee_save_regs
[i
]);
1338 /* reserve some stack space */
1339 push_size
= 4 + ARRAY_SIZE(tcg_target_callee_save_regs
) * 4;
1340 frame_size
= push_size
+ TCG_STATIC_CALL_ARGS_SIZE
;
1341 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1342 ~(TCG_TARGET_STACK_ALIGN
- 1);
1343 stack_addend
= frame_size
- push_size
;
1344 tcg_out_addi(s
, TCG_REG_ESP
, -stack_addend
);
1346 tcg_out_modrm(s
, 0xff, 4, TCG_REG_EAX
); /* jmp *%eax */
1349 tb_ret_addr
= s
->code_ptr
;
1350 tcg_out_addi(s
, TCG_REG_ESP
, stack_addend
);
1351 for(i
= ARRAY_SIZE(tcg_target_callee_save_regs
) - 1; i
>= 0; i
--) {
1352 tcg_out_pop(s
, tcg_target_callee_save_regs
[i
]);
1354 tcg_out8(s
, 0xc3); /* ret */
1357 void tcg_target_init(TCGContext
*s
)
1359 #if !defined(CONFIG_USER_ONLY)
1361 if ((1 << CPU_TLB_ENTRY_BITS
) != sizeof(CPUTLBEntry
))
1365 tcg_regset_set32(tcg_target_available_regs
[TCG_TYPE_I32
], 0, 0xff);
1367 tcg_regset_clear(tcg_target_call_clobber_regs
);
1368 tcg_regset_set_reg(tcg_target_call_clobber_regs
, TCG_REG_EAX
);
1369 tcg_regset_set_reg(tcg_target_call_clobber_regs
, TCG_REG_EDX
);
1370 tcg_regset_set_reg(tcg_target_call_clobber_regs
, TCG_REG_ECX
);
1372 tcg_regset_clear(s
->reserved_regs
);
1373 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ESP
);
1375 tcg_add_target_add_op_defs(x86_op_defs
);