2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
26 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
38 static const int tcg_target_reg_alloc_order
[] = {
48 static const int tcg_target_call_iarg_regs
[3] = { TCG_REG_EAX
, TCG_REG_EDX
, TCG_REG_ECX
};
49 static const int tcg_target_call_oarg_regs
[2] = { TCG_REG_EAX
, TCG_REG_EDX
};
/* Address of the common translation-block epilogue: code generated for
 * INDEX_op_exit_tb jumps here, and tcg_target_qemu_prologue() fills it
 * in when it emits the epilogue.
 */
static uint8_t *tb_ret_addr;
53 static void patch_reloc(uint8_t *code_ptr
, int type
,
54 tcg_target_long value
, tcg_target_long addend
)
59 *(uint32_t *)code_ptr
= value
;
62 *(uint32_t *)code_ptr
= value
- (long)code_ptr
;
65 value
-= (long)code_ptr
;
66 if (value
!= (int8_t)value
) {
69 *(uint8_t *)code_ptr
= value
;
76 /* maximum number of register used for input function arguments */
77 static inline int tcg_target_get_call_iarg_regs_count(int flags
)
79 flags
&= TCG_CALL_TYPE_MASK
;
81 case TCG_CALL_TYPE_STD
:
83 case TCG_CALL_TYPE_REGPARM_1
:
84 case TCG_CALL_TYPE_REGPARM_2
:
85 case TCG_CALL_TYPE_REGPARM
:
86 return flags
- TCG_CALL_TYPE_REGPARM_1
+ 1;
92 /* parse target specific constraints */
93 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
100 ct
->ct
|= TCG_CT_REG
;
101 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EAX
);
104 ct
->ct
|= TCG_CT_REG
;
105 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EBX
);
108 ct
->ct
|= TCG_CT_REG
;
109 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_ECX
);
112 ct
->ct
|= TCG_CT_REG
;
113 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EDX
);
116 ct
->ct
|= TCG_CT_REG
;
117 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_ESI
);
120 ct
->ct
|= TCG_CT_REG
;
121 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_EDI
);
124 ct
->ct
|= TCG_CT_REG
;
125 tcg_regset_set32(ct
->u
.regs
, 0, 0xf);
128 ct
->ct
|= TCG_CT_REG
;
129 tcg_regset_set32(ct
->u
.regs
, 0, 0xff);
132 /* qemu_ld/st address constraint */
134 ct
->ct
|= TCG_CT_REG
;
135 tcg_regset_set32(ct
->u
.regs
, 0, 0xff);
136 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_EAX
);
137 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_EDX
);
147 /* test if a constant matches the constraint */
148 static inline int tcg_target_const_match(tcg_target_long val
,
149 const TCGArgConstraint
*arg_ct
)
153 if (ct
& TCG_CT_CONST
)
#define P_EXT           0x100           /* 0x0f opcode prefix */

#define OPC_BSWAP       (0xc8 | P_EXT)
#define OPC_JCC_long    (0x80 | P_EXT)  /* ... plus condition code */
#define OPC_JCC_short   (0x70)          /* ... plus condition code */
#define OPC_JMP_long    (0xe9)
#define OPC_JMP_short   (0xeb)
#define OPC_MOVB_EvGv   (0x88)          /* stores, more or less */
#define OPC_MOVL_EvGv   (0x89)          /* stores, more or less */
#define OPC_MOVL_GvEv   (0x8b)          /* loads, more or less */
#define OPC_MOVSBL      (0xbe | P_EXT)
#define OPC_MOVSWL      (0xbf | P_EXT)
#define OPC_MOVZBL      (0xb6 | P_EXT)
#define OPC_MOVZWL      (0xb7 | P_EXT)
#define OPC_SHIFT_1     (0xd1)
#define OPC_SHIFT_Ib    (0xc1)
#define OPC_SHIFT_cl    (0xd3)

/* Group 1 opcode extensions for 0x80-0x83. */
/* Group 2 opcode extensions for 0xc0, 0xc1, 0xd0-0xd3. */

/* Group 5 opcode extensions for 0xff. */
#define EXT_JMPN_Ev     4
197 /* Condition codes to be added to OPC_JCC_{long,short}. */
216 static const uint8_t tcg_cond_to_jcc
[10] = {
217 [TCG_COND_EQ
] = JCC_JE
,
218 [TCG_COND_NE
] = JCC_JNE
,
219 [TCG_COND_LT
] = JCC_JL
,
220 [TCG_COND_GE
] = JCC_JGE
,
221 [TCG_COND_LE
] = JCC_JLE
,
222 [TCG_COND_GT
] = JCC_JG
,
223 [TCG_COND_LTU
] = JCC_JB
,
224 [TCG_COND_GEU
] = JCC_JAE
,
225 [TCG_COND_LEU
] = JCC_JBE
,
226 [TCG_COND_GTU
] = JCC_JA
,
229 static inline void tcg_out_opc(TCGContext
*s
, int opc
)
236 static inline void tcg_out_modrm(TCGContext
*s
, int opc
, int r
, int rm
)
239 tcg_out8(s
, 0xc0 | (r
<< 3) | rm
);
242 /* rm == -1 means no register index */
243 static inline void tcg_out_modrm_offset(TCGContext
*s
, int opc
, int r
, int rm
,
248 tcg_out8(s
, 0x05 | (r
<< 3));
249 tcg_out32(s
, offset
);
250 } else if (offset
== 0 && rm
!= TCG_REG_EBP
) {
251 if (rm
== TCG_REG_ESP
) {
252 tcg_out8(s
, 0x04 | (r
<< 3));
255 tcg_out8(s
, 0x00 | (r
<< 3) | rm
);
257 } else if ((int8_t)offset
== offset
) {
258 if (rm
== TCG_REG_ESP
) {
259 tcg_out8(s
, 0x44 | (r
<< 3));
262 tcg_out8(s
, 0x40 | (r
<< 3) | rm
);
266 if (rm
== TCG_REG_ESP
) {
267 tcg_out8(s
, 0x84 | (r
<< 3));
270 tcg_out8(s
, 0x80 | (r
<< 3) | rm
);
272 tcg_out32(s
, offset
);
276 static inline void tcg_out_mov(TCGContext
*s
, int ret
, int arg
)
279 tcg_out_modrm(s
, OPC_MOVL_GvEv
, ret
, arg
);
283 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
284 int ret
, int32_t arg
)
288 tcg_out_modrm(s
, 0x01 | (ARITH_XOR
<< 3), ret
, ret
);
290 tcg_out8(s
, 0xb8 + ret
);
295 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, int ret
,
296 int arg1
, tcg_target_long arg2
)
298 tcg_out_modrm_offset(s
, OPC_MOVL_GvEv
, ret
, arg1
, arg2
);
301 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, int arg
,
302 int arg1
, tcg_target_long arg2
)
304 tcg_out_modrm_offset(s
, OPC_MOVL_EvGv
, arg
, arg1
, arg2
);
307 static void tcg_out_shifti(TCGContext
*s
, int subopc
, int reg
, int count
)
310 tcg_out_modrm(s
, OPC_SHIFT_1
, subopc
, reg
);
312 tcg_out_modrm(s
, OPC_SHIFT_Ib
, subopc
, reg
);
317 static inline void tcg_out_bswap32(TCGContext
*s
, int reg
)
319 tcg_out_opc(s
, OPC_BSWAP
+ reg
);
322 static inline void tcg_out_rolw_8(TCGContext
*s
, int reg
)
325 tcg_out_shifti(s
, SHIFT_ROL
, reg
, 8);
328 static inline void tcg_out_ext8u(TCGContext
*s
, int dest
, int src
)
332 tcg_out_modrm(s
, OPC_MOVZBL
, dest
, src
);
335 static void tcg_out_ext8s(TCGContext
*s
, int dest
, int src
)
339 tcg_out_modrm(s
, OPC_MOVSBL
, dest
, src
);
342 static inline void tcg_out_ext16u(TCGContext
*s
, int dest
, int src
)
345 tcg_out_modrm(s
, OPC_MOVZWL
, dest
, src
);
348 static inline void tcg_out_ext16s(TCGContext
*s
, int dest
, int src
)
351 tcg_out_modrm(s
, OPC_MOVSWL
, dest
, src
);
354 static inline void tgen_arithi(TCGContext
*s
, int c
, int r0
, int32_t val
, int cf
)
356 if (!cf
&& ((c
== ARITH_ADD
&& val
== 1) || (c
== ARITH_SUB
&& val
== -1))) {
358 tcg_out_opc(s
, 0x40 + r0
);
359 } else if (!cf
&& ((c
== ARITH_ADD
&& val
== -1) || (c
== ARITH_SUB
&& val
== 1))) {
361 tcg_out_opc(s
, 0x48 + r0
);
362 } else if (val
== (int8_t)val
) {
363 tcg_out_modrm(s
, 0x83, c
, r0
);
365 } else if (c
== ARITH_AND
&& val
== 0xffu
&& r0
< 4) {
366 tcg_out_ext8u(s
, r0
, r0
);
367 } else if (c
== ARITH_AND
&& val
== 0xffffu
) {
368 tcg_out_ext16u(s
, r0
, r0
);
370 tcg_out_modrm(s
, 0x81, c
, r0
);
375 static void tcg_out_addi(TCGContext
*s
, int reg
, tcg_target_long val
)
378 tgen_arithi(s
, ARITH_ADD
, reg
, val
, 0);
381 /* Use SMALL != 0 to force a short forward branch. */
382 static void tcg_out_jxx(TCGContext
*s
, int opc
, int label_index
, int small
)
385 TCGLabel
*l
= &s
->labels
[label_index
];
388 val
= l
->u
.value
- (tcg_target_long
)s
->code_ptr
;
390 if ((int8_t)val1
== val1
) {
392 tcg_out8(s
, OPC_JMP_short
);
394 tcg_out8(s
, OPC_JCC_short
+ opc
);
402 tcg_out8(s
, OPC_JMP_long
);
403 tcg_out32(s
, val
- 5);
405 tcg_out_opc(s
, OPC_JCC_long
+ opc
);
406 tcg_out32(s
, val
- 6);
411 tcg_out8(s
, OPC_JMP_short
);
413 tcg_out8(s
, OPC_JCC_short
+ opc
);
415 tcg_out_reloc(s
, s
->code_ptr
, R_386_PC8
, label_index
, -1);
419 tcg_out8(s
, OPC_JMP_long
);
421 tcg_out_opc(s
, OPC_JCC_long
+ opc
);
423 tcg_out_reloc(s
, s
->code_ptr
, R_386_PC32
, label_index
, -4);
428 static void tcg_out_cmp(TCGContext
*s
, TCGArg arg1
, TCGArg arg2
,
434 tcg_out_modrm(s
, 0x85, arg1
, arg1
);
436 tgen_arithi(s
, ARITH_CMP
, arg1
, arg2
, 0);
439 tcg_out_modrm(s
, 0x01 | (ARITH_CMP
<< 3), arg2
, arg1
);
443 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
,
444 TCGArg arg1
, TCGArg arg2
, int const_arg2
,
445 int label_index
, int small
)
447 tcg_out_cmp(s
, arg1
, arg2
, const_arg2
);
448 tcg_out_jxx(s
, tcg_cond_to_jcc
[cond
], label_index
, small
);
451 /* XXX: we implement it at the target level to avoid having to
452 handle cross basic blocks temporaries */
453 static void tcg_out_brcond2(TCGContext
*s
, const TCGArg
*args
,
454 const int *const_args
, int small
)
457 label_next
= gen_new_label();
460 tcg_out_brcond(s
, TCG_COND_NE
, args
[0], args
[2], const_args
[2],
462 tcg_out_brcond(s
, TCG_COND_EQ
, args
[1], args
[3], const_args
[3],
466 tcg_out_brcond(s
, TCG_COND_NE
, args
[0], args
[2], const_args
[2],
468 tcg_out_brcond(s
, TCG_COND_NE
, args
[1], args
[3], const_args
[3],
472 tcg_out_brcond(s
, TCG_COND_LT
, args
[1], args
[3], const_args
[3],
474 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
475 tcg_out_brcond(s
, TCG_COND_LTU
, args
[0], args
[2], const_args
[2],
479 tcg_out_brcond(s
, TCG_COND_LT
, args
[1], args
[3], const_args
[3],
481 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
482 tcg_out_brcond(s
, TCG_COND_LEU
, args
[0], args
[2], const_args
[2],
486 tcg_out_brcond(s
, TCG_COND_GT
, args
[1], args
[3], const_args
[3],
488 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
489 tcg_out_brcond(s
, TCG_COND_GTU
, args
[0], args
[2], const_args
[2],
493 tcg_out_brcond(s
, TCG_COND_GT
, args
[1], args
[3], const_args
[3],
495 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
496 tcg_out_brcond(s
, TCG_COND_GEU
, args
[0], args
[2], const_args
[2],
500 tcg_out_brcond(s
, TCG_COND_LTU
, args
[1], args
[3], const_args
[3],
502 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
503 tcg_out_brcond(s
, TCG_COND_LTU
, args
[0], args
[2], const_args
[2],
507 tcg_out_brcond(s
, TCG_COND_LTU
, args
[1], args
[3], const_args
[3],
509 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
510 tcg_out_brcond(s
, TCG_COND_LEU
, args
[0], args
[2], const_args
[2],
514 tcg_out_brcond(s
, TCG_COND_GTU
, args
[1], args
[3], const_args
[3],
516 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
517 tcg_out_brcond(s
, TCG_COND_GTU
, args
[0], args
[2], const_args
[2],
521 tcg_out_brcond(s
, TCG_COND_GTU
, args
[1], args
[3], const_args
[3],
523 tcg_out_jxx(s
, JCC_JNE
, label_next
, 1);
524 tcg_out_brcond(s
, TCG_COND_GEU
, args
[0], args
[2], const_args
[2],
530 tcg_out_label(s
, label_next
, (tcg_target_long
)s
->code_ptr
);
533 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, TCGArg dest
,
534 TCGArg arg1
, TCGArg arg2
, int const_arg2
)
536 tcg_out_cmp(s
, arg1
, arg2
, const_arg2
);
538 tcg_out_modrm(s
, 0x90 | tcg_cond_to_jcc
[cond
] | P_EXT
, 0, dest
);
539 tgen_arithi(s
, ARITH_AND
, dest
, 0xff, 0);
542 static void tcg_out_setcond2(TCGContext
*s
, const TCGArg
*args
,
543 const int *const_args
)
546 int label_true
, label_over
;
548 memcpy(new_args
, args
+1, 5*sizeof(TCGArg
));
550 if (args
[0] == args
[1] || args
[0] == args
[2]
551 || (!const_args
[3] && args
[0] == args
[3])
552 || (!const_args
[4] && args
[0] == args
[4])) {
553 /* When the destination overlaps with one of the argument
554 registers, don't do anything tricky. */
555 label_true
= gen_new_label();
556 label_over
= gen_new_label();
558 new_args
[5] = label_true
;
559 tcg_out_brcond2(s
, new_args
, const_args
+1, 1);
561 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], 0);
562 tcg_out_jxx(s
, JCC_JMP
, label_over
, 1);
563 tcg_out_label(s
, label_true
, (tcg_target_long
)s
->code_ptr
);
565 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], 1);
566 tcg_out_label(s
, label_over
, (tcg_target_long
)s
->code_ptr
);
568 /* When the destination does not overlap one of the arguments,
569 clear the destination first, jump if cond false, and emit an
570 increment in the true case. This results in smaller code. */
572 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], 0);
574 label_over
= gen_new_label();
575 new_args
[4] = tcg_invert_cond(new_args
[4]);
576 new_args
[5] = label_over
;
577 tcg_out_brcond2(s
, new_args
, const_args
+1, 1);
579 tgen_arithi(s
, ARITH_ADD
, args
[0], 1, 0);
580 tcg_out_label(s
, label_over
, (tcg_target_long
)s
->code_ptr
);
584 #if defined(CONFIG_SOFTMMU)
586 #include "../../softmmu_defs.h"
588 static void *qemu_ld_helpers
[4] = {
595 static void *qemu_st_helpers
[4] = {
603 #ifndef CONFIG_USER_ONLY
607 /* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
608 EAX. It will be useful once fixed registers globals are less
610 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
613 int addr_reg
, data_reg
, data_reg2
, r0
, r1
, mem_index
, s_bits
, bswap
;
614 #if defined(CONFIG_SOFTMMU)
615 uint8_t *label1_ptr
, *label2_ptr
;
617 #if TARGET_LONG_BITS == 64
618 #if defined(CONFIG_SOFTMMU)
630 #if TARGET_LONG_BITS == 64
639 #if defined(CONFIG_SOFTMMU)
640 tcg_out_mov(s
, r1
, addr_reg
);
642 tcg_out_mov(s
, r0
, addr_reg
);
644 tcg_out_shifti(s
, SHIFT_SHR
, r1
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
646 tcg_out_modrm(s
, 0x81, 4, r0
); /* andl $x, r0 */
647 tcg_out32(s
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
649 tcg_out_modrm(s
, 0x81, 4, r1
); /* andl $x, r1 */
650 tcg_out32(s
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
652 tcg_out_opc(s
, 0x8d); /* lea offset(r1, %ebp), r1 */
653 tcg_out8(s
, 0x80 | (r1
<< 3) | 0x04);
654 tcg_out8(s
, (5 << 3) | r1
);
655 tcg_out32(s
, offsetof(CPUState
, tlb_table
[mem_index
][0].addr_read
));
658 tcg_out_modrm_offset(s
, 0x3b, r0
, r1
, 0);
660 tcg_out_mov(s
, r0
, addr_reg
);
662 #if TARGET_LONG_BITS == 32
664 tcg_out8(s
, OPC_JCC_short
+ JCC_JE
);
665 label1_ptr
= s
->code_ptr
;
669 tcg_out8(s
, OPC_JCC_short
+ JCC_JNE
);
670 label3_ptr
= s
->code_ptr
;
673 /* cmp 4(r1), addr_reg2 */
674 tcg_out_modrm_offset(s
, 0x3b, addr_reg2
, r1
, 4);
677 tcg_out8(s
, OPC_JCC_short
+ JCC_JE
);
678 label1_ptr
= s
->code_ptr
;
682 *label3_ptr
= s
->code_ptr
- label3_ptr
- 1;
685 /* XXX: move that code at the end of the TB */
686 #if TARGET_LONG_BITS == 32
687 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_EDX
, mem_index
);
689 tcg_out_mov(s
, TCG_REG_EDX
, addr_reg2
);
690 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_ECX
, mem_index
);
693 tcg_out32(s
, (tcg_target_long
)qemu_ld_helpers
[s_bits
] -
694 (tcg_target_long
)s
->code_ptr
- 4);
698 tcg_out_ext8s(s
, data_reg
, TCG_REG_EAX
);
701 tcg_out_ext16s(s
, data_reg
, TCG_REG_EAX
);
704 tcg_out_ext8u(s
, data_reg
, TCG_REG_EAX
);
707 tcg_out_ext16u(s
, data_reg
, TCG_REG_EAX
);
711 tcg_out_mov(s
, data_reg
, TCG_REG_EAX
);
714 if (data_reg
== TCG_REG_EDX
) {
715 tcg_out_opc(s
, 0x90 + TCG_REG_EDX
); /* xchg %edx, %eax */
716 tcg_out_mov(s
, data_reg2
, TCG_REG_EAX
);
718 tcg_out_mov(s
, data_reg
, TCG_REG_EAX
);
719 tcg_out_mov(s
, data_reg2
, TCG_REG_EDX
);
725 tcg_out8(s
, OPC_JMP_short
);
726 label2_ptr
= s
->code_ptr
;
730 *label1_ptr
= s
->code_ptr
- label1_ptr
- 1;
733 tcg_out_modrm_offset(s
, 0x03, r0
, r1
, offsetof(CPUTLBEntry
, addend
) -
734 offsetof(CPUTLBEntry
, addr_read
));
739 #ifdef TARGET_WORDS_BIGENDIAN
747 tcg_out_modrm_offset(s
, OPC_MOVZBL
, data_reg
, r0
, GUEST_BASE
);
751 tcg_out_modrm_offset(s
, OPC_MOVSBL
, data_reg
, r0
, GUEST_BASE
);
755 tcg_out_modrm_offset(s
, OPC_MOVZWL
, data_reg
, r0
, GUEST_BASE
);
757 tcg_out_rolw_8(s
, data_reg
);
762 tcg_out_modrm_offset(s
, OPC_MOVSWL
, data_reg
, r0
, GUEST_BASE
);
764 tcg_out_rolw_8(s
, data_reg
);
766 /* movswl data_reg, data_reg */
767 tcg_out_modrm(s
, OPC_MOVSWL
, data_reg
, data_reg
);
771 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
773 tcg_out_bswap32(s
, data_reg
);
779 data_reg
= data_reg2
;
782 if (r0
!= data_reg
) {
783 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
784 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg2
, r0
, GUEST_BASE
+ 4);
786 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg2
, r0
, GUEST_BASE
+ 4);
787 tcg_out_ld(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
790 tcg_out_bswap32(s
, data_reg
);
791 tcg_out_bswap32(s
, data_reg2
);
798 #if defined(CONFIG_SOFTMMU)
800 *label2_ptr
= s
->code_ptr
- label2_ptr
- 1;
805 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
808 int addr_reg
, data_reg
, data_reg2
, r0
, r1
, mem_index
, s_bits
, bswap
;
809 #if defined(CONFIG_SOFTMMU)
810 uint8_t *label1_ptr
, *label2_ptr
;
812 #if TARGET_LONG_BITS == 64
813 #if defined(CONFIG_SOFTMMU)
825 #if TARGET_LONG_BITS == 64
835 #if defined(CONFIG_SOFTMMU)
836 tcg_out_mov(s
, r1
, addr_reg
);
838 tcg_out_mov(s
, r0
, addr_reg
);
840 tcg_out_shifti(s
, SHIFT_SHR
, r1
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
842 tcg_out_modrm(s
, 0x81, 4, r0
); /* andl $x, r0 */
843 tcg_out32(s
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
845 tcg_out_modrm(s
, 0x81, 4, r1
); /* andl $x, r1 */
846 tcg_out32(s
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
848 tcg_out_opc(s
, 0x8d); /* lea offset(r1, %ebp), r1 */
849 tcg_out8(s
, 0x80 | (r1
<< 3) | 0x04);
850 tcg_out8(s
, (5 << 3) | r1
);
851 tcg_out32(s
, offsetof(CPUState
, tlb_table
[mem_index
][0].addr_write
));
854 tcg_out_modrm_offset(s
, 0x3b, r0
, r1
, 0);
856 tcg_out_mov(s
, r0
, addr_reg
);
858 #if TARGET_LONG_BITS == 32
860 tcg_out8(s
, OPC_JCC_short
+ JCC_JE
);
861 label1_ptr
= s
->code_ptr
;
865 tcg_out8(s
, OPC_JCC_short
+ JCC_JNE
);
866 label3_ptr
= s
->code_ptr
;
869 /* cmp 4(r1), addr_reg2 */
870 tcg_out_modrm_offset(s
, 0x3b, addr_reg2
, r1
, 4);
873 tcg_out8(s
, OPC_JCC_short
+ JCC_JE
);
874 label1_ptr
= s
->code_ptr
;
878 *label3_ptr
= s
->code_ptr
- label3_ptr
- 1;
881 /* XXX: move that code at the end of the TB */
882 #if TARGET_LONG_BITS == 32
884 tcg_out_mov(s
, TCG_REG_EDX
, data_reg
);
885 tcg_out_mov(s
, TCG_REG_ECX
, data_reg2
);
886 tcg_out8(s
, 0x6a); /* push Ib */
887 tcg_out8(s
, mem_index
);
889 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
890 (tcg_target_long
)s
->code_ptr
- 4);
891 tcg_out_addi(s
, TCG_REG_ESP
, 4);
895 tcg_out_ext8u(s
, TCG_REG_EDX
, data_reg
);
898 tcg_out_ext16u(s
, TCG_REG_EDX
, data_reg
);
901 tcg_out_mov(s
, TCG_REG_EDX
, data_reg
);
904 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_ECX
, mem_index
);
906 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
907 (tcg_target_long
)s
->code_ptr
- 4);
911 tcg_out_mov(s
, TCG_REG_EDX
, addr_reg2
);
912 tcg_out8(s
, 0x6a); /* push Ib */
913 tcg_out8(s
, mem_index
);
914 tcg_out_opc(s
, 0x50 + data_reg2
); /* push */
915 tcg_out_opc(s
, 0x50 + data_reg
); /* push */
917 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
918 (tcg_target_long
)s
->code_ptr
- 4);
919 tcg_out_addi(s
, TCG_REG_ESP
, 12);
921 tcg_out_mov(s
, TCG_REG_EDX
, addr_reg2
);
924 tcg_out_ext8u(s
, TCG_REG_ECX
, data_reg
);
927 tcg_out_ext16u(s
, TCG_REG_ECX
, data_reg
);
930 tcg_out_mov(s
, TCG_REG_ECX
, data_reg
);
933 tcg_out8(s
, 0x6a); /* push Ib */
934 tcg_out8(s
, mem_index
);
936 tcg_out32(s
, (tcg_target_long
)qemu_st_helpers
[s_bits
] -
937 (tcg_target_long
)s
->code_ptr
- 4);
938 tcg_out_addi(s
, TCG_REG_ESP
, 4);
943 tcg_out8(s
, OPC_JMP_short
);
944 label2_ptr
= s
->code_ptr
;
948 *label1_ptr
= s
->code_ptr
- label1_ptr
- 1;
951 tcg_out_modrm_offset(s
, 0x03, r0
, r1
, offsetof(CPUTLBEntry
, addend
) -
952 offsetof(CPUTLBEntry
, addr_write
));
957 #ifdef TARGET_WORDS_BIGENDIAN
964 tcg_out_modrm_offset(s
, OPC_MOVB_EvGv
, data_reg
, r0
, GUEST_BASE
);
968 tcg_out_mov(s
, r1
, data_reg
);
969 tcg_out_rolw_8(s
, r1
);
974 tcg_out_modrm_offset(s
, OPC_MOVL_EvGv
, data_reg
, r0
, GUEST_BASE
);
978 tcg_out_mov(s
, r1
, data_reg
);
979 tcg_out_bswap32(s
, r1
);
982 tcg_out_st(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
986 tcg_out_mov(s
, r1
, data_reg2
);
987 tcg_out_bswap32(s
, r1
);
988 tcg_out_st(s
, TCG_TYPE_I32
, r1
, r0
, GUEST_BASE
);
989 tcg_out_mov(s
, r1
, data_reg
);
990 tcg_out_bswap32(s
, r1
);
991 tcg_out_st(s
, TCG_TYPE_I32
, r1
, r0
, GUEST_BASE
+ 4);
993 tcg_out_st(s
, TCG_TYPE_I32
, data_reg
, r0
, GUEST_BASE
);
994 tcg_out_st(s
, TCG_TYPE_I32
, data_reg2
, r0
, GUEST_BASE
+ 4);
1001 #if defined(CONFIG_SOFTMMU)
1003 *label2_ptr
= s
->code_ptr
- label2_ptr
- 1;
1007 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1008 const TCGArg
*args
, const int *const_args
)
1013 case INDEX_op_exit_tb
:
1014 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_EAX
, args
[0]);
1015 tcg_out8(s
, OPC_JMP_long
); /* jmp tb_ret_addr */
1016 tcg_out32(s
, tb_ret_addr
- s
->code_ptr
- 4);
1018 case INDEX_op_goto_tb
:
1019 if (s
->tb_jmp_offset
) {
1020 /* direct jump method */
1021 tcg_out8(s
, OPC_JMP_long
); /* jmp im */
1022 s
->tb_jmp_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1025 /* indirect jump method */
1026 tcg_out_modrm_offset(s
, 0xff, EXT_JMPN_Ev
, -1,
1027 (tcg_target_long
)(s
->tb_next
+ args
[0]));
1029 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1032 if (const_args
[0]) {
1034 tcg_out32(s
, args
[0] - (tcg_target_long
)s
->code_ptr
- 4);
1036 tcg_out_modrm(s
, 0xff, 2, args
[0]);
1040 if (const_args
[0]) {
1041 tcg_out8(s
, OPC_JMP_long
);
1042 tcg_out32(s
, args
[0] - (tcg_target_long
)s
->code_ptr
- 4);
1045 tcg_out_modrm(s
, 0xff, EXT_JMPN_Ev
, args
[0]);
1049 tcg_out_jxx(s
, JCC_JMP
, args
[0], 0);
1051 case INDEX_op_movi_i32
:
1052 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1054 case INDEX_op_ld8u_i32
:
1056 tcg_out_modrm_offset(s
, OPC_MOVZBL
, args
[0], args
[1], args
[2]);
1058 case INDEX_op_ld8s_i32
:
1060 tcg_out_modrm_offset(s
, OPC_MOVSBL
, args
[0], args
[1], args
[2]);
1062 case INDEX_op_ld16u_i32
:
1064 tcg_out_modrm_offset(s
, OPC_MOVZWL
, args
[0], args
[1], args
[2]);
1066 case INDEX_op_ld16s_i32
:
1068 tcg_out_modrm_offset(s
, OPC_MOVSWL
, args
[0], args
[1], args
[2]);
1070 case INDEX_op_ld_i32
:
1071 tcg_out_ld(s
, TCG_TYPE_I32
, args
[0], args
[1], args
[2]);
1073 case INDEX_op_st8_i32
:
1075 tcg_out_modrm_offset(s
, OPC_MOVB_EvGv
, args
[0], args
[1], args
[2]);
1077 case INDEX_op_st16_i32
:
1080 tcg_out_modrm_offset(s
, OPC_MOVL_EvGv
, args
[0], args
[1], args
[2]);
1082 case INDEX_op_st_i32
:
1083 tcg_out_st(s
, TCG_TYPE_I32
, args
[0], args
[1], args
[2]);
1085 case INDEX_op_sub_i32
:
1088 case INDEX_op_and_i32
:
1091 case INDEX_op_or_i32
:
1094 case INDEX_op_xor_i32
:
1097 case INDEX_op_add_i32
:
1100 if (const_args
[2]) {
1101 tgen_arithi(s
, c
, args
[0], args
[2], 0);
1103 tcg_out_modrm(s
, 0x01 | (c
<< 3), args
[2], args
[0]);
1106 case INDEX_op_mul_i32
:
1107 if (const_args
[2]) {
1110 if (val
== (int8_t)val
) {
1111 tcg_out_modrm(s
, 0x6b, args
[0], args
[0]);
1114 tcg_out_modrm(s
, 0x69, args
[0], args
[0]);
1118 tcg_out_modrm(s
, 0xaf | P_EXT
, args
[0], args
[2]);
1121 case INDEX_op_mulu2_i32
:
1122 tcg_out_modrm(s
, 0xf7, 4, args
[3]);
1124 case INDEX_op_div2_i32
:
1125 tcg_out_modrm(s
, 0xf7, 7, args
[4]);
1127 case INDEX_op_divu2_i32
:
1128 tcg_out_modrm(s
, 0xf7, 6, args
[4]);
1130 case INDEX_op_shl_i32
:
1133 if (const_args
[2]) {
1134 tcg_out_shifti(s
, c
, args
[0], args
[2]);
1136 tcg_out_modrm(s
, OPC_SHIFT_cl
, c
, args
[0]);
1139 case INDEX_op_shr_i32
:
1142 case INDEX_op_sar_i32
:
1145 case INDEX_op_rotl_i32
:
1148 case INDEX_op_rotr_i32
:
1152 case INDEX_op_add2_i32
:
1154 tgen_arithi(s
, ARITH_ADD
, args
[0], args
[4], 1);
1156 tcg_out_modrm(s
, 0x01 | (ARITH_ADD
<< 3), args
[4], args
[0]);
1158 tgen_arithi(s
, ARITH_ADC
, args
[1], args
[5], 1);
1160 tcg_out_modrm(s
, 0x01 | (ARITH_ADC
<< 3), args
[5], args
[1]);
1162 case INDEX_op_sub2_i32
:
1164 tgen_arithi(s
, ARITH_SUB
, args
[0], args
[4], 1);
1166 tcg_out_modrm(s
, 0x01 | (ARITH_SUB
<< 3), args
[4], args
[0]);
1168 tgen_arithi(s
, ARITH_SBB
, args
[1], args
[5], 1);
1170 tcg_out_modrm(s
, 0x01 | (ARITH_SBB
<< 3), args
[5], args
[1]);
1172 case INDEX_op_brcond_i32
:
1173 tcg_out_brcond(s
, args
[2], args
[0], args
[1], const_args
[1],
1176 case INDEX_op_brcond2_i32
:
1177 tcg_out_brcond2(s
, args
, const_args
, 0);
1180 case INDEX_op_bswap16_i32
:
1181 tcg_out_rolw_8(s
, args
[0]);
1183 case INDEX_op_bswap32_i32
:
1184 tcg_out_bswap32(s
, args
[0]);
1187 case INDEX_op_neg_i32
:
1188 tcg_out_modrm(s
, 0xf7, 3, args
[0]);
1191 case INDEX_op_not_i32
:
1192 tcg_out_modrm(s
, 0xf7, 2, args
[0]);
1195 case INDEX_op_ext8s_i32
:
1196 tcg_out_ext8s(s
, args
[0], args
[1]);
1198 case INDEX_op_ext16s_i32
:
1199 tcg_out_ext16s(s
, args
[0], args
[1]);
1201 case INDEX_op_ext8u_i32
:
1202 tcg_out_ext8u(s
, args
[0], args
[1]);
1204 case INDEX_op_ext16u_i32
:
1205 tcg_out_ext16u(s
, args
[0], args
[1]);
1208 case INDEX_op_setcond_i32
:
1209 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2], const_args
[2]);
1211 case INDEX_op_setcond2_i32
:
1212 tcg_out_setcond2(s
, args
, const_args
);
1215 case INDEX_op_qemu_ld8u
:
1216 tcg_out_qemu_ld(s
, args
, 0);
1218 case INDEX_op_qemu_ld8s
:
1219 tcg_out_qemu_ld(s
, args
, 0 | 4);
1221 case INDEX_op_qemu_ld16u
:
1222 tcg_out_qemu_ld(s
, args
, 1);
1224 case INDEX_op_qemu_ld16s
:
1225 tcg_out_qemu_ld(s
, args
, 1 | 4);
1227 case INDEX_op_qemu_ld32
:
1228 tcg_out_qemu_ld(s
, args
, 2);
1230 case INDEX_op_qemu_ld64
:
1231 tcg_out_qemu_ld(s
, args
, 3);
1234 case INDEX_op_qemu_st8
:
1235 tcg_out_qemu_st(s
, args
, 0);
1237 case INDEX_op_qemu_st16
:
1238 tcg_out_qemu_st(s
, args
, 1);
1240 case INDEX_op_qemu_st32
:
1241 tcg_out_qemu_st(s
, args
, 2);
1243 case INDEX_op_qemu_st64
:
1244 tcg_out_qemu_st(s
, args
, 3);
1252 static const TCGTargetOpDef x86_op_defs
[] = {
1253 { INDEX_op_exit_tb
, { } },
1254 { INDEX_op_goto_tb
, { } },
1255 { INDEX_op_call
, { "ri" } },
1256 { INDEX_op_jmp
, { "ri" } },
1257 { INDEX_op_br
, { } },
1258 { INDEX_op_mov_i32
, { "r", "r" } },
1259 { INDEX_op_movi_i32
, { "r" } },
1260 { INDEX_op_ld8u_i32
, { "r", "r" } },
1261 { INDEX_op_ld8s_i32
, { "r", "r" } },
1262 { INDEX_op_ld16u_i32
, { "r", "r" } },
1263 { INDEX_op_ld16s_i32
, { "r", "r" } },
1264 { INDEX_op_ld_i32
, { "r", "r" } },
1265 { INDEX_op_st8_i32
, { "q", "r" } },
1266 { INDEX_op_st16_i32
, { "r", "r" } },
1267 { INDEX_op_st_i32
, { "r", "r" } },
1269 { INDEX_op_add_i32
, { "r", "0", "ri" } },
1270 { INDEX_op_sub_i32
, { "r", "0", "ri" } },
1271 { INDEX_op_mul_i32
, { "r", "0", "ri" } },
1272 { INDEX_op_mulu2_i32
, { "a", "d", "a", "r" } },
1273 { INDEX_op_div2_i32
, { "a", "d", "0", "1", "r" } },
1274 { INDEX_op_divu2_i32
, { "a", "d", "0", "1", "r" } },
1275 { INDEX_op_and_i32
, { "r", "0", "ri" } },
1276 { INDEX_op_or_i32
, { "r", "0", "ri" } },
1277 { INDEX_op_xor_i32
, { "r", "0", "ri" } },
1279 { INDEX_op_shl_i32
, { "r", "0", "ci" } },
1280 { INDEX_op_shr_i32
, { "r", "0", "ci" } },
1281 { INDEX_op_sar_i32
, { "r", "0", "ci" } },
1282 { INDEX_op_rotl_i32
, { "r", "0", "ci" } },
1283 { INDEX_op_rotr_i32
, { "r", "0", "ci" } },
1285 { INDEX_op_brcond_i32
, { "r", "ri" } },
1287 { INDEX_op_add2_i32
, { "r", "r", "0", "1", "ri", "ri" } },
1288 { INDEX_op_sub2_i32
, { "r", "r", "0", "1", "ri", "ri" } },
1289 { INDEX_op_brcond2_i32
, { "r", "r", "ri", "ri" } },
1291 { INDEX_op_bswap16_i32
, { "r", "0" } },
1292 { INDEX_op_bswap32_i32
, { "r", "0" } },
1294 { INDEX_op_neg_i32
, { "r", "0" } },
1296 { INDEX_op_not_i32
, { "r", "0" } },
1298 { INDEX_op_ext8s_i32
, { "r", "q" } },
1299 { INDEX_op_ext16s_i32
, { "r", "r" } },
1300 { INDEX_op_ext8u_i32
, { "r", "q" } },
1301 { INDEX_op_ext16u_i32
, { "r", "r" } },
1303 { INDEX_op_setcond_i32
, { "q", "r", "ri" } },
1304 { INDEX_op_setcond2_i32
, { "r", "r", "r", "ri", "ri" } },
1306 #if TARGET_LONG_BITS == 32
1307 { INDEX_op_qemu_ld8u
, { "r", "L" } },
1308 { INDEX_op_qemu_ld8s
, { "r", "L" } },
1309 { INDEX_op_qemu_ld16u
, { "r", "L" } },
1310 { INDEX_op_qemu_ld16s
, { "r", "L" } },
1311 { INDEX_op_qemu_ld32
, { "r", "L" } },
1312 { INDEX_op_qemu_ld64
, { "r", "r", "L" } },
1314 { INDEX_op_qemu_st8
, { "cb", "L" } },
1315 { INDEX_op_qemu_st16
, { "L", "L" } },
1316 { INDEX_op_qemu_st32
, { "L", "L" } },
1317 { INDEX_op_qemu_st64
, { "L", "L", "L" } },
1319 { INDEX_op_qemu_ld8u
, { "r", "L", "L" } },
1320 { INDEX_op_qemu_ld8s
, { "r", "L", "L" } },
1321 { INDEX_op_qemu_ld16u
, { "r", "L", "L" } },
1322 { INDEX_op_qemu_ld16s
, { "r", "L", "L" } },
1323 { INDEX_op_qemu_ld32
, { "r", "L", "L" } },
1324 { INDEX_op_qemu_ld64
, { "r", "r", "L", "L" } },
1326 { INDEX_op_qemu_st8
, { "cb", "L", "L" } },
1327 { INDEX_op_qemu_st16
, { "L", "L", "L" } },
1328 { INDEX_op_qemu_st32
, { "L", "L", "L" } },
1329 { INDEX_op_qemu_st64
, { "L", "L", "L", "L" } },
1334 static int tcg_target_callee_save_regs
[] = {
1335 /* TCG_REG_EBP, */ /* currently used for the global env, so no
1342 static inline void tcg_out_push(TCGContext
*s
, int reg
)
1344 tcg_out_opc(s
, 0x50 + reg
);
1347 static inline void tcg_out_pop(TCGContext
*s
, int reg
)
1349 tcg_out_opc(s
, 0x58 + reg
);
1352 /* Generate global QEMU prologue and epilogue code */
1353 void tcg_target_qemu_prologue(TCGContext
*s
)
1355 int i
, frame_size
, push_size
, stack_addend
;
1358 /* save all callee saved registers */
1359 for(i
= 0; i
< ARRAY_SIZE(tcg_target_callee_save_regs
); i
++) {
1360 tcg_out_push(s
, tcg_target_callee_save_regs
[i
]);
1362 /* reserve some stack space */
1363 push_size
= 4 + ARRAY_SIZE(tcg_target_callee_save_regs
) * 4;
1364 frame_size
= push_size
+ TCG_STATIC_CALL_ARGS_SIZE
;
1365 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1366 ~(TCG_TARGET_STACK_ALIGN
- 1);
1367 stack_addend
= frame_size
- push_size
;
1368 tcg_out_addi(s
, TCG_REG_ESP
, -stack_addend
);
1370 tcg_out_modrm(s
, 0xff, EXT_JMPN_Ev
, TCG_REG_EAX
); /* jmp *%eax */
1373 tb_ret_addr
= s
->code_ptr
;
1374 tcg_out_addi(s
, TCG_REG_ESP
, stack_addend
);
1375 for(i
= ARRAY_SIZE(tcg_target_callee_save_regs
) - 1; i
>= 0; i
--) {
1376 tcg_out_pop(s
, tcg_target_callee_save_regs
[i
]);
1378 tcg_out8(s
, 0xc3); /* ret */
1381 void tcg_target_init(TCGContext
*s
)
1383 #if !defined(CONFIG_USER_ONLY)
1385 if ((1 << CPU_TLB_ENTRY_BITS
) != sizeof(CPUTLBEntry
))
1389 tcg_regset_set32(tcg_target_available_regs
[TCG_TYPE_I32
], 0, 0xff);
1391 tcg_regset_clear(tcg_target_call_clobber_regs
);
1392 tcg_regset_set_reg(tcg_target_call_clobber_regs
, TCG_REG_EAX
);
1393 tcg_regset_set_reg(tcg_target_call_clobber_regs
, TCG_REG_EDX
);
1394 tcg_regset_set_reg(tcg_target_call_clobber_regs
, TCG_REG_ECX
);
1396 tcg_regset_clear(s
->reserved_regs
);
1397 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ESP
);
1399 tcg_add_target_add_op_defs(x86_op_defs
);