/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
/* register names, indexed by hardware encoding */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};
static const int tcg_target_reg_alloc_order[] = {
/* x86-64 SysV ABI: integer call arguments are passed in these registers,
   in this order */
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX,
    TCG_REG_RDX,
};
static uint8_t *tb_ret_addr;
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_X86_64_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_X86_64_32S:
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC32:
        value -= (long)code_ptr;
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}
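/*
 * Illustration of how the relocation machinery is used below: tcg_out_jxx()
 * emits a branch whose 32-bit displacement is not yet known and records a
 * R_386_PC32 relocation with addend -4 against the displacement field.
 * When the label is bound, patch_reloc() receives the label address in
 * 'value', adds the -4 addend and subtracts the address of the field, so the
 * stored displacement is relative to the end of the branch instruction, as
 * the CPU expects.
 */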
/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
        break;
    case 'b':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
        break;
    case 'c':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
        break;
    case 'd':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
        break;
    case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'q':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'e':
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_U32;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
        return 1;
    else
        return 0;
}
#define P_EXT   0x100 /* 0x0f opcode prefix */
#define P_REXW  0x200 /* set rex.w = 1 */
#define P_REXB  0x400 /* force rex use for byte registers */
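/*
 * Opcode values passed to tcg_out_opc()/tcg_out_modrm() carry the actual
 * instruction byte in bits 0..7 and the P_* flags above: P_EXT requests the
 * 0x0f escape byte, P_REXW sets REX.W (64-bit operand size), and P_REXB
 * forces a REX prefix so the low byte registers SPL/BPL/SIL/DIL are
 * addressable.  A sketch of what a 64-bit register-to-register move expands
 * to, assuming the usual hardware numbering TCG_REG_RAX = 0, TCG_REG_RBX = 3:
 *
 *   tcg_out_modrm(s, 0x8b | P_REXW, TCG_REG_RAX, TCG_REG_RBX);
 *     -> 48 8b c3              rex.w  mov %rbx,%rax
 */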
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
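/* Signed TCG conditions use the signed jcc encodings (JL/JGE/JLE/JG),
   unsigned ones the below/above encodings (JB/JAE/JBE/JA); EQ/NE are shared.
   The table is indexed directly by the TCGCond value. */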
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;

    /* REX.W comes from P_REXW in opc; REX.R, REX.X and REX.B are bit 3 of
       the reg, index and base register numbers respectively. */
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) |
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    if (rex || (opc & P_REXB)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc & 0xff);
}
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
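/*
 * The ModRM byte is mod(2):reg(3):rm(3).  tcg_out_modrm() always uses
 * mod = 11 (register direct), so only the low three bits of each register
 * number are encoded here; bit 3 of either register travels in the REX
 * prefix emitted by tcg_out_opc().  Worked example, assuming TCG_REG_R8 = 8
 * and TCG_REG_RAX = 0:
 *
 *   tcg_out_modrm(s, 0x8b | P_REXW, TCG_REG_R8, TCG_REG_RAX);
 *     -> 4c 8b c0              rex.wr  mov %rax,%r8
 */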
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
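/*
 * Summary of the addressing-mode selection above: offset 0 with a base other
 * than RBP/R13 uses mod = 00 (no displacement), an offset that fits in a
 * signed byte uses mod = 01 plus disp8, everything else mod = 10 plus
 * disp32; a base of RSP/R12 additionally needs the 0x24 SIB byte, and
 * rm < 0 selects RIP-relative (or absolute) addressing.  Worked example,
 * assuming TCG_REG_RAX = 0 and TCG_REG_RSP = 4:
 *
 *   tcg_out_modrm_offset(s, 0x8b | P_REXW, TCG_REG_RAX, TCG_REG_RSP, 8);
 *     -> 48 8b 44 24 08        mov 0x8(%rsp),%rax
 */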
#if defined(CONFIG_SOFTMMU)
/* XXX: incomplete. index must be different from ESP */
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm,
                                  int index, int shift,
                                  tcg_target_long offset)
{
    int mod;

    if (rm == -1)
        tcg_abort();
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        mod = 0;
    } else if (offset == (int8_t)offset) {
        mod = 0x40;
    } else if (offset == (int32_t)offset) {
        mod = 0x80;
    } else {
        tcg_abort();
    }
    if (index == -1) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
            tcg_out8(s, 0x04 | (rm & 7));
        } else {
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
        }
    } else {
        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
    }
    if (mod == 0x40) {
        tcg_out8(s, offset);
    } else if (mod == 0x80) {
        tcg_out32(s, offset);
    }
}
#endif
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
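/*
 * Immediate selection in tcg_out_movi(): zero becomes an xor of the register
 * with itself, a value that fits in 32 unsigned bits uses the plain 32-bit
 * mov (0xb8 + reg, which zero-extends into the full 64-bit register), a
 * value that fits in 32 signed bits uses the sign-extending movq $imm32
 * form (REX.W + 0xc7 /0), and only the remaining cases pay for the full
 * 10-byte movabs (REX.W + 0xb8 + reg + imm64).
 */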
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x8b | P_REXW, ret, arg1, arg2); /* movq */
}
static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x89 | P_REXW, arg, arg1, arg2); /* movq */
}
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffffffu) {
        /* 32-bit mov zero extends */
        tcg_out_modrm(s, 0x8b, r0, r0);
    } else if (val == (int32_t)val) {
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}
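/*
 * Both helpers above pick the shortest usable encoding: the sign-extended
 * imm8 form (0x83 /op) when the constant fits in a signed byte, movzbl or
 * movzwl instead of "and $0xff" / "and $0xffff", a plain 32-bit mov for a
 * 64-bit "and $0xffffffff" (32-bit operations zero-extend on x86-64), and
 * the generic imm32 form (0x81 /op) otherwise.
 */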
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0)
        tgen_arithi64(s, ARITH_ADD, reg, val);
}
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);
            else
                tcg_out8(s, 0x70 + opc);
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else {
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
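/*
 * Branch encodings used above: a bound label that is close enough for a
 * signed 8-bit displacement gets the 2-byte short form (0xeb or 0x70+cc
 * followed by rel8), otherwise the near form (0xe9 rel32 for jmp,
 * 0x0f 0x80+cc rel32 for jcc).  An unbound label always uses the near form:
 * 4 bytes are reserved for the displacement and a R_386_PC32 relocation is
 * recorded so patch_reloc() can fill them in later.
 */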
static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
        } else {
            if (rexw)
                tgen_arithi64(s, ARITH_CMP, arg1, arg2);
            else
                tgen_arithi32(s, ARITH_CMP, arg1, arg2);
        }
    } else {
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
    }
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
    int32_t offset;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
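    /*
     * Softmmu fast path emitted below: r1 is loaded with the TLB entry
     * offset (address shifted right by TARGET_PAGE_BITS, scaled by the entry
     * size and masked to CPU_TLB_SIZE entries) and r0 with the page-aligned
     * address plus the low alignment bits of the access.  The lea turns the
     * offset into a pointer to the CPUTLBEntry inside env, its addr_read
     * field is compared against r0 and, on a match (je label1), the entry's
     * addend is added to the address to form the host pointer used for the
     * load.  On a miss the code falls through to a call of the
     * qemu_ld_helpers[] slow path.
     */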
    /* mov addr_reg, r1 */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
    /* XXX: move that code at the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    /* call */
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);
    /* sign/zero extend the value returned in %rax according to opc */
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
        /* movzbq */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1:
        /* movzwq */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }
    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB Hit */

    /* patch the jump to label1 to point here */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
    offset = 0;
#else
    if (GUEST_BASE == (int32_t)GUEST_BASE) {
        r0 = addr_reg;
        offset = GUEST_BASE;
    } else {
        /* movq $GUEST_BASE, r0 */
        tcg_out_opc(s, (0xb8 + (r0 & 7)) | P_REXW, 0, r0, 0);
        tcg_out32(s, GUEST_BASE);
        tcg_out32(s, GUEST_BASE >> 32);
        /* addq addr_reg, r0 */
        tcg_out_modrm(s, 0x01 | P_REXW, addr_reg, r0);
        offset = 0;
    }
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, offset);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, offset);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, offset);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, offset);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, offset);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, offset);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, offset);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq (r0), data_reg */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, offset);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, offset);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }
#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
    int32_t offset;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* same TLB lookup as in tcg_out_qemu_ld(), but against addr_write */
    /* mov addr_reg, r1 */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
    /* XXX: move that code at the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    /* call */
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);
    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB Hit */

    /* patch the jump to label1 to point here */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
    offset = 0;
#else
    if (GUEST_BASE == (int32_t)GUEST_BASE) {
        r0 = addr_reg;
        offset = GUEST_BASE;
    } else {
        /* movq $GUEST_BASE, r0 */
        tcg_out_opc(s, (0xb8 + (r0 & 7)) | P_REXW, 0, r0, 0);
        tcg_out32(s, GUEST_BASE);
        tcg_out32(s, GUEST_BASE >> 32);
        /* addq addr_reg, r0 */
        tcg_out_modrm(s, 0x01 | P_REXW, addr_reg, r0);
        offset = 0;
    }
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, data_reg, r0, offset);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, offset);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, offset);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, offset);
        break;
    default:
        tcg_abort();
    }
#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1,
                                 (tcg_target_long)(s->tb_next +
                                                   args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out8(s, 0xe8);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out8(s, 0xe9);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            /* jmp *reg */
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i64:
        /* movsbq */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i64:
        /* movswq */
        tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        /* movslq */
        tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
        break;

    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith32;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith32;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith32;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith32:
        if (const_args[2]) {
            tgen_arithi32(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;
    case INDEX_op_sub_i64:
        c = ARITH_SUB;
        goto gen_arith64;
    case INDEX_op_and_i64:
        c = ARITH_AND;
        goto gen_arith64;
    case INDEX_op_or_i64:
        c = ARITH_OR;
        goto gen_arith64;
    case INDEX_op_xor_i64:
        c = ARITH_XOR;
        goto gen_arith64;
    case INDEX_op_add_i64:
        c = ARITH_ADD;
    gen_arith64:
        if (const_args[2]) {
            tgen_arithi64(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
        }
        break;
    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mul_i64:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
        }
        break;
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]);
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]);
        break;
    case INDEX_op_div2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
        break;
    case INDEX_op_divu2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
        break;
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;
    case INDEX_op_rotl_i32:
        c = SHIFT_ROL;
        goto gen_shift32;
    case INDEX_op_rotr_i32:
        c = SHIFT_ROR;
        goto gen_shift32;

    case INDEX_op_shl_i64:
        c = SHIFT_SHL;
    gen_shift64:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
        }
        break;
    case INDEX_op_shr_i64:
        c = SHIFT_SHR;
        goto gen_shift64;
    case INDEX_op_sar_i64:
        c = SHIFT_SAR;
        goto gen_shift64;
    case INDEX_op_rotl_i64:
        c = SHIFT_ROL;
        goto gen_shift64;
    case INDEX_op_rotr_i64:
        c = SHIFT_ROR;
        goto gen_shift64;
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], 0);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], P_REXW);
        break;
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
        tcg_out8(s, 0x66);
        /* rolw $8, %reg */
        tcg_out_modrm(s, 0xc1, SHIFT_ROL, args[0]);
        tcg_out8(s, 8);
        break;
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
        /* bswap */
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
        break;
    case INDEX_op_bswap64_i64:
        /* bswapq */
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
        break;

    case INDEX_op_neg_i32:
        tcg_out_modrm(s, 0xf7, 3, args[0]);
        break;
    case INDEX_op_neg_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]);
        break;

    case INDEX_op_not_i32:
        tcg_out_modrm(s, 0xf7, 2, args[0]);
        break;
    case INDEX_op_not_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 2, args[0]);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXB, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext8s_i64:
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i64:
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext32s_i64:
        tcg_out_modrm(s, 0x63 | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
static int tcg_target_callee_save_regs[] = {
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    /*    TCG_REG_R14, */ /* currently used for the global env, so no
                             need to save */
    TCG_REG_R15,
};
static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x50 + (reg & 7)), 0, reg, 0);
}

static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x58 + (reg & 7)), 0, reg, 0);
}
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
    }
    /* reserve some stack space */
    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);

    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */

    /* TB epilogue */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}
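/*
 * Frame layout arithmetic, worked through with assumed values (five callee
 * saved registers, TCG_STATIC_CALL_ARGS_SIZE = 128, 16-byte stack
 * alignment): push_size = 8 (return address) + 5 * 8 = 48 bytes are already
 * on the stack after the pushes, frame_size = 48 + 128 = 176 is already
 * 16-byte aligned, so stack_addend = 176 - 48 = 128 and the prologue
 * adjusts %rsp by -128.  The epilogue adds the same constant back before
 * popping the saved registers and returning.
 */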
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },
    { INDEX_op_rotl_i64, { "r", "0", "ci" } },
    { INDEX_op_rotr_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    { INDEX_op_bswap16_i32, { "r", "0" } },
    { INDEX_op_bswap16_i64, { "r", "0" } },
    { INDEX_op_bswap32_i32, { "r", "0" } },
    { INDEX_op_bswap32_i64, { "r", "0" } },
    { INDEX_op_bswap64_i64, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },
    { INDEX_op_not_i64, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext8s_i64, { "r", "r" } },
    { INDEX_op_ext16s_i64, { "r", "r" } },
    { INDEX_op_ext32s_i64, { "r", "r" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },

    { -1 },
};
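/*
 * Constraint letters used in the table above (see target_parse_constraint):
 * "r" is any of the 16 integer registers, "a"/"c"/"d" pin an operand to
 * RAX/RCX/RDX (the div helpers and the variable shift count need them),
 * "L" is a register usable for qemu_ld/st operands (RSI and RDI are
 * excluded because the slow path clobbers them), "0"/"1" alias an output to
 * the matching input, "i" accepts any immediate, "e" an immediate that fits
 * in signed 32 bits and "Z" one that fits in unsigned 32 bits.
 */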
void tcg_target_init(TCGContext *s)
{
    /* fail safe */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();

    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_RDI) |
                     (1 << TCG_REG_RSI) |
                     (1 << TCG_REG_RDX) |
                     (1 << TCG_REG_RCX) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_RAX) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}