/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)
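
/* Example: unimplemented paths below, such as the 32 bit host case in
   tcg_out_ld, expand to TODO() and abort if they are reached during
   code generation. */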

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
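
/* For example, BITS(7, 4) expands to ((0xffffffffU << 24) >> 28) << 4,
   i.e. 0xf0, a mask with bits 7..4 set. */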

/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif

/* TODO: documentation. */
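/* Informal sketch of the table below: each entry pairs a TCG opcode with
   one constraint string per operand, outputs first.  R expands to "r" (any
   register) and RI to "ri" (register or immediate); "0" and "1" alias an
   input with the corresponding output; R64, L and S expand to two
   constraints where a 64 bit value or a guest address needs a register
   pair on the host. */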
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif

    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
    { INDEX_op_ext_i32_i64, { R, R } },
    { INDEX_op_extu_i32_i64, { R, R } },
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld_i32, { R, L } },
    { INDEX_op_qemu_ld_i64, { R64, L } },

    { INDEX_op_qemu_st_i32, { R, S } },
    { INDEX_op_qemu_st_i64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { INDEX_op_mb, { } },
    { -1 },
};

static const TCGTargetOpDef *tcg_target_op_def(TCGOpcode op)
{
    int i, n = ARRAY_SIZE(tcg_target_op_defs);

    for (i = 0; i < n; ++i) {
        if (tcg_target_op_defs[i].op == op) {
            return &tcg_target_op_defs[i];
        }
    }
    return NULL;
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31",
#endif
#endif
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}
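
/* For example, on a 64 bit host a branch target is patched in as a full
   eight byte absolute address; every TCI relocation has the same "type",
   sizeof(tcg_target_long), as asserted above. */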

/* Parse target specific constraints. */
static const char *target_parse_constraint(TCGArgConstraint *ct,
                                           const char *ct_str, TCGType type)
{
    switch (*ct_str++) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        ct->u.regs = BIT(TCG_TARGET_NB_REGS) - 1;
        break;
    default:
        return NULL;
    }
    return ct_str;
}
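
/* Note: the interpreter has no addressing restrictions, so 'r', 'L' and
   'S' all accept the full register set; the distinct letters merely
   mirror the qemu_ld/qemu_st constraint classes of other backends. */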

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
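
/* Sketch of the bytecode layout implied here: each instruction begins
   with an opcode byte and a length byte.  The length is written as 0
   above and patched at the end of each emitter via
       old_code_ptr[1] = s->code_ptr - old_code_ptr;
 */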

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
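
/* For example, a register operand occupies one byte, while a constant is
   the marker byte TCG_CONST followed by the immediate value; that is how
   the interpreter distinguishes the two when decoding an "ri" operand. */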

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
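
/* A label that is already resolved is written directly; a forward
   reference emits a native word placeholder, which patch_reloc (above)
   fills in once the label's address is known. */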

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
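
/* Write call.  The target is always emitted as a constant address
   (const_arg == 1), never as a register. */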
static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_ri(s, 1, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        set_jmp_reset_offset(s, args[0]);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mb:
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
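
/* Informal example of the emitted format: "add_i32 r0, r1, 5" with a
   constant third operand becomes the byte sequence
       opcode, length, reg0, reg1, TCG_CONST, imm32
   where "length" covers the whole instruction and is patched in above. */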

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}