/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"
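
/*
 * The shared constant-pool helpers from tcg-pool.c.inc (new_pool_label,
 * new_pool_l2) are used below by tcg_out_movi and tcg_out_call to place
 * values that cannot be encoded in a 32-bit instruction word.
 */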

static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
#endif

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

/* No call arguments via registers. All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1,
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
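
/*
 * Each TCI instruction is a single 32-bit word: the opcode occupies the low
 * 8 bits, and the remaining bits hold the operands packed as 4-bit register
 * numbers, 4-bit condition codes, 6-bit bit-field position/length pairs, or
 * 12/16/20-bit immediates, as checked by the emitters below.
 */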

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 20, 12, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrm(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGArg m3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m3 == extract32(m3, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 12, m3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 32
static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}
#endif

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 32
static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}
#endif
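
/*
 * Emit a load or store with a register base and a signed 16-bit displacement.
 * Offsets that do not fit the immediate field are first added to the base in
 * TCG_REG_TMP.
 */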

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}
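
/*
 * Load a constant: values that fit the signed 20-bit immediate are emitted
 * as tci_movi; anything wider goes through the constant pool and is loaded
 * with tci_movl.
 */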

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
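
/*
 * Calls are emitted as a single insn whose pool entry holds both the helper
 * address and its ffi_cif descriptor; the 4-bit operand tells the interpreter
 * how to handle the return value (0 = void, 1 = 32-bit, 2 = 64-bit).
 */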

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         ffi_cif *cif)
{
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else if (cif->rtype->size == 4) {
        which = 1;
    } else {
        tcg_debug_assert(cif->rtype->size == 8);
        which = 2;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
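
/*
 * CASE_32_64(x) expands to the _i32 case label and, on 64-bit hosts, also
 * the matching _i64 label; CASE_64(x) expands to nothing on 32-bit hosts.
 */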

static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_op_p(s, opc, (void *)args[0]);
        break;

    case INDEX_op_goto_tb:
        tcg_debug_assert(s->tb_jmp_insn_offset == 0);
        /* indirect jump method. */
        tcg_out_op_p(s, opc, s->tb_jmp_target_addr + args[0]);
        set_jmp_reset_offset(s, args[0]);
        break;

    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    case INDEX_op_ld_i64:
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    case INDEX_op_st_i64:
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        {
            TCGArg pos = args[2], len = args[3];
            TCGArg max = tcg_op_defs[opc].flags & TCG_OPF_64BIT ? 64 : 32;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrbb(s, opc, args[0], args[1], pos, len);
        }
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    CASE_32_64(bswap16)  /* Optional (TCG_TARGET_HAS_bswap16_*). */
    CASE_32_64(bswap32)  /* Optional (TCG_TARGET_HAS_bswap32_*). */
    CASE_64(bswap64)     /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;
#endif

    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        }
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    stack_bounds_check(base, offset);
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rrs(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rrs(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}
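
/*
 * Constants cannot be stored to memory directly; returning false here makes
 * the common code materialize the value in a register first.
 */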

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions. However,
     * the interpreter assumes a 64-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 64 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}