/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"

static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);
#endif

    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

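/*
 * For illustration: a constraint set names the operand shapes for the
 * register allocator.  C_O1_I2(r, r, r) means one output and two inputs,
 * all in registers, so e.g. "add_i32 d, a, b" always gets three registers
 * and never an inline immediate; TCI materializes constants separately
 * via tcg_out_movi().
 */
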
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R1,
    TCG_REG_R0,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1,
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

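/*
 * For illustration: with type == 20 the displacement occupies the top
 * 20 bits of the 32-bit insn (bit 12 upward), measured from the end of
 * the insn being patched.  A target 3 insns (12 bytes) past code_ptr + 1
 * is patched as:
 *
 *   *code_ptr = deposit32(*code_ptr, 12, 20, 12);
 */
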
static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 20, 12, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

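/*
 * For illustration: the interpreter recovers the fields of an "rrs"
 * insn with the mirror-image extract operations:
 *
 *   TCGOpcode op = extract32(insn, 0, 8);
 *   TCGReg    r0 = extract32(insn, 8, 4);
 *   TCGReg    r1 = extract32(insn, 12, 4);
 *   int32_t   i2 = sextract32(insn, 16, 16);
 */
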
static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrm(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGArg m3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m3 == extract32(m3, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 12, m3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

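/*
 * For illustration: the two 6-bit fields fill the insn exactly
 * (8 + 4 + 4 + 4 + 6 + 6 = 32 bits), which is why deposit's position
 * and length operands are limited to 0..63 here.
 */
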
static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 32
static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}
#endif

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

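/*
 * For illustration: an offset outside the signed 16-bit insn field is
 * materialized through TCG_REG_TMP, e.g. a load at offset 0x12345
 * becomes three insns:
 *
 *   movi tmp, 0x12345      (via the constant pool if needed)
 *   add  tmp, tmp, base
 *   ld   val, tmp, 0
 */
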
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

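/*
 * For illustration: sextract32(arg, 0, 20) round-trips values in
 * [-0x80000, 0x7ffff], so movi of 0x7ffff is a single tci_movi insn,
 * while 0x80000 is spilled to the constant pool and fetched by tci_movl
 * through the 20-bit pc-relative reloc.
 */
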
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         ffi_cif *cif)
{
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else if (cif->rtype->size == 4) {
        which = 1;
    } else {
        tcg_debug_assert(cif->rtype->size == 8);
        which = 2;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

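/*
 * For illustration: the call insn carries only the return-kind tag
 * ("which": 0 = void, 1 = 32-bit, 2 = 64-bit) plus a 20-bit reloc to a
 * constant-pool pair {func, cif}; the interpreter loads that pair and
 * dispatches the call through libffi.
 */
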
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i64):    \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)                             \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

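/*
 * For illustration: on a 64-bit host, CASE_32_64(add) expands to
 *
 *   case INDEX_op_add_i64:
 *   case INDEX_op_add_i32:
 *
 * while on a 32-bit host only the _i32 label is emitted and CASE_64(x)
 * expands to nothing.
 */
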
static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_op_p(s, opc, (void *)args[0]);
        break;

    case INDEX_op_goto_tb:
        tcg_debug_assert(s->tb_jmp_insn_offset == 0);
        /* Indirect jump method. */
        tcg_out_op_p(s, opc, s->tb_jmp_target_addr + args[0]);
        set_jmp_reset_offset(s, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;
#endif

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

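    /*
     * For illustration: "brcond cond, a, b -> label" is thus lowered to
     * two TCI insns, "setcond tmp, a, b, cond" followed by a branch taken
     * when TCG_REG_TMP is nonzero; TCI has no fused compare-and-branch.
     */
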
    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext_i32)
    CASE_64(extu_i32)
    CASE_32_64(bswap16)  /* Optional (TCG_TARGET_HAS_bswap16_*). */
    CASE_32_64(bswap32)  /* Optional (TCG_TARGET_HAS_bswap32_*). */
    CASE_64(bswap64)     /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;
#endif

    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        }
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:   /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:      /* Always emitted via tcg_out_call. */
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    stack_bounds_check(base, offset);
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rrs(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rrs(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
{
    return ct & TCG_CT_CONST;
}

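/*
 * For illustration: tcg_target_op_def() above only ever uses plain "r"
 * constraints, so no TCI opcode accepts an inline constant operand;
 * constants always reach an insn through tcg_out_movi() into a register.
 */
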
static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");

    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 64-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 64 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

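/*
 * For illustration: the static frame addressed off TCG_REG_CALL_STACK is
 * laid out as
 *
 *   [0, TCG_STATIC_CALL_ARGS_SIZE)                       helper call args
 *   [TCG_STATIC_CALL_ARGS_SIZE,
 *    TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)  spilled temps
 *
 * which is exactly the range enforced by stack_bounds_check() above.
 */
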
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}
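
/*
 * For illustration: the body is intentionally empty.  TCI emits no native
 * prologue or epilogue; generated "code" is bytecode that is entered
 * through the interpreter loop rather than by jumping to host machine
 * code.
 */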