/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"
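
/*
 * tcg-pool.c.inc provides the constant-pool helpers (new_pool_label,
 * new_pool_l2) used below by tcg_out_movi and tcg_out_call for values
 * that do not fit in an instruction word.
 */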

static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}
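
/*
 * A reminder on the constraint-set names used above: C_Om_In(...) declares
 * m output and n input operands, each letter being one operand's
 * constraint; tci only ever uses "r" (any register).  So add2/sub2 take
 * C_O2_I4: a low/high result pair produced from two low/high input pairs.
 */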

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0, TCG_REG_R1, TCG_REG_R2, TCG_REG_R3,
    TCG_REG_R4, TCG_REG_R5, TCG_REG_R6, TCG_REG_R7,
    TCG_REG_R8, TCG_REG_R9, TCG_REG_R10, TCG_REG_R11,
    TCG_REG_R12, TCG_REG_R13, TCG_REG_R14, TCG_REG_R15,
};

#if MAX_OPC_PARAM_IARGS != 7
# error Fix needed, number of supported input arguments changed!
#endif

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00", "r01", "r02", "r03", "r04", "r05", "r06", "r07",
    "r08", "r09", "r10", "r11", "r12", "r13", "r14", "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
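
/*
 * This is the only relocation type tci emits; "type" is literally the
 * field width, 20 bits.  The displacement is in bytes, relative to the
 * end of the 32-bit insn being patched, and lands in bits [12,32).
 */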

static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
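
/*
 * Each tcg_out_op_* helper below assembles one 32-bit instruction word
 * with deposit32, in the layout the interpreter expects: the opcode in
 * bits [0,8), 4-bit register numbers packed from bit 8 upward, and any
 * immediate in the remaining high bits (a 20-bit signed constant or
 * displacement at bit 12, a 16-bit load/store offset at bit 16, a
 * 12-bit qemu_ld/st memory-op index at bit 20, or 6-bit position and
 * length fields for deposit/extract).
 */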

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 20, 12, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrm(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGArg m3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m3 == extract32(m3, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 12, m3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
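
/*
 * Note that the oversized-offset path above clobbers TCG_REG_TMP: an
 * offset that does not fit the signed 16-bit field is built with
 * tcg_out_movi, added to the base register, and the access is then
 * emitted with an offset of zero from the temporary.
 */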

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
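
/*
 * Constants therefore take one of two forms: tci_movi carries a 20-bit
 * signed immediate inline, while tci_movl emits a 20-bit relocation
 * (resolved by patch_reloc above) from which the interpreter fetches
 * the full value out of the constant pool.
 */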

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         ffi_cif *cif)
{
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else if (cif->rtype->size == 4) {
        which = 1;
    } else {
        tcg_debug_assert(cif->rtype->size == 8);
        which = 2;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
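
/*
 * The call target and its ffi_cif descriptor are stored as a pair in
 * the constant pool; the insn itself only carries "which" in bits
 * [8,12), telling the interpreter how to place the libffi return
 * value (0: void, 1: 32 bits, 2: 64 bits).
 */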

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGOpcode exts;

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_op_p(s, opc, (void *)args[0]);
        break;

    case INDEX_op_goto_tb:
        tcg_debug_assert(s->tb_jmp_insn_offset == 0);
        /* indirect jump method. */
        tcg_out_op_p(s, opc, s->tb_jmp_target_addr + args[0]);
        set_jmp_reset_offset(s, args[0]);
        break;

    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        {
            TCGArg pos = args[2], len = args[3];
            TCGArg max = tcg_op_defs[opc].flags & TCG_OPF_64BIT ? 64 : 32;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrbb(s, opc, args[0], args[1], pos, len);
        }
        break;
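
    /*
     * There is no two-register conditional-branch format, so brcond is
     * lowered as two insns: a setcond computing the condition into
     * TCG_REG_TMP, then a branch testing that temporary.
     */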
    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext)
    CASE_64(extu)
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
        exts = INDEX_op_ext16s_i32;
        goto do_bswap;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        exts = INDEX_op_ext16s_i64;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        exts = INDEX_op_ext32s_i64;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_op_rr(s, exts, args[0], args[0]);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        }
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 64-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 64 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
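
/*
 * The resulting frame layout: helper-call arguments occupy the first
 * TCG_STATIC_CALL_ARGS_SIZE bytes of the interpreter stack, with TCG
 * temp/spill storage in the TCG_STATIC_FRAME_SIZE bytes after them;
 * stack_bounds_check() asserts that every TCG_REG_CALL_STACK-relative
 * access stays inside that window.
 */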

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}