/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
#include <ffi.h>

/*
 * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)(cond))
#endif

__thread uintptr_t tci_tb_ptr;
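
/* Store the low and high halves of a 64-bit value in two interpreter registers. */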
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    regs[low_index] = (uint32_t)value;
    regs[high_index] = value >> 32;
}

/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}

/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   b = immediate (bit position)
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   m = immediate (TCGMemOpIdx)
 *   n = immediate (call return length)
 *   r = register
 *   s = signed ldst offset
 */
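
/*
 * Each TCI instruction is a single 32-bit word.  As decoded below, the
 * opcode lives in bits [7:0]; register operands are 4-bit fields starting
 * at bits 8, 12, 16, 20, 24 and 28; branch/label targets are a signed
 * 20-bit displacement at bit 12; load/store offsets are a signed 16-bit
 * field at bit 16; a TCGMemOpIdx is a 12-bit field at bit 20; bit
 * positions and lengths for deposit/extract are 6-bit fields.
 */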

static void tci_args_l(uint32_t insn, const void *tb_ptr, void **l0)
{
    int diff = sextract32(insn, 12, 20);

    *l0 = diff ? (void *)tb_ptr + diff : NULL;
}

static void tci_args_r(uint32_t insn, TCGReg *r0)
{
    *r0 = extract32(insn, 8, 4);
}

static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    *n0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rl(uint32_t insn, const void *tb_ptr,
                        TCGReg *r0, void **l1)
{
    *r0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rr(uint32_t insn, TCGReg *r0, TCGReg *r1)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
}

static void tci_args_ri(uint32_t insn, TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = extract32(insn, 8, 4);
    *i1 = sextract32(insn, 12, 20);
}

static void tci_args_rrm(uint32_t insn, TCGReg *r0,
                         TCGReg *r1, TCGMemOpIdx *m2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *m2 = extract32(insn, 20, 12);
}

static void tci_args_rrr(uint32_t insn, TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
}

static void tci_args_rrs(uint32_t insn, TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = sextract32(insn, 16, 16);
}

static void tci_args_rrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                          uint8_t *i2, uint8_t *i3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = extract32(insn, 16, 6);
    *i3 = extract32(insn, 22, 6);
}

static void tci_args_rrrc(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *c3 = extract32(insn, 20, 4);
}

static void tci_args_rrrm(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGMemOpIdx *m3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *m3 = extract32(insn, 20, 12);
}

static void tci_args_rrrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *i3 = extract32(insn, 20, 6);
    *i4 = extract32(insn, 26, 6);
}

static void tci_args_rrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, TCGReg *r3, TCGReg *r4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
}

static void tci_args_rrrr(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
}

static void tci_args_rrrrrc(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *c5 = extract32(insn, 28, 4);
}

static void tci_args_rrrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *r5 = extract32(insn, 28, 4);
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;

    switch (condition) {
    case TCG_COND_EQ:  result = (u0 == u1); break;
    case TCG_COND_NE:  result = (u0 != u1); break;
    case TCG_COND_LT:  result = (i0 < i1);  break;
    case TCG_COND_GE:  result = (i0 >= i1); break;
    case TCG_COND_LE:  result = (i0 <= i1); break;
    case TCG_COND_GT:  result = (i0 > i1);  break;
    case TCG_COND_LTU: result = (u0 < u1);  break;
    case TCG_COND_GEU: result = (u0 >= u1); break;
    case TCG_COND_LEU: result = (u0 <= u1); break;
    case TCG_COND_GTU: result = (u0 > u1);  break;
    default:
        g_assert_not_reached();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;

    switch (condition) {
    case TCG_COND_EQ:  result = (u0 == u1); break;
    case TCG_COND_NE:  result = (u0 != u1); break;
    case TCG_COND_LT:  result = (i0 < i1);  break;
    case TCG_COND_GE:  result = (i0 >= i1); break;
    case TCG_COND_LE:  result = (i0 <= i1); break;
    case TCG_COND_GT:  result = (i0 > i1);  break;
    case TCG_COND_LTU: result = (u0 < u1);  break;
    case TCG_COND_GEU: result = (u0 >= u1); break;
    case TCG_COND_LEU: result = (u0 <= u1); break;
    case TCG_COND_GTU: result = (u0 > u1);  break;
    default:
        g_assert_not_reached();
    }
    return result;
}
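
/*
 * Guest memory access helpers.  With CONFIG_SOFTMMU the access goes
 * through the TCG memory helpers, which consult the softmmu TLB; in
 * user-only mode the guest address is translated with g2h() and accessed
 * directly on the host, bracketed by set/clear_helper_retaddr() so that
 * a fault can be attributed to the current TB.
 */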
static uint64_t tci_qemu_ld(CPUArchState *env, target_ulong taddr,
                            TCGMemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi) & (MO_BSWAP | MO_SSIZE);
    uintptr_t ra = (uintptr_t)tb_ptr;

#ifdef CONFIG_SOFTMMU
    switch (mop) {
    case MO_UB:
        return helper_ret_ldub_mmu(env, taddr, oi, ra);
    case MO_SB:
        return helper_ret_ldsb_mmu(env, taddr, oi, ra);
    case MO_LEUW:
        return helper_le_lduw_mmu(env, taddr, oi, ra);
    case MO_LESW:
        return helper_le_ldsw_mmu(env, taddr, oi, ra);
    case MO_LEUL:
        return helper_le_ldul_mmu(env, taddr, oi, ra);
    case MO_LESL:
        return helper_le_ldsl_mmu(env, taddr, oi, ra);
    case MO_LEQ:
        return helper_le_ldq_mmu(env, taddr, oi, ra);
    case MO_BEUW:
        return helper_be_lduw_mmu(env, taddr, oi, ra);
    case MO_BESW:
        return helper_be_ldsw_mmu(env, taddr, oi, ra);
    case MO_BEUL:
        return helper_be_ldul_mmu(env, taddr, oi, ra);
    case MO_BESL:
        return helper_be_ldsl_mmu(env, taddr, oi, ra);
    case MO_BEQ:
        return helper_be_ldq_mmu(env, taddr, oi, ra);
    default:
        g_assert_not_reached();
    }
#else
    void *haddr = g2h(env_cpu(env), taddr);
    uint64_t ret;

    set_helper_retaddr(ra);
    switch (mop) {
    case MO_UB:
        ret = ldub_p(haddr);
        break;
    case MO_SB:
        ret = ldsb_p(haddr);
        break;
    case MO_LEUW:
        ret = lduw_le_p(haddr);
        break;
    case MO_LESW:
        ret = ldsw_le_p(haddr);
        break;
    case MO_LEUL:
        ret = (uint32_t)ldl_le_p(haddr);
        break;
    case MO_LESL:
        ret = (int32_t)ldl_le_p(haddr);
        break;
    case MO_LEQ:
        ret = ldq_le_p(haddr);
        break;
    case MO_BEUW:
        ret = lduw_be_p(haddr);
        break;
    case MO_BESW:
        ret = ldsw_be_p(haddr);
        break;
    case MO_BEUL:
        ret = (uint32_t)ldl_be_p(haddr);
        break;
    case MO_BESL:
        ret = (int32_t)ldl_be_p(haddr);
        break;
    case MO_BEQ:
        ret = ldq_be_p(haddr);
        break;
    default:
        g_assert_not_reached();
    }
    clear_helper_retaddr();
    return ret;
#endif
}

static void tci_qemu_st(CPUArchState *env, target_ulong taddr, uint64_t val,
                        TCGMemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi) & (MO_BSWAP | MO_SSIZE);
    uintptr_t ra = (uintptr_t)tb_ptr;

#ifdef CONFIG_SOFTMMU
    switch (mop) {
    case MO_UB:
        helper_ret_stb_mmu(env, taddr, val, oi, ra);
        break;
    case MO_LEUW:
        helper_le_stw_mmu(env, taddr, val, oi, ra);
        break;
    case MO_LEUL:
        helper_le_stl_mmu(env, taddr, val, oi, ra);
        break;
    case MO_LEQ:
        helper_le_stq_mmu(env, taddr, val, oi, ra);
        break;
    case MO_BEUW:
        helper_be_stw_mmu(env, taddr, val, oi, ra);
        break;
    case MO_BEUL:
        helper_be_stl_mmu(env, taddr, val, oi, ra);
        break;
    case MO_BEQ:
        helper_be_stq_mmu(env, taddr, val, oi, ra);
        break;
    default:
        g_assert_not_reached();
    }
#else
    void *haddr = g2h(env_cpu(env), taddr);

    set_helper_retaddr(ra);
    switch (mop) {
    case MO_UB:
        stb_p(haddr, val);
        break;
    case MO_LEUW:
        stw_le_p(haddr, val);
        break;
    case MO_LEUL:
        stl_le_p(haddr, val);
        break;
    case MO_LEQ:
        stq_le_p(haddr, val);
        break;
    case MO_BEUW:
        stw_be_p(haddr, val);
        break;
    case MO_BEUL:
        stl_be_p(haddr, val);
        break;
    case MO_BEQ:
        stq_be_p(haddr, val);
        break;
    default:
        g_assert_not_reached();
    }
    clear_helper_retaddr();
#endif
}
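
/*
 * Many opcodes behave identically for 32-bit and 64-bit operands in the
 * interpreter.  The CASE_32_64() and CASE_64() macros let a single case
 * body serve both widths; on 32-bit hosts the _i64 opcodes do not exist,
 * so CASE_64() expands to nothing there.
 */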
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i64):    \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)                             \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint32_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
                   / sizeof(uint64_t)];
    void *call_slots[TCG_STATIC_CALL_ARGS_SIZE / sizeof(uint64_t)];

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
    /* Other call_slots entries initialized at first use (see below). */
    call_slots[0] = NULL;

    for (;;) {
        uint32_t insn;
        TCGOpcode opc;
        TCGReg r0, r1, r2, r3, r4, r5;
        tcg_target_ulong t1;
        TCGCond condition;
        target_ulong taddr;
        uint8_t pos, len;
        uint32_t tmp32;
        uint64_t tmp64;
        uint64_t T1, T2;
        TCGMemOpIdx oi;
        int32_t ofs;
        void *ptr;

        insn = *tb_ptr++;
        opc = extract32(insn, 0, 8);

        switch (opc) {
        case INDEX_op_call:
            /*
             * Set up the ffi_avalue array once, delayed until now
             * because many TB's do not make any calls. In tcg_gen_callN,
             * we arranged for every real argument to be "left-aligned"
             * in each 64-bit slot.
             */
            if (unlikely(call_slots[0] == NULL)) {
                for (int i = 0; i < ARRAY_SIZE(call_slots); ++i) {
                    call_slots[i] = &stack[i];
                }
            }

            tci_args_nl(insn, tb_ptr, &len, &ptr);

            /* Helper functions may need to access the "return address" */
            tci_tb_ptr = (uintptr_t)tb_ptr;

            {
                void **pptr = ptr;
                ffi_call(pptr[1], pptr[0], stack, call_slots);
            }

            /* Any result winds up "left-aligned" in the stack[0] slot. */
            switch (len) {
            case 0: /* void */
                break;
            case 1: /* uint32_t */
                /*
                 * Note that libffi has an odd special case in that it will
                 * always widen an integral result to ffi_arg.
                 */
                if (sizeof(ffi_arg) == 4) {
                    regs[TCG_REG_R0] = *(uint32_t *)stack;
                    break;
                }
                /* fall through */
            case 2: /* uint64_t */
                if (TCG_TARGET_REG_BITS == 32) {
                    tci_write_reg64(regs, TCG_REG_R1, TCG_REG_R0, stack[0]);
                } else {
                    regs[TCG_REG_R0] = stack[0];
                }
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_br:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_movcond_i32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare32(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_movcond_i64:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare64(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi:
            tci_args_ri(insn, &r0, &t1);
            regs[r0] = t1;
            break;
        case INDEX_op_tci_movl:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            regs[r0] = *(tcg_target_ulong *)ptr;
            break;

            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;
#if TCG_TARGET_HAS_andc_i32 || TCG_TARGET_HAS_andc_i64
        CASE_32_64(andc)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & ~regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_orc_i32 || TCG_TARGET_HAS_orc_i64
        CASE_32_64(orc)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | ~regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_eqv_i32 || TCG_TARGET_HAS_eqv_i64
        CASE_32_64(eqv)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] ^ regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_nand_i32 || TCG_TARGET_HAS_nand_i64
        CASE_32_64(nand)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] & regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_nor_i32 || TCG_TARGET_HAS_nor_i64
        CASE_32_64(nor)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] | regs[r2]);
            break;
#endif

            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;
#if TCG_TARGET_HAS_clz_i32
        case INDEX_op_clz_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? clz32(tmp32) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctz_i32
        case INDEX_op_ctz_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? ctz32(tmp32) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctpop_i32
        case INDEX_op_ctpop_i32:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ctpop32(regs[r1]);
            break;
#endif

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_extract_i32
        case INDEX_op_extract_i32:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = extract32(regs[r1], pos, len);
            break;
#endif
#if TCG_TARGET_HAS_sextract_i32
        case INDEX_op_sextract_i32:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = sextract32(regs[r1], pos, len);
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            if ((uint32_t)regs[r0]) {
                tb_ptr = ptr;
            }
            break;
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_add2_i32
        case INDEX_op_add2_i32:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 + T2);
            break;
#endif
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_sub2_i32
        case INDEX_op_sub2_i32:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 - T2);
            break;
#endif
#if TCG_TARGET_HAS_mulu2_i32
        case INDEX_op_mulu2_i32:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
            break;
#endif
#if TCG_TARGET_HAS_muls2_i32
        case INDEX_op_muls2_i32:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64 || \
    TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(ext16s)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#endif

#if TCG_TARGET_REG_BITS == 64
            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;
#if TCG_TARGET_HAS_clz_i64
        case INDEX_op_clz_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? clz64(regs[r1]) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctz_i64
        case INDEX_op_ctz_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? ctz64(regs[r1]) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctpop_i64
        case INDEX_op_ctpop_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ctpop64(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_mulu2_i64
        case INDEX_op_mulu2_i64:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            mulu64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
            break;
#endif
#if TCG_TARGET_HAS_muls2_i64
        case INDEX_op_muls2_i64:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            muls64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
            break;
#endif
#if TCG_TARGET_HAS_add2_i64
        case INDEX_op_add2_i64:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = regs[r2] + regs[r4];
            T2 = regs[r3] + regs[r5] + (T1 < regs[r2]);
            regs[r0] = T1;
            regs[r1] = T2;
            break;
#endif
#if TCG_TARGET_HAS_sub2_i64
        case INDEX_op_sub2_i64:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = regs[r2] - regs[r4];
            T2 = regs[r3] - regs[r5] - (regs[r2] < regs[r4]);
            regs[r0] = T1;
            regs[r1] = T2;
            break;
#endif

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_extract_i64
        case INDEX_op_extract_i64:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = extract64(regs[r1], pos, len);
            break;
#endif
#if TCG_TARGET_HAS_sextract_i64
        case INDEX_op_sextract_i64:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = sextract64(regs[r1], pos, len);
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            if (regs[r0]) {
                tb_ptr = ptr;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            return (uintptr_t)ptr;

        case INDEX_op_goto_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = *(void **)ptr;
            break;

        case INDEX_op_goto_ptr:
            tci_args_r(insn, &r0);
            ptr = (void *)regs[r0];
            if (!ptr) {
                return 0;
            }
            tb_ptr = ptr;
            break;

        case INDEX_op_qemu_ld_i32:
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            tmp32 = tci_qemu_ld(env, taddr, oi, tb_ptr);
            regs[r0] = tmp32;
            break;

        case INDEX_op_qemu_ld_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                taddr = regs[r2];
            } else {
                tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
                taddr = tci_uint64(regs[r3], regs[r2]);
                oi = regs[r4];
            }
            tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg64(regs, r1, r0, tmp64);
            } else {
                regs[r0] = tmp64;
            }
            break;

        case INDEX_op_qemu_st_i32:
            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                taddr = tci_uint64(regs[r2], regs[r1]);
            }
            tmp32 = regs[r0];
            tci_qemu_st(env, taddr, tmp32, oi, tb_ptr);
            break;

        case INDEX_op_qemu_st_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
                tmp64 = regs[r0];
            } else {
                if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
                    tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
                    taddr = regs[r2];
                } else {
                    tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
                    taddr = tci_uint64(regs[r3], regs[r2]);
                    oi = regs[r4];
                }
                tmp64 = tci_uint64(regs[r1], regs[r0]);
            }
            tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
            break;

        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
    }
}
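
/*
 * Bytecode disassembly: opcode names are taken from tcg_op_defs[], and
 * register/condition operands are rendered by the str_r() and str_c()
 * helpers below.  The operand decoding must stay in sync with the
 * interpreter above.
 */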

/*
 * Disassembler that matches the interpreter
 */

static const char *str_r(TCGReg r)
{
    static const char regs[TCG_TARGET_NB_REGS][4] = {
        "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
        "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
    };

    QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
    QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);

    assert((unsigned)r < TCG_TARGET_NB_REGS);
    return regs[r];
}

static const char *str_c(TCGCond c)
{
    static const char cond[16][8] = {
        [TCG_COND_NEVER] = "never",
        [TCG_COND_ALWAYS] = "always",
        [TCG_COND_EQ] = "eq",
        [TCG_COND_NE] = "ne",
        [TCG_COND_LT] = "lt",
        [TCG_COND_GE] = "ge",
        [TCG_COND_LE] = "le",
        [TCG_COND_GT] = "gt",
        [TCG_COND_LTU] = "ltu",
        [TCG_COND_GEU] = "geu",
        [TCG_COND_LEU] = "leu",
        [TCG_COND_GTU] = "gtu",
    };

    assert((unsigned)c < ARRAY_SIZE(cond));
    assert(cond[c][0] != 0);
    return cond[c];
}

/* Disassemble TCI bytecode. */
int print_insn_tci(bfd_vma addr, disassemble_info *info)
{
    const uint32_t *tb_ptr = (const void *)(uintptr_t)addr;
    const TCGOpDef *def;
    const char *op_name;
    uint32_t insn;
    TCGOpcode op;
    TCGReg r0, r1, r2, r3, r4, r5;
    tcg_target_ulong i1;
    int32_t s2;
    TCGCond c;
    TCGMemOpIdx oi;
    uint8_t pos, len;
    void *ptr;

    /* TCI is always the host, so we don't need to load indirect. */
    insn = *tb_ptr++;

    info->fprintf_func(info->stream, "%08x  ", insn);

    op = extract32(insn, 0, 8);
    def = &tcg_op_defs[op];
    op_name = def->name;

    switch (op) {
    case INDEX_op_br:
    case INDEX_op_exit_tb:
    case INDEX_op_goto_tb:
        tci_args_l(insn, tb_ptr, &ptr);
        info->fprintf_func(info->stream, "%-12s  %p", op_name, ptr);
        break;

    case INDEX_op_goto_ptr:
        tci_args_r(insn, &r0);
        info->fprintf_func(info->stream, "%-12s  %s", op_name, str_r(r0));
        break;

    case INDEX_op_call:
        tci_args_nl(insn, tb_ptr, &len, &ptr);
        info->fprintf_func(info->stream, "%-12s  %d, %p", op_name, len, ptr);
        break;

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, 0, ne, %p",
                           op_name, str_r(r0), ptr);
        break;

    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        tci_args_rrrc(insn, &r0, &r1, &r2, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
        break;

    case INDEX_op_tci_movi:
        tci_args_ri(insn, &r0, &i1);
        info->fprintf_func(info->stream, "%-12s  %s, 0x%" TCG_PRIlx,
                           op_name, str_r(r0), i1);
        break;

    case INDEX_op_tci_movl:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, %p",
                           op_name, str_r(r0), ptr);
        break;

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i32:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i32:
    case INDEX_op_st_i64:
        tci_args_rrs(insn, &r0, &r1, &s2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %d",
                           op_name, str_r(r0), str_r(r1), s2);
        break;

    case INDEX_op_mov_i32:
    case INDEX_op_mov_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        tci_args_rr(insn, &r0, &r1);
        info->fprintf_func(info->stream, "%-12s  %s, %s",
                           op_name, str_r(r0), str_r(r1));
        break;

    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        tci_args_rrr(insn, &r0, &r1, &r2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2));
        break;

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
        break;

    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
        tci_args_rrbb(insn, &r0, &r1, &pos, &len);
        info->fprintf_func(info->stream, "%-12s  %s,%s,%d,%d",
                           op_name, str_r(r0), str_r(r1), pos, len);
        break;

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_c(c));
        break;

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1),
                           str_r(r2), str_r(r3));
        break;

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_r(r5));
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        len = DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
        goto do_qemu_ldst;
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        len = 1;
    do_qemu_ldst:
        len += DIV_ROUND_UP(TARGET_LONG_BITS, TCG_TARGET_REG_BITS);
        switch (len) {
        case 2:
            tci_args_rrm(insn, &r0, &r1, &oi);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %x",
                               op_name, str_r(r0), str_r(r1), oi);
            break;
        case 3:
            tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %x",
                               op_name, str_r(r0), str_r(r1), str_r(r2), oi);
            break;
        case 4:
            tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s",
                               op_name, str_r(r0), str_r(r1),
                               str_r(r2), str_r(r3), str_r(r4));
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case 0:
        /* tcg_out_nop_fill uses zeros */
        if (insn == 0) {
            info->fprintf_func(info->stream, "align");
            break;
        }
        /* fall through */

    default:
        info->fprintf_func(info->stream, "illegal opcode %d", op);
        break;
    }

    return sizeof(insn);
}