2 * Tiny Code Interpreter for QEMU
4 * Copyright (c) 2009, 2011 Stefan Weil
6 * This program is free software: you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation, either version 2 of the License, or
9 * (at your option) any later version.
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
16 * You should have received a copy of the GNU General Public License
17 * along with this program. If not, see <http://www.gnu.org/licenses/>.
22 /* Defining NDEBUG disables assertions (which makes the code faster). */
23 #if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
#include "qemu-common.h"
#include "exec/exec-all.h" /* MAX_OPC_PARAM_IARGS */

#include <string.h> /* memcpy */
31 /* Marker for missing code. */
34 fprintf(stderr, "TODO %s:%u: %s()\n", \
35 __FILE__, __LINE__, __func__); \
39 #if MAX_OPC_PARAM_IARGS != 5
40 # error Fix needed, number of supported input arguments changed!
42 #if TCG_TARGET_REG_BITS == 32
43 typedef uint64_t (*helper_function
)(tcg_target_ulong
, tcg_target_ulong
,
44 tcg_target_ulong
, tcg_target_ulong
,
45 tcg_target_ulong
, tcg_target_ulong
,
46 tcg_target_ulong
, tcg_target_ulong
,
47 tcg_target_ulong
, tcg_target_ulong
);
49 typedef uint64_t (*helper_function
)(tcg_target_ulong
, tcg_target_ulong
,
50 tcg_target_ulong
, tcg_target_ulong
,
54 /* Targets which don't use GETPC also don't need tci_tb_ptr
55 which makes them a little faster. */
60 static tcg_target_ulong tci_reg
[TCG_TARGET_NB_REGS
];
62 static tcg_target_ulong
tci_read_reg(TCGReg index
)
64 assert(index
< ARRAY_SIZE(tci_reg
));
65 return tci_reg
[index
];
68 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
69 static int8_t tci_read_reg8s(TCGReg index
)
71 return (int8_t)tci_read_reg(index
);
75 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
76 static int16_t tci_read_reg16s(TCGReg index
)
78 return (int16_t)tci_read_reg(index
);
82 #if TCG_TARGET_REG_BITS == 64
83 static int32_t tci_read_reg32s(TCGReg index
)
85 return (int32_t)tci_read_reg(index
);
89 static uint8_t tci_read_reg8(TCGReg index
)
91 return (uint8_t)tci_read_reg(index
);
94 static uint16_t tci_read_reg16(TCGReg index
)
96 return (uint16_t)tci_read_reg(index
);
99 static uint32_t tci_read_reg32(TCGReg index
)
101 return (uint32_t)tci_read_reg(index
);
104 #if TCG_TARGET_REG_BITS == 64
105 static uint64_t tci_read_reg64(TCGReg index
)
107 return tci_read_reg(index
);
111 static void tci_write_reg(TCGReg index
, tcg_target_ulong value
)
113 assert(index
< ARRAY_SIZE(tci_reg
));
114 assert(index
!= TCG_AREG0
);
115 assert(index
!= TCG_REG_CALL_STACK
);
116 tci_reg
[index
] = value
;
119 #if TCG_TARGET_REG_BITS == 64
120 static void tci_write_reg32s(TCGReg index
, int32_t value
)
122 tci_write_reg(index
, value
);
126 static void tci_write_reg8(TCGReg index
, uint8_t value
)
128 tci_write_reg(index
, value
);
131 static void tci_write_reg32(TCGReg index
, uint32_t value
)
133 tci_write_reg(index
, value
);
136 #if TCG_TARGET_REG_BITS == 32
/* Store a 64-bit VALUE into the register pair HI_INDEX:LO_INDEX
   (32-bit hosts: low half first, then high half). */
static void tci_write_reg64(uint32_t hi_index, uint32_t lo_index,
                            uint64_t value)
{
    tci_write_reg(lo_index, value);
    tci_write_reg(hi_index, value >> 32);
}
143 #elif TCG_TARGET_REG_BITS == 64
144 static void tci_write_reg64(TCGReg index
, uint64_t value
)
146 tci_write_reg(index
, value
);
150 #if TCG_TARGET_REG_BITS == 32
/* Combine two 32-bit halves into one 64-bit value (HIGH:LOW). */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t result = (uint64_t)high << 32;
    return result + low;
}
158 /* Read constant (native size) from bytecode. */
159 static tcg_target_ulong
tci_read_i(uint8_t **tb_ptr
)
161 tcg_target_ulong value
= *(tcg_target_ulong
*)(*tb_ptr
);
162 *tb_ptr
+= sizeof(value
);
/* Read unsigned constant (32 bit) from bytecode and advance *TB_PTR.
   memcpy avoids an unaligned, strict-aliasing-violating dereference of
   the byte stream (undefined behavior; traps on strict-alignment hosts). */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value;
    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
/* Read signed constant (32 bit) from bytecode and advance *TB_PTR.
   memcpy avoids an unaligned, strict-aliasing-violating dereference of
   the byte stream (undefined behavior; traps on strict-alignment hosts). */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value;
    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
182 #if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode and advance *TB_PTR.
   memcpy avoids an unaligned, strict-aliasing-violating dereference of
   the byte stream (undefined behavior; traps on strict-alignment hosts). */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value;
    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
192 /* Read indexed register (native size) from bytecode. */
193 static tcg_target_ulong
tci_read_r(uint8_t **tb_ptr
)
195 tcg_target_ulong value
= tci_read_reg(**tb_ptr
);
/* Read a register index byte from the bytecode, advance *TB_PTR past it,
   and return that register truncated to 8 unsigned bits. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t val = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return val;
}
208 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read a register index byte from the bytecode, advance *TB_PTR past it,
   and return that register truncated to 8 signed bits. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t val = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return val;
}
/* Read a register index byte from the bytecode, advance *TB_PTR past it,
   and return that register truncated to 16 unsigned bits. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t val = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return val;
}
226 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read a register index byte from the bytecode, advance *TB_PTR past it,
   and return that register truncated to 16 signed bits. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t val = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return val;
}
/* Read a register index byte from the bytecode, advance *TB_PTR past it,
   and return that register truncated to 32 unsigned bits. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t val = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return val;
}
244 #if TCG_TARGET_REG_BITS == 32
/* Read two register index bytes (low half first, then high half) and
   return the combined 64-bit register pair value (32-bit hosts). */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_r32(tb_ptr);
    uint32_t hi = tci_read_r32(tb_ptr);
    return tci_uint64(hi, lo);
}
251 #elif TCG_TARGET_REG_BITS == 64
/* Read a register index byte from the bytecode, advance *TB_PTR past it,
   and return that register truncated to 32 signed bits (64-bit hosts). */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t val = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return val;
}
/* Read a register index byte from the bytecode, advance *TB_PTR past it,
   and return that register's full 64-bit contents (64-bit hosts). */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t val = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return val;
}
269 /* Read indexed register(s) with target address from bytecode. */
270 static target_ulong
tci_read_ulong(uint8_t **tb_ptr
)
272 target_ulong taddr
= tci_read_r(tb_ptr
);
273 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
274 taddr
+= (uint64_t)tci_read_r(tb_ptr
) << 32;
279 /* Read indexed register or constant (native size) from bytecode. */
280 static tcg_target_ulong
tci_read_ri(uint8_t **tb_ptr
)
282 tcg_target_ulong value
;
285 if (r
== TCG_CONST
) {
286 value
= tci_read_i(tb_ptr
);
288 value
= tci_read_reg(r
);
293 /* Read indexed register or constant (32 bit) from bytecode. */
294 static uint32_t tci_read_ri32(uint8_t **tb_ptr
)
299 if (r
== TCG_CONST
) {
300 value
= tci_read_i32(tb_ptr
);
302 value
= tci_read_reg32(r
);
307 #if TCG_TARGET_REG_BITS == 32
/* Read two register-or-constant operands (low half first, then high
   half) and combine them into one 64-bit value (32-bit hosts). */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_ri32(tb_ptr);
    uint32_t hi = tci_read_ri32(tb_ptr);
    return tci_uint64(hi, lo);
}
314 #elif TCG_TARGET_REG_BITS == 64
315 /* Read indexed register or constant (64 bit) from bytecode. */
316 static uint64_t tci_read_ri64(uint8_t **tb_ptr
)
321 if (r
== TCG_CONST
) {
322 value
= tci_read_i64(tb_ptr
);
324 value
= tci_read_reg64(r
);
330 static tcg_target_ulong
tci_read_label(uint8_t **tb_ptr
)
332 tcg_target_ulong label
= tci_read_i(tb_ptr
);
337 static bool tci_compare32(uint32_t u0
, uint32_t u1
, TCGCond condition
)
379 static bool tci_compare64(uint64_t u0
, uint64_t u1
, TCGCond condition
)
421 #ifdef CONFIG_SOFTMMU
422 # define mmuidx tci_read_i(&tb_ptr)
423 # define qemu_ld_ub \
424 helper_ret_ldub_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
425 # define qemu_ld_leuw \
426 helper_le_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
427 # define qemu_ld_leul \
428 helper_le_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
429 # define qemu_ld_leq \
430 helper_le_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
431 # define qemu_ld_beuw \
432 helper_be_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
433 # define qemu_ld_beul \
434 helper_be_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
435 # define qemu_ld_beq \
436 helper_be_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
437 # define qemu_st_b(X) \
438 helper_ret_stb_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
439 # define qemu_st_lew(X) \
440 helper_le_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
441 # define qemu_st_lel(X) \
442 helper_le_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
443 # define qemu_st_leq(X) \
444 helper_le_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
445 # define qemu_st_bew(X) \
446 helper_be_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
447 # define qemu_st_bel(X) \
448 helper_be_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
449 # define qemu_st_beq(X) \
450 helper_be_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
452 # define qemu_ld_ub ldub_p(g2h(taddr))
453 # define qemu_ld_leuw lduw_le_p(g2h(taddr))
454 # define qemu_ld_leul (uint32_t)ldl_le_p(g2h(taddr))
455 # define qemu_ld_leq ldq_le_p(g2h(taddr))
456 # define qemu_ld_beuw lduw_be_p(g2h(taddr))
457 # define qemu_ld_beul (uint32_t)ldl_be_p(g2h(taddr))
458 # define qemu_ld_beq ldq_be_p(g2h(taddr))
459 # define qemu_st_b(X) stb_p(g2h(taddr), X)
460 # define qemu_st_lew(X) stw_le_p(g2h(taddr), X)
461 # define qemu_st_lel(X) stl_le_p(g2h(taddr), X)
462 # define qemu_st_leq(X) stq_le_p(g2h(taddr), X)
463 # define qemu_st_bew(X) stw_be_p(g2h(taddr), X)
464 # define qemu_st_bel(X) stl_be_p(g2h(taddr), X)
465 # define qemu_st_beq(X) stq_be_p(g2h(taddr), X)
468 /* Interpret pseudo code in tb. */
469 uintptr_t tcg_qemu_tb_exec(CPUArchState
*env
, uint8_t *tb_ptr
)
471 long tcg_temps
[CPU_TEMP_BUF_NLONGS
];
472 uintptr_t sp_value
= (uintptr_t)(tcg_temps
+ CPU_TEMP_BUF_NLONGS
);
473 uintptr_t next_tb
= 0;
475 tci_reg
[TCG_AREG0
] = (tcg_target_ulong
)env
;
476 tci_reg
[TCG_REG_CALL_STACK
] = sp_value
;
480 TCGOpcode opc
= tb_ptr
[0];
482 uint8_t op_size
= tb_ptr
[1];
483 uint8_t *old_code_ptr
= tb_ptr
;
488 tcg_target_ulong label
;
495 #if TCG_TARGET_REG_BITS == 32
501 tci_tb_ptr
= (uintptr_t)tb_ptr
;
504 /* Skip opcode and size entry. */
515 case INDEX_op_discard
:
518 case INDEX_op_set_label
:
522 t0
= tci_read_ri(&tb_ptr
);
523 #if TCG_TARGET_REG_BITS == 32
524 tmp64
= ((helper_function
)t0
)(tci_read_reg(TCG_REG_R0
),
525 tci_read_reg(TCG_REG_R1
),
526 tci_read_reg(TCG_REG_R2
),
527 tci_read_reg(TCG_REG_R3
),
528 tci_read_reg(TCG_REG_R5
),
529 tci_read_reg(TCG_REG_R6
),
530 tci_read_reg(TCG_REG_R7
),
531 tci_read_reg(TCG_REG_R8
),
532 tci_read_reg(TCG_REG_R9
),
533 tci_read_reg(TCG_REG_R10
));
534 tci_write_reg(TCG_REG_R0
, tmp64
);
535 tci_write_reg(TCG_REG_R1
, tmp64
>> 32);
537 tmp64
= ((helper_function
)t0
)(tci_read_reg(TCG_REG_R0
),
538 tci_read_reg(TCG_REG_R1
),
539 tci_read_reg(TCG_REG_R2
),
540 tci_read_reg(TCG_REG_R3
),
541 tci_read_reg(TCG_REG_R5
));
542 tci_write_reg(TCG_REG_R0
, tmp64
);
546 label
= tci_read_label(&tb_ptr
);
547 assert(tb_ptr
== old_code_ptr
+ op_size
);
548 tb_ptr
= (uint8_t *)label
;
550 case INDEX_op_setcond_i32
:
552 t1
= tci_read_r32(&tb_ptr
);
553 t2
= tci_read_ri32(&tb_ptr
);
554 condition
= *tb_ptr
++;
555 tci_write_reg32(t0
, tci_compare32(t1
, t2
, condition
));
557 #if TCG_TARGET_REG_BITS == 32
558 case INDEX_op_setcond2_i32
:
560 tmp64
= tci_read_r64(&tb_ptr
);
561 v64
= tci_read_ri64(&tb_ptr
);
562 condition
= *tb_ptr
++;
563 tci_write_reg32(t0
, tci_compare64(tmp64
, v64
, condition
));
565 #elif TCG_TARGET_REG_BITS == 64
566 case INDEX_op_setcond_i64
:
568 t1
= tci_read_r64(&tb_ptr
);
569 t2
= tci_read_ri64(&tb_ptr
);
570 condition
= *tb_ptr
++;
571 tci_write_reg64(t0
, tci_compare64(t1
, t2
, condition
));
574 case INDEX_op_mov_i32
:
576 t1
= tci_read_r32(&tb_ptr
);
577 tci_write_reg32(t0
, t1
);
579 case INDEX_op_movi_i32
:
581 t1
= tci_read_i32(&tb_ptr
);
582 tci_write_reg32(t0
, t1
);
585 /* Load/store operations (32 bit). */
587 case INDEX_op_ld8u_i32
:
589 t1
= tci_read_r(&tb_ptr
);
590 t2
= tci_read_s32(&tb_ptr
);
591 tci_write_reg8(t0
, *(uint8_t *)(t1
+ t2
));
593 case INDEX_op_ld8s_i32
:
594 case INDEX_op_ld16u_i32
:
597 case INDEX_op_ld16s_i32
:
600 case INDEX_op_ld_i32
:
602 t1
= tci_read_r(&tb_ptr
);
603 t2
= tci_read_s32(&tb_ptr
);
604 tci_write_reg32(t0
, *(uint32_t *)(t1
+ t2
));
606 case INDEX_op_st8_i32
:
607 t0
= tci_read_r8(&tb_ptr
);
608 t1
= tci_read_r(&tb_ptr
);
609 t2
= tci_read_s32(&tb_ptr
);
610 *(uint8_t *)(t1
+ t2
) = t0
;
612 case INDEX_op_st16_i32
:
613 t0
= tci_read_r16(&tb_ptr
);
614 t1
= tci_read_r(&tb_ptr
);
615 t2
= tci_read_s32(&tb_ptr
);
616 *(uint16_t *)(t1
+ t2
) = t0
;
618 case INDEX_op_st_i32
:
619 t0
= tci_read_r32(&tb_ptr
);
620 t1
= tci_read_r(&tb_ptr
);
621 t2
= tci_read_s32(&tb_ptr
);
622 assert(t1
!= sp_value
|| (int32_t)t2
< 0);
623 *(uint32_t *)(t1
+ t2
) = t0
;
626 /* Arithmetic operations (32 bit). */
628 case INDEX_op_add_i32
:
630 t1
= tci_read_ri32(&tb_ptr
);
631 t2
= tci_read_ri32(&tb_ptr
);
632 tci_write_reg32(t0
, t1
+ t2
);
634 case INDEX_op_sub_i32
:
636 t1
= tci_read_ri32(&tb_ptr
);
637 t2
= tci_read_ri32(&tb_ptr
);
638 tci_write_reg32(t0
, t1
- t2
);
640 case INDEX_op_mul_i32
:
642 t1
= tci_read_ri32(&tb_ptr
);
643 t2
= tci_read_ri32(&tb_ptr
);
644 tci_write_reg32(t0
, t1
* t2
);
646 #if TCG_TARGET_HAS_div_i32
647 case INDEX_op_div_i32
:
649 t1
= tci_read_ri32(&tb_ptr
);
650 t2
= tci_read_ri32(&tb_ptr
);
651 tci_write_reg32(t0
, (int32_t)t1
/ (int32_t)t2
);
653 case INDEX_op_divu_i32
:
655 t1
= tci_read_ri32(&tb_ptr
);
656 t2
= tci_read_ri32(&tb_ptr
);
657 tci_write_reg32(t0
, t1
/ t2
);
659 case INDEX_op_rem_i32
:
661 t1
= tci_read_ri32(&tb_ptr
);
662 t2
= tci_read_ri32(&tb_ptr
);
663 tci_write_reg32(t0
, (int32_t)t1
% (int32_t)t2
);
665 case INDEX_op_remu_i32
:
667 t1
= tci_read_ri32(&tb_ptr
);
668 t2
= tci_read_ri32(&tb_ptr
);
669 tci_write_reg32(t0
, t1
% t2
);
671 #elif TCG_TARGET_HAS_div2_i32
672 case INDEX_op_div2_i32
:
673 case INDEX_op_divu2_i32
:
677 case INDEX_op_and_i32
:
679 t1
= tci_read_ri32(&tb_ptr
);
680 t2
= tci_read_ri32(&tb_ptr
);
681 tci_write_reg32(t0
, t1
& t2
);
683 case INDEX_op_or_i32
:
685 t1
= tci_read_ri32(&tb_ptr
);
686 t2
= tci_read_ri32(&tb_ptr
);
687 tci_write_reg32(t0
, t1
| t2
);
689 case INDEX_op_xor_i32
:
691 t1
= tci_read_ri32(&tb_ptr
);
692 t2
= tci_read_ri32(&tb_ptr
);
693 tci_write_reg32(t0
, t1
^ t2
);
696 /* Shift/rotate operations (32 bit). */
698 case INDEX_op_shl_i32
:
700 t1
= tci_read_ri32(&tb_ptr
);
701 t2
= tci_read_ri32(&tb_ptr
);
702 tci_write_reg32(t0
, t1
<< (t2
& 31));
704 case INDEX_op_shr_i32
:
706 t1
= tci_read_ri32(&tb_ptr
);
707 t2
= tci_read_ri32(&tb_ptr
);
708 tci_write_reg32(t0
, t1
>> (t2
& 31));
710 case INDEX_op_sar_i32
:
712 t1
= tci_read_ri32(&tb_ptr
);
713 t2
= tci_read_ri32(&tb_ptr
);
714 tci_write_reg32(t0
, ((int32_t)t1
>> (t2
& 31)));
716 #if TCG_TARGET_HAS_rot_i32
717 case INDEX_op_rotl_i32
:
719 t1
= tci_read_ri32(&tb_ptr
);
720 t2
= tci_read_ri32(&tb_ptr
);
721 tci_write_reg32(t0
, rol32(t1
, t2
& 31));
723 case INDEX_op_rotr_i32
:
725 t1
= tci_read_ri32(&tb_ptr
);
726 t2
= tci_read_ri32(&tb_ptr
);
727 tci_write_reg32(t0
, ror32(t1
, t2
& 31));
730 #if TCG_TARGET_HAS_deposit_i32
731 case INDEX_op_deposit_i32
:
733 t1
= tci_read_r32(&tb_ptr
);
734 t2
= tci_read_r32(&tb_ptr
);
737 tmp32
= (((1 << tmp8
) - 1) << tmp16
);
738 tci_write_reg32(t0
, (t1
& ~tmp32
) | ((t2
<< tmp16
) & tmp32
));
741 case INDEX_op_brcond_i32
:
742 t0
= tci_read_r32(&tb_ptr
);
743 t1
= tci_read_ri32(&tb_ptr
);
744 condition
= *tb_ptr
++;
745 label
= tci_read_label(&tb_ptr
);
746 if (tci_compare32(t0
, t1
, condition
)) {
747 assert(tb_ptr
== old_code_ptr
+ op_size
);
748 tb_ptr
= (uint8_t *)label
;
752 #if TCG_TARGET_REG_BITS == 32
753 case INDEX_op_add2_i32
:
756 tmp64
= tci_read_r64(&tb_ptr
);
757 tmp64
+= tci_read_r64(&tb_ptr
);
758 tci_write_reg64(t1
, t0
, tmp64
);
760 case INDEX_op_sub2_i32
:
763 tmp64
= tci_read_r64(&tb_ptr
);
764 tmp64
-= tci_read_r64(&tb_ptr
);
765 tci_write_reg64(t1
, t0
, tmp64
);
767 case INDEX_op_brcond2_i32
:
768 tmp64
= tci_read_r64(&tb_ptr
);
769 v64
= tci_read_ri64(&tb_ptr
);
770 condition
= *tb_ptr
++;
771 label
= tci_read_label(&tb_ptr
);
772 if (tci_compare64(tmp64
, v64
, condition
)) {
773 assert(tb_ptr
== old_code_ptr
+ op_size
);
774 tb_ptr
= (uint8_t *)label
;
778 case INDEX_op_mulu2_i32
:
781 t2
= tci_read_r32(&tb_ptr
);
782 tmp64
= tci_read_r32(&tb_ptr
);
783 tci_write_reg64(t1
, t0
, t2
* tmp64
);
785 #endif /* TCG_TARGET_REG_BITS == 32 */
786 #if TCG_TARGET_HAS_ext8s_i32
787 case INDEX_op_ext8s_i32
:
789 t1
= tci_read_r8s(&tb_ptr
);
790 tci_write_reg32(t0
, t1
);
793 #if TCG_TARGET_HAS_ext16s_i32
794 case INDEX_op_ext16s_i32
:
796 t1
= tci_read_r16s(&tb_ptr
);
797 tci_write_reg32(t0
, t1
);
800 #if TCG_TARGET_HAS_ext8u_i32
801 case INDEX_op_ext8u_i32
:
803 t1
= tci_read_r8(&tb_ptr
);
804 tci_write_reg32(t0
, t1
);
807 #if TCG_TARGET_HAS_ext16u_i32
808 case INDEX_op_ext16u_i32
:
810 t1
= tci_read_r16(&tb_ptr
);
811 tci_write_reg32(t0
, t1
);
814 #if TCG_TARGET_HAS_bswap16_i32
815 case INDEX_op_bswap16_i32
:
817 t1
= tci_read_r16(&tb_ptr
);
818 tci_write_reg32(t0
, bswap16(t1
));
821 #if TCG_TARGET_HAS_bswap32_i32
822 case INDEX_op_bswap32_i32
:
824 t1
= tci_read_r32(&tb_ptr
);
825 tci_write_reg32(t0
, bswap32(t1
));
828 #if TCG_TARGET_HAS_not_i32
829 case INDEX_op_not_i32
:
831 t1
= tci_read_r32(&tb_ptr
);
832 tci_write_reg32(t0
, ~t1
);
835 #if TCG_TARGET_HAS_neg_i32
836 case INDEX_op_neg_i32
:
838 t1
= tci_read_r32(&tb_ptr
);
839 tci_write_reg32(t0
, -t1
);
842 #if TCG_TARGET_REG_BITS == 64
843 case INDEX_op_mov_i64
:
845 t1
= tci_read_r64(&tb_ptr
);
846 tci_write_reg64(t0
, t1
);
848 case INDEX_op_movi_i64
:
850 t1
= tci_read_i64(&tb_ptr
);
851 tci_write_reg64(t0
, t1
);
854 /* Load/store operations (64 bit). */
856 case INDEX_op_ld8u_i64
:
858 t1
= tci_read_r(&tb_ptr
);
859 t2
= tci_read_s32(&tb_ptr
);
860 tci_write_reg8(t0
, *(uint8_t *)(t1
+ t2
));
862 case INDEX_op_ld8s_i64
:
863 case INDEX_op_ld16u_i64
:
864 case INDEX_op_ld16s_i64
:
867 case INDEX_op_ld32u_i64
:
869 t1
= tci_read_r(&tb_ptr
);
870 t2
= tci_read_s32(&tb_ptr
);
871 tci_write_reg32(t0
, *(uint32_t *)(t1
+ t2
));
873 case INDEX_op_ld32s_i64
:
875 t1
= tci_read_r(&tb_ptr
);
876 t2
= tci_read_s32(&tb_ptr
);
877 tci_write_reg32s(t0
, *(int32_t *)(t1
+ t2
));
879 case INDEX_op_ld_i64
:
881 t1
= tci_read_r(&tb_ptr
);
882 t2
= tci_read_s32(&tb_ptr
);
883 tci_write_reg64(t0
, *(uint64_t *)(t1
+ t2
));
885 case INDEX_op_st8_i64
:
886 t0
= tci_read_r8(&tb_ptr
);
887 t1
= tci_read_r(&tb_ptr
);
888 t2
= tci_read_s32(&tb_ptr
);
889 *(uint8_t *)(t1
+ t2
) = t0
;
891 case INDEX_op_st16_i64
:
892 t0
= tci_read_r16(&tb_ptr
);
893 t1
= tci_read_r(&tb_ptr
);
894 t2
= tci_read_s32(&tb_ptr
);
895 *(uint16_t *)(t1
+ t2
) = t0
;
897 case INDEX_op_st32_i64
:
898 t0
= tci_read_r32(&tb_ptr
);
899 t1
= tci_read_r(&tb_ptr
);
900 t2
= tci_read_s32(&tb_ptr
);
901 *(uint32_t *)(t1
+ t2
) = t0
;
903 case INDEX_op_st_i64
:
904 t0
= tci_read_r64(&tb_ptr
);
905 t1
= tci_read_r(&tb_ptr
);
906 t2
= tci_read_s32(&tb_ptr
);
907 assert(t1
!= sp_value
|| (int32_t)t2
< 0);
908 *(uint64_t *)(t1
+ t2
) = t0
;
911 /* Arithmetic operations (64 bit). */
913 case INDEX_op_add_i64
:
915 t1
= tci_read_ri64(&tb_ptr
);
916 t2
= tci_read_ri64(&tb_ptr
);
917 tci_write_reg64(t0
, t1
+ t2
);
919 case INDEX_op_sub_i64
:
921 t1
= tci_read_ri64(&tb_ptr
);
922 t2
= tci_read_ri64(&tb_ptr
);
923 tci_write_reg64(t0
, t1
- t2
);
925 case INDEX_op_mul_i64
:
927 t1
= tci_read_ri64(&tb_ptr
);
928 t2
= tci_read_ri64(&tb_ptr
);
929 tci_write_reg64(t0
, t1
* t2
);
931 #if TCG_TARGET_HAS_div_i64
932 case INDEX_op_div_i64
:
933 case INDEX_op_divu_i64
:
934 case INDEX_op_rem_i64
:
935 case INDEX_op_remu_i64
:
938 #elif TCG_TARGET_HAS_div2_i64
939 case INDEX_op_div2_i64
:
940 case INDEX_op_divu2_i64
:
944 case INDEX_op_and_i64
:
946 t1
= tci_read_ri64(&tb_ptr
);
947 t2
= tci_read_ri64(&tb_ptr
);
948 tci_write_reg64(t0
, t1
& t2
);
950 case INDEX_op_or_i64
:
952 t1
= tci_read_ri64(&tb_ptr
);
953 t2
= tci_read_ri64(&tb_ptr
);
954 tci_write_reg64(t0
, t1
| t2
);
956 case INDEX_op_xor_i64
:
958 t1
= tci_read_ri64(&tb_ptr
);
959 t2
= tci_read_ri64(&tb_ptr
);
960 tci_write_reg64(t0
, t1
^ t2
);
963 /* Shift/rotate operations (64 bit). */
965 case INDEX_op_shl_i64
:
967 t1
= tci_read_ri64(&tb_ptr
);
968 t2
= tci_read_ri64(&tb_ptr
);
969 tci_write_reg64(t0
, t1
<< (t2
& 63));
971 case INDEX_op_shr_i64
:
973 t1
= tci_read_ri64(&tb_ptr
);
974 t2
= tci_read_ri64(&tb_ptr
);
975 tci_write_reg64(t0
, t1
>> (t2
& 63));
977 case INDEX_op_sar_i64
:
979 t1
= tci_read_ri64(&tb_ptr
);
980 t2
= tci_read_ri64(&tb_ptr
);
981 tci_write_reg64(t0
, ((int64_t)t1
>> (t2
& 63)));
983 #if TCG_TARGET_HAS_rot_i64
984 case INDEX_op_rotl_i64
:
986 t1
= tci_read_ri64(&tb_ptr
);
987 t2
= tci_read_ri64(&tb_ptr
);
988 tci_write_reg64(t0
, rol64(t1
, t2
& 63));
990 case INDEX_op_rotr_i64
:
992 t1
= tci_read_ri64(&tb_ptr
);
993 t2
= tci_read_ri64(&tb_ptr
);
994 tci_write_reg64(t0
, ror64(t1
, t2
& 63));
997 #if TCG_TARGET_HAS_deposit_i64
998 case INDEX_op_deposit_i64
:
1000 t1
= tci_read_r64(&tb_ptr
);
1001 t2
= tci_read_r64(&tb_ptr
);
1004 tmp64
= (((1ULL << tmp8
) - 1) << tmp16
);
1005 tci_write_reg64(t0
, (t1
& ~tmp64
) | ((t2
<< tmp16
) & tmp64
));
1008 case INDEX_op_brcond_i64
:
1009 t0
= tci_read_r64(&tb_ptr
);
1010 t1
= tci_read_ri64(&tb_ptr
);
1011 condition
= *tb_ptr
++;
1012 label
= tci_read_label(&tb_ptr
);
1013 if (tci_compare64(t0
, t1
, condition
)) {
1014 assert(tb_ptr
== old_code_ptr
+ op_size
);
1015 tb_ptr
= (uint8_t *)label
;
1019 #if TCG_TARGET_HAS_ext8u_i64
1020 case INDEX_op_ext8u_i64
:
1022 t1
= tci_read_r8(&tb_ptr
);
1023 tci_write_reg64(t0
, t1
);
1026 #if TCG_TARGET_HAS_ext8s_i64
1027 case INDEX_op_ext8s_i64
:
1029 t1
= tci_read_r8s(&tb_ptr
);
1030 tci_write_reg64(t0
, t1
);
1033 #if TCG_TARGET_HAS_ext16s_i64
1034 case INDEX_op_ext16s_i64
:
1036 t1
= tci_read_r16s(&tb_ptr
);
1037 tci_write_reg64(t0
, t1
);
1040 #if TCG_TARGET_HAS_ext16u_i64
1041 case INDEX_op_ext16u_i64
:
1043 t1
= tci_read_r16(&tb_ptr
);
1044 tci_write_reg64(t0
, t1
);
1047 #if TCG_TARGET_HAS_ext32s_i64
1048 case INDEX_op_ext32s_i64
:
1050 t1
= tci_read_r32s(&tb_ptr
);
1051 tci_write_reg64(t0
, t1
);
1054 #if TCG_TARGET_HAS_ext32u_i64
1055 case INDEX_op_ext32u_i64
:
1057 t1
= tci_read_r32(&tb_ptr
);
1058 tci_write_reg64(t0
, t1
);
1061 #if TCG_TARGET_HAS_bswap16_i64
1062 case INDEX_op_bswap16_i64
:
1065 t1
= tci_read_r16(&tb_ptr
);
1066 tci_write_reg64(t0
, bswap16(t1
));
1069 #if TCG_TARGET_HAS_bswap32_i64
1070 case INDEX_op_bswap32_i64
:
1072 t1
= tci_read_r32(&tb_ptr
);
1073 tci_write_reg64(t0
, bswap32(t1
));
1076 #if TCG_TARGET_HAS_bswap64_i64
1077 case INDEX_op_bswap64_i64
:
1079 t1
= tci_read_r64(&tb_ptr
);
1080 tci_write_reg64(t0
, bswap64(t1
));
1083 #if TCG_TARGET_HAS_not_i64
1084 case INDEX_op_not_i64
:
1086 t1
= tci_read_r64(&tb_ptr
);
1087 tci_write_reg64(t0
, ~t1
);
1090 #if TCG_TARGET_HAS_neg_i64
1091 case INDEX_op_neg_i64
:
1093 t1
= tci_read_r64(&tb_ptr
);
1094 tci_write_reg64(t0
, -t1
);
1097 #endif /* TCG_TARGET_REG_BITS == 64 */
1099 /* QEMU specific operations. */
1101 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
1102 case INDEX_op_debug_insn_start
:
1106 case INDEX_op_debug_insn_start
:
1110 case INDEX_op_exit_tb
:
1111 next_tb
= *(uint64_t *)tb_ptr
;
1114 case INDEX_op_goto_tb
:
1115 t0
= tci_read_i32(&tb_ptr
);
1116 assert(tb_ptr
== old_code_ptr
+ op_size
);
1117 tb_ptr
+= (int32_t)t0
;
1119 case INDEX_op_qemu_ld_i32
:
1121 taddr
= tci_read_ulong(&tb_ptr
);
1122 memop
= tci_read_i(&tb_ptr
);
1128 tmp32
= (int8_t)qemu_ld_ub
;
1131 tmp32
= qemu_ld_leuw
;
1134 tmp32
= (int16_t)qemu_ld_leuw
;
1137 tmp32
= qemu_ld_leul
;
1140 tmp32
= qemu_ld_beuw
;
1143 tmp32
= (int16_t)qemu_ld_beuw
;
1146 tmp32
= qemu_ld_beul
;
1151 tci_write_reg(t0
, tmp32
);
1153 case INDEX_op_qemu_ld_i64
:
1155 if (TCG_TARGET_REG_BITS
== 32) {
1158 taddr
= tci_read_ulong(&tb_ptr
);
1159 memop
= tci_read_i(&tb_ptr
);
1165 tmp64
= (int8_t)qemu_ld_ub
;
1168 tmp64
= qemu_ld_leuw
;
1171 tmp64
= (int16_t)qemu_ld_leuw
;
1174 tmp64
= qemu_ld_leul
;
1177 tmp64
= (int32_t)qemu_ld_leul
;
1180 tmp64
= qemu_ld_leq
;
1183 tmp64
= qemu_ld_beuw
;
1186 tmp64
= (int16_t)qemu_ld_beuw
;
1189 tmp64
= qemu_ld_beul
;
1192 tmp64
= (int32_t)qemu_ld_beul
;
1195 tmp64
= qemu_ld_beq
;
1200 tci_write_reg(t0
, tmp64
);
1201 if (TCG_TARGET_REG_BITS
== 32) {
1202 tci_write_reg(t1
, tmp64
>> 32);
1205 case INDEX_op_qemu_st_i32
:
1206 t0
= tci_read_r(&tb_ptr
);
1207 taddr
= tci_read_ulong(&tb_ptr
);
1208 memop
= tci_read_i(&tb_ptr
);
1229 case INDEX_op_qemu_st_i64
:
1230 tmp64
= tci_read_r64(&tb_ptr
);
1231 taddr
= tci_read_ulong(&tb_ptr
);
1232 memop
= tci_read_i(&tb_ptr
);
1263 assert(tb_ptr
== old_code_ptr
+ op_size
);