/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif
#include <string.h> /* memcpy: alias/alignment-safe loads from the bytecode stream */

#include "qemu-common.h"
#include "dyngen-exec.h" /* env */
#include "exec-all.h" /* MAX_OPC_PARAM_IARGS */
32 /* Marker for missing code. */
35 fprintf(stderr, "TODO %s:%u: %s()\n", \
36 __FILE__, __LINE__, __func__); \
40 #if MAX_OPC_PARAM_IARGS != 4
41 # error Fix needed, number of supported input arguments changed!
/*
 * Generic signature used to invoke TCG helper functions from the
 * interpreter.  On 32-bit hosts 64-bit arguments occupy two register
 * slots, so twice as many parameters are passed.
 * NOTE(review): the #else/#endif separating the two variants was lost in
 * this chunk; restored here so only one typedef is ever visible.
 */
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
53 /* TCI can optionally use a global register variable for env. */
58 /* Targets which don't use GETPC also don't need tci_tb_ptr
59 which makes them a little faster. */
64 static tcg_target_ulong tci_reg
[TCG_TARGET_NB_REGS
];
/* Compatibility shims: when the build does not pass env explicitly
   (CONFIG_TCG_PASS_AREG0 unset), map the env-taking helper_*_mmu() names
   onto the legacy __ld/__st softmmu helpers, discarding the env argument. */
#if !defined(CONFIG_TCG_PASS_AREG0)
# define helper_ldb_mmu(env, addr, mmu_idx) __ldb_mmu(addr, mmu_idx)
# define helper_ldw_mmu(env, addr, mmu_idx) __ldw_mmu(addr, mmu_idx)
# define helper_ldl_mmu(env, addr, mmu_idx) __ldl_mmu(addr, mmu_idx)
# define helper_ldq_mmu(env, addr, mmu_idx) __ldq_mmu(addr, mmu_idx)
# define helper_stb_mmu(env, addr, val, mmu_idx) __stb_mmu(addr, val, mmu_idx)
# define helper_stw_mmu(env, addr, val, mmu_idx) __stw_mmu(addr, val, mmu_idx)
# define helper_stl_mmu(env, addr, val, mmu_idx) __stl_mmu(addr, val, mmu_idx)
# define helper_stq_mmu(env, addr, val, mmu_idx) __stq_mmu(addr, val, mmu_idx)
#endif /* !CONFIG_TCG_PASS_AREG0 */
77 static tcg_target_ulong
tci_read_reg(TCGReg index
)
79 assert(index
< ARRAY_SIZE(tci_reg
));
80 return tci_reg
[index
];
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read register 'index', sign-extended from its low 8 bits. */
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read register 'index', sign-extended from its low 16 bits. */
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif
#if TCG_TARGET_REG_BITS == 64
/* Read register 'index', sign-extended from its low 32 bits. */
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif
104 static uint8_t tci_read_reg8(TCGReg index
)
106 return (uint8_t)tci_read_reg(index
);
109 static uint16_t tci_read_reg16(TCGReg index
)
111 return (uint16_t)tci_read_reg(index
);
114 static uint32_t tci_read_reg32(TCGReg index
)
116 return (uint32_t)tci_read_reg(index
);
#if TCG_TARGET_REG_BITS == 64
/* Read the full 64-bit value of register 'index'. */
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif
126 static void tci_write_reg(TCGReg index
, tcg_target_ulong value
)
128 assert(index
< ARRAY_SIZE(tci_reg
));
129 assert(index
!= TCG_AREG0
);
130 tci_reg
[index
] = value
;
133 static void tci_write_reg8s(TCGReg index
, int8_t value
)
135 tci_write_reg(index
, value
);
138 static void tci_write_reg16s(TCGReg index
, int16_t value
)
140 tci_write_reg(index
, value
);
#if TCG_TARGET_REG_BITS == 64
/* Store a sign-extended 32-bit value into register 'index'. */
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif
150 static void tci_write_reg8(TCGReg index
, uint8_t value
)
152 tci_write_reg(index
, value
);
155 static void tci_write_reg16(TCGReg index
, uint16_t value
)
157 tci_write_reg(index
, value
);
160 static void tci_write_reg32(TCGReg index
, uint32_t value
)
162 tci_write_reg(index
, value
);
#if TCG_TARGET_REG_BITS == 32
/* Store a 64-bit value into a 32-bit register pair:
   low half into 'low_index', high half into 'high_index'. */
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
/* Store a 64-bit value into register 'index'. */
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif
179 #if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values (high:low). */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
187 /* Read constant (native size) from bytecode. */
188 static tcg_target_ulong
tci_read_i(uint8_t **tb_ptr
)
190 tcg_target_ulong value
= *(tcg_target_ulong
*)(*tb_ptr
);
191 *tb_ptr
+= sizeof(value
);
/* Read constant (32 bit) from bytecode and advance *tb_ptr past it.
   memcpy avoids the misaligned/strict-aliasing UB of a direct cast;
   the mangled source also lacked the 'return value;' — restored. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value;
    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
203 #if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode and advance *tb_ptr past it.
   memcpy avoids the misaligned/strict-aliasing UB of a direct cast;
   the mangled source also lacked the 'return value;' — restored. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value;
    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
213 /* Read indexed register (native size) from bytecode. */
214 static tcg_target_ulong
tci_read_r(uint8_t **tb_ptr
)
216 tcg_target_ulong value
= tci_read_reg(**tb_ptr
);
/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
265 #if TCG_TARGET_REG_BITS == 32
266 /* Read two indexed registers (2 * 32 bit) from bytecode. */
267 static uint64_t tci_read_r64(uint8_t **tb_ptr
)
269 uint32_t low
= tci_read_r32(tb_ptr
);
270 return tci_uint64(tci_read_r32(tb_ptr
), low
);
272 #elif TCG_TARGET_REG_BITS == 64
273 /* Read indexed register (32 bit signed) from bytecode. */
274 static int32_t tci_read_r32s(uint8_t **tb_ptr
)
276 int32_t value
= tci_read_reg32s(**tb_ptr
);
281 /* Read indexed register (64 bit) from bytecode. */
282 static uint64_t tci_read_r64(uint8_t **tb_ptr
)
284 uint64_t value
= tci_read_reg64(**tb_ptr
);
290 /* Read indexed register(s) with target address from bytecode. */
291 static target_ulong
tci_read_ulong(uint8_t **tb_ptr
)
293 target_ulong taddr
= tci_read_r(tb_ptr
);
294 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
295 taddr
+= (uint64_t)tci_read_r(tb_ptr
) << 32;
300 /* Read indexed register or constant (native size) from bytecode. */
301 static tcg_target_ulong
tci_read_ri(uint8_t **tb_ptr
)
303 tcg_target_ulong value
;
306 if (r
== TCG_CONST
) {
307 value
= tci_read_i(tb_ptr
);
309 value
= tci_read_reg(r
);
314 /* Read indexed register or constant (32 bit) from bytecode. */
315 static uint32_t tci_read_ri32(uint8_t **tb_ptr
)
320 if (r
== TCG_CONST
) {
321 value
= tci_read_i32(tb_ptr
);
323 value
= tci_read_reg32(r
);
328 #if TCG_TARGET_REG_BITS == 32
329 /* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
330 static uint64_t tci_read_ri64(uint8_t **tb_ptr
)
332 uint32_t low
= tci_read_ri32(tb_ptr
);
333 return tci_uint64(tci_read_ri32(tb_ptr
), low
);
335 #elif TCG_TARGET_REG_BITS == 64
336 /* Read indexed register or constant (64 bit) from bytecode. */
337 static uint64_t tci_read_ri64(uint8_t **tb_ptr
)
342 if (r
== TCG_CONST
) {
343 value
= tci_read_i64(tb_ptr
);
345 value
= tci_read_reg64(r
);
351 static target_ulong
tci_read_label(uint8_t **tb_ptr
)
353 target_ulong label
= tci_read_i(tb_ptr
);
358 static bool tci_compare32(uint32_t u0
, uint32_t u1
, TCGCond condition
)
400 static bool tci_compare64(uint64_t u0
, uint64_t u1
, TCGCond condition
)
442 /* Interpret pseudo code in tb. */
443 tcg_target_ulong
tcg_qemu_tb_exec(CPUArchState
*cpustate
, uint8_t *tb_ptr
)
445 tcg_target_ulong next_tb
= 0;
448 tci_reg
[TCG_AREG0
] = (tcg_target_ulong
)env
;
453 tci_tb_ptr
= (uintptr_t)tb_ptr
;
455 TCGOpcode opc
= tb_ptr
[0];
457 uint8_t op_size
= tb_ptr
[1];
458 uint8_t *old_code_ptr
= tb_ptr
;
463 tcg_target_ulong label
;
466 #ifndef CONFIG_SOFTMMU
467 tcg_target_ulong host_addr
;
473 #if TCG_TARGET_REG_BITS == 32
477 /* Skip opcode and size entry. */
488 case INDEX_op_discard
:
491 case INDEX_op_set_label
:
495 t0
= tci_read_ri(&tb_ptr
);
496 #if TCG_TARGET_REG_BITS == 32
497 tmp64
= ((helper_function
)t0
)(tci_read_reg(TCG_REG_R0
),
498 tci_read_reg(TCG_REG_R1
),
499 tci_read_reg(TCG_REG_R2
),
500 tci_read_reg(TCG_REG_R3
),
501 tci_read_reg(TCG_REG_R5
),
502 tci_read_reg(TCG_REG_R6
),
503 tci_read_reg(TCG_REG_R7
),
504 tci_read_reg(TCG_REG_R8
));
505 tci_write_reg(TCG_REG_R0
, tmp64
);
506 tci_write_reg(TCG_REG_R1
, tmp64
>> 32);
508 tmp64
= ((helper_function
)t0
)(tci_read_reg(TCG_REG_R0
),
509 tci_read_reg(TCG_REG_R1
),
510 tci_read_reg(TCG_REG_R2
),
511 tci_read_reg(TCG_REG_R3
));
512 tci_write_reg(TCG_REG_R0
, tmp64
);
517 label
= tci_read_label(&tb_ptr
);
518 assert(tb_ptr
== old_code_ptr
+ op_size
);
519 tb_ptr
= (uint8_t *)label
;
521 case INDEX_op_setcond_i32
:
523 t1
= tci_read_r32(&tb_ptr
);
524 t2
= tci_read_ri32(&tb_ptr
);
525 condition
= *tb_ptr
++;
526 tci_write_reg32(t0
, tci_compare32(t1
, t2
, condition
));
528 #if TCG_TARGET_REG_BITS == 32
529 case INDEX_op_setcond2_i32
:
531 tmp64
= tci_read_r64(&tb_ptr
);
532 v64
= tci_read_ri64(&tb_ptr
);
533 condition
= *tb_ptr
++;
534 tci_write_reg32(t0
, tci_compare64(tmp64
, v64
, condition
));
536 #elif TCG_TARGET_REG_BITS == 64
537 case INDEX_op_setcond_i64
:
539 t1
= tci_read_r64(&tb_ptr
);
540 t2
= tci_read_ri64(&tb_ptr
);
541 condition
= *tb_ptr
++;
542 tci_write_reg64(t0
, tci_compare64(t1
, t2
, condition
));
545 case INDEX_op_mov_i32
:
547 t1
= tci_read_r32(&tb_ptr
);
548 tci_write_reg32(t0
, t1
);
550 case INDEX_op_movi_i32
:
552 t1
= tci_read_i32(&tb_ptr
);
553 tci_write_reg32(t0
, t1
);
556 /* Load/store operations (32 bit). */
558 case INDEX_op_ld8u_i32
:
560 t1
= tci_read_r(&tb_ptr
);
561 t2
= tci_read_i32(&tb_ptr
);
562 tci_write_reg8(t0
, *(uint8_t *)(t1
+ t2
));
564 case INDEX_op_ld8s_i32
:
565 case INDEX_op_ld16u_i32
:
568 case INDEX_op_ld16s_i32
:
571 case INDEX_op_ld_i32
:
573 t1
= tci_read_r(&tb_ptr
);
574 t2
= tci_read_i32(&tb_ptr
);
575 tci_write_reg32(t0
, *(uint32_t *)(t1
+ t2
));
577 case INDEX_op_st8_i32
:
578 t0
= tci_read_r8(&tb_ptr
);
579 t1
= tci_read_r(&tb_ptr
);
580 t2
= tci_read_i32(&tb_ptr
);
581 *(uint8_t *)(t1
+ t2
) = t0
;
583 case INDEX_op_st16_i32
:
584 t0
= tci_read_r16(&tb_ptr
);
585 t1
= tci_read_r(&tb_ptr
);
586 t2
= tci_read_i32(&tb_ptr
);
587 *(uint16_t *)(t1
+ t2
) = t0
;
589 case INDEX_op_st_i32
:
590 t0
= tci_read_r32(&tb_ptr
);
591 t1
= tci_read_r(&tb_ptr
);
592 t2
= tci_read_i32(&tb_ptr
);
593 *(uint32_t *)(t1
+ t2
) = t0
;
596 /* Arithmetic operations (32 bit). */
598 case INDEX_op_add_i32
:
600 t1
= tci_read_ri32(&tb_ptr
);
601 t2
= tci_read_ri32(&tb_ptr
);
602 tci_write_reg32(t0
, t1
+ t2
);
604 case INDEX_op_sub_i32
:
606 t1
= tci_read_ri32(&tb_ptr
);
607 t2
= tci_read_ri32(&tb_ptr
);
608 tci_write_reg32(t0
, t1
- t2
);
610 case INDEX_op_mul_i32
:
612 t1
= tci_read_ri32(&tb_ptr
);
613 t2
= tci_read_ri32(&tb_ptr
);
614 tci_write_reg32(t0
, t1
* t2
);
616 #if TCG_TARGET_HAS_div_i32
617 case INDEX_op_div_i32
:
619 t1
= tci_read_ri32(&tb_ptr
);
620 t2
= tci_read_ri32(&tb_ptr
);
621 tci_write_reg32(t0
, (int32_t)t1
/ (int32_t)t2
);
623 case INDEX_op_divu_i32
:
625 t1
= tci_read_ri32(&tb_ptr
);
626 t2
= tci_read_ri32(&tb_ptr
);
627 tci_write_reg32(t0
, t1
/ t2
);
629 case INDEX_op_rem_i32
:
631 t1
= tci_read_ri32(&tb_ptr
);
632 t2
= tci_read_ri32(&tb_ptr
);
633 tci_write_reg32(t0
, (int32_t)t1
% (int32_t)t2
);
635 case INDEX_op_remu_i32
:
637 t1
= tci_read_ri32(&tb_ptr
);
638 t2
= tci_read_ri32(&tb_ptr
);
639 tci_write_reg32(t0
, t1
% t2
);
641 #elif TCG_TARGET_HAS_div2_i32
642 case INDEX_op_div2_i32
:
643 case INDEX_op_divu2_i32
:
647 case INDEX_op_and_i32
:
649 t1
= tci_read_ri32(&tb_ptr
);
650 t2
= tci_read_ri32(&tb_ptr
);
651 tci_write_reg32(t0
, t1
& t2
);
653 case INDEX_op_or_i32
:
655 t1
= tci_read_ri32(&tb_ptr
);
656 t2
= tci_read_ri32(&tb_ptr
);
657 tci_write_reg32(t0
, t1
| t2
);
659 case INDEX_op_xor_i32
:
661 t1
= tci_read_ri32(&tb_ptr
);
662 t2
= tci_read_ri32(&tb_ptr
);
663 tci_write_reg32(t0
, t1
^ t2
);
666 /* Shift/rotate operations (32 bit). */
668 case INDEX_op_shl_i32
:
670 t1
= tci_read_ri32(&tb_ptr
);
671 t2
= tci_read_ri32(&tb_ptr
);
672 tci_write_reg32(t0
, t1
<< t2
);
674 case INDEX_op_shr_i32
:
676 t1
= tci_read_ri32(&tb_ptr
);
677 t2
= tci_read_ri32(&tb_ptr
);
678 tci_write_reg32(t0
, t1
>> t2
);
680 case INDEX_op_sar_i32
:
682 t1
= tci_read_ri32(&tb_ptr
);
683 t2
= tci_read_ri32(&tb_ptr
);
684 tci_write_reg32(t0
, ((int32_t)t1
>> t2
));
686 #if TCG_TARGET_HAS_rot_i32
687 case INDEX_op_rotl_i32
:
689 t1
= tci_read_ri32(&tb_ptr
);
690 t2
= tci_read_ri32(&tb_ptr
);
691 tci_write_reg32(t0
, (t1
<< t2
) | (t1
>> (32 - t2
)));
693 case INDEX_op_rotr_i32
:
695 t1
= tci_read_ri32(&tb_ptr
);
696 t2
= tci_read_ri32(&tb_ptr
);
697 tci_write_reg32(t0
, (t1
>> t2
) | (t1
<< (32 - t2
)));
700 case INDEX_op_brcond_i32
:
701 t0
= tci_read_r32(&tb_ptr
);
702 t1
= tci_read_ri32(&tb_ptr
);
703 condition
= *tb_ptr
++;
704 label
= tci_read_label(&tb_ptr
);
705 if (tci_compare32(t0
, t1
, condition
)) {
706 assert(tb_ptr
== old_code_ptr
+ op_size
);
707 tb_ptr
= (uint8_t *)label
;
711 #if TCG_TARGET_REG_BITS == 32
712 case INDEX_op_add2_i32
:
715 tmp64
= tci_read_r64(&tb_ptr
);
716 tmp64
+= tci_read_r64(&tb_ptr
);
717 tci_write_reg64(t1
, t0
, tmp64
);
719 case INDEX_op_sub2_i32
:
722 tmp64
= tci_read_r64(&tb_ptr
);
723 tmp64
-= tci_read_r64(&tb_ptr
);
724 tci_write_reg64(t1
, t0
, tmp64
);
726 case INDEX_op_brcond2_i32
:
727 tmp64
= tci_read_r64(&tb_ptr
);
728 v64
= tci_read_ri64(&tb_ptr
);
729 condition
= *tb_ptr
++;
730 label
= tci_read_label(&tb_ptr
);
731 if (tci_compare64(tmp64
, v64
, condition
)) {
732 assert(tb_ptr
== old_code_ptr
+ op_size
);
733 tb_ptr
= (uint8_t *)label
;
737 case INDEX_op_mulu2_i32
:
740 t2
= tci_read_r32(&tb_ptr
);
741 tmp64
= tci_read_r32(&tb_ptr
);
742 tci_write_reg64(t1
, t0
, t2
* tmp64
);
744 #endif /* TCG_TARGET_REG_BITS == 32 */
745 #if TCG_TARGET_HAS_ext8s_i32
746 case INDEX_op_ext8s_i32
:
748 t1
= tci_read_r8s(&tb_ptr
);
749 tci_write_reg32(t0
, t1
);
752 #if TCG_TARGET_HAS_ext16s_i32
753 case INDEX_op_ext16s_i32
:
755 t1
= tci_read_r16s(&tb_ptr
);
756 tci_write_reg32(t0
, t1
);
759 #if TCG_TARGET_HAS_ext8u_i32
760 case INDEX_op_ext8u_i32
:
762 t1
= tci_read_r8(&tb_ptr
);
763 tci_write_reg32(t0
, t1
);
766 #if TCG_TARGET_HAS_ext16u_i32
767 case INDEX_op_ext16u_i32
:
769 t1
= tci_read_r16(&tb_ptr
);
770 tci_write_reg32(t0
, t1
);
773 #if TCG_TARGET_HAS_bswap16_i32
774 case INDEX_op_bswap16_i32
:
776 t1
= tci_read_r16(&tb_ptr
);
777 tci_write_reg32(t0
, bswap16(t1
));
780 #if TCG_TARGET_HAS_bswap32_i32
781 case INDEX_op_bswap32_i32
:
783 t1
= tci_read_r32(&tb_ptr
);
784 tci_write_reg32(t0
, bswap32(t1
));
787 #if TCG_TARGET_HAS_not_i32
788 case INDEX_op_not_i32
:
790 t1
= tci_read_r32(&tb_ptr
);
791 tci_write_reg32(t0
, ~t1
);
794 #if TCG_TARGET_HAS_neg_i32
795 case INDEX_op_neg_i32
:
797 t1
= tci_read_r32(&tb_ptr
);
798 tci_write_reg32(t0
, -t1
);
801 #if TCG_TARGET_REG_BITS == 64
802 case INDEX_op_mov_i64
:
804 t1
= tci_read_r64(&tb_ptr
);
805 tci_write_reg64(t0
, t1
);
807 case INDEX_op_movi_i64
:
809 t1
= tci_read_i64(&tb_ptr
);
810 tci_write_reg64(t0
, t1
);
813 /* Load/store operations (64 bit). */
815 case INDEX_op_ld8u_i64
:
817 t1
= tci_read_r(&tb_ptr
);
818 t2
= tci_read_i32(&tb_ptr
);
819 tci_write_reg8(t0
, *(uint8_t *)(t1
+ t2
));
821 case INDEX_op_ld8s_i64
:
822 case INDEX_op_ld16u_i64
:
823 case INDEX_op_ld16s_i64
:
826 case INDEX_op_ld32u_i64
:
828 t1
= tci_read_r(&tb_ptr
);
829 t2
= tci_read_i32(&tb_ptr
);
830 tci_write_reg32(t0
, *(uint32_t *)(t1
+ t2
));
832 case INDEX_op_ld32s_i64
:
834 t1
= tci_read_r(&tb_ptr
);
835 t2
= tci_read_i32(&tb_ptr
);
836 tci_write_reg32s(t0
, *(int32_t *)(t1
+ t2
));
838 case INDEX_op_ld_i64
:
840 t1
= tci_read_r(&tb_ptr
);
841 t2
= tci_read_i32(&tb_ptr
);
842 tci_write_reg64(t0
, *(uint64_t *)(t1
+ t2
));
844 case INDEX_op_st8_i64
:
845 t0
= tci_read_r8(&tb_ptr
);
846 t1
= tci_read_r(&tb_ptr
);
847 t2
= tci_read_i32(&tb_ptr
);
848 *(uint8_t *)(t1
+ t2
) = t0
;
850 case INDEX_op_st16_i64
:
851 t0
= tci_read_r16(&tb_ptr
);
852 t1
= tci_read_r(&tb_ptr
);
853 t2
= tci_read_i32(&tb_ptr
);
854 *(uint16_t *)(t1
+ t2
) = t0
;
856 case INDEX_op_st32_i64
:
857 t0
= tci_read_r32(&tb_ptr
);
858 t1
= tci_read_r(&tb_ptr
);
859 t2
= tci_read_i32(&tb_ptr
);
860 *(uint32_t *)(t1
+ t2
) = t0
;
862 case INDEX_op_st_i64
:
863 t0
= tci_read_r64(&tb_ptr
);
864 t1
= tci_read_r(&tb_ptr
);
865 t2
= tci_read_i32(&tb_ptr
);
866 *(uint64_t *)(t1
+ t2
) = t0
;
869 /* Arithmetic operations (64 bit). */
871 case INDEX_op_add_i64
:
873 t1
= tci_read_ri64(&tb_ptr
);
874 t2
= tci_read_ri64(&tb_ptr
);
875 tci_write_reg64(t0
, t1
+ t2
);
877 case INDEX_op_sub_i64
:
879 t1
= tci_read_ri64(&tb_ptr
);
880 t2
= tci_read_ri64(&tb_ptr
);
881 tci_write_reg64(t0
, t1
- t2
);
883 case INDEX_op_mul_i64
:
885 t1
= tci_read_ri64(&tb_ptr
);
886 t2
= tci_read_ri64(&tb_ptr
);
887 tci_write_reg64(t0
, t1
* t2
);
889 #if TCG_TARGET_HAS_div_i64
890 case INDEX_op_div_i64
:
891 case INDEX_op_divu_i64
:
892 case INDEX_op_rem_i64
:
893 case INDEX_op_remu_i64
:
896 #elif TCG_TARGET_HAS_div2_i64
897 case INDEX_op_div2_i64
:
898 case INDEX_op_divu2_i64
:
902 case INDEX_op_and_i64
:
904 t1
= tci_read_ri64(&tb_ptr
);
905 t2
= tci_read_ri64(&tb_ptr
);
906 tci_write_reg64(t0
, t1
& t2
);
908 case INDEX_op_or_i64
:
910 t1
= tci_read_ri64(&tb_ptr
);
911 t2
= tci_read_ri64(&tb_ptr
);
912 tci_write_reg64(t0
, t1
| t2
);
914 case INDEX_op_xor_i64
:
916 t1
= tci_read_ri64(&tb_ptr
);
917 t2
= tci_read_ri64(&tb_ptr
);
918 tci_write_reg64(t0
, t1
^ t2
);
921 /* Shift/rotate operations (64 bit). */
923 case INDEX_op_shl_i64
:
925 t1
= tci_read_ri64(&tb_ptr
);
926 t2
= tci_read_ri64(&tb_ptr
);
927 tci_write_reg64(t0
, t1
<< t2
);
929 case INDEX_op_shr_i64
:
931 t1
= tci_read_ri64(&tb_ptr
);
932 t2
= tci_read_ri64(&tb_ptr
);
933 tci_write_reg64(t0
, t1
>> t2
);
935 case INDEX_op_sar_i64
:
937 t1
= tci_read_ri64(&tb_ptr
);
938 t2
= tci_read_ri64(&tb_ptr
);
939 tci_write_reg64(t0
, ((int64_t)t1
>> t2
));
941 #if TCG_TARGET_HAS_rot_i64
942 case INDEX_op_rotl_i64
:
943 case INDEX_op_rotr_i64
:
947 case INDEX_op_brcond_i64
:
948 t0
= tci_read_r64(&tb_ptr
);
949 t1
= tci_read_ri64(&tb_ptr
);
950 condition
= *tb_ptr
++;
951 label
= tci_read_label(&tb_ptr
);
952 if (tci_compare64(t0
, t1
, condition
)) {
953 assert(tb_ptr
== old_code_ptr
+ op_size
);
954 tb_ptr
= (uint8_t *)label
;
958 #if TCG_TARGET_HAS_ext8u_i64
959 case INDEX_op_ext8u_i64
:
961 t1
= tci_read_r8(&tb_ptr
);
962 tci_write_reg64(t0
, t1
);
965 #if TCG_TARGET_HAS_ext8s_i64
966 case INDEX_op_ext8s_i64
:
968 t1
= tci_read_r8s(&tb_ptr
);
969 tci_write_reg64(t0
, t1
);
972 #if TCG_TARGET_HAS_ext16s_i64
973 case INDEX_op_ext16s_i64
:
975 t1
= tci_read_r16s(&tb_ptr
);
976 tci_write_reg64(t0
, t1
);
979 #if TCG_TARGET_HAS_ext16u_i64
980 case INDEX_op_ext16u_i64
:
982 t1
= tci_read_r16(&tb_ptr
);
983 tci_write_reg64(t0
, t1
);
986 #if TCG_TARGET_HAS_ext32s_i64
987 case INDEX_op_ext32s_i64
:
989 t1
= tci_read_r32s(&tb_ptr
);
990 tci_write_reg64(t0
, t1
);
993 #if TCG_TARGET_HAS_ext32u_i64
994 case INDEX_op_ext32u_i64
:
996 t1
= tci_read_r32(&tb_ptr
);
997 tci_write_reg64(t0
, t1
);
1000 #if TCG_TARGET_HAS_bswap16_i64
1001 case INDEX_op_bswap16_i64
:
1004 t1
= tci_read_r16(&tb_ptr
);
1005 tci_write_reg64(t0
, bswap16(t1
));
1008 #if TCG_TARGET_HAS_bswap32_i64
1009 case INDEX_op_bswap32_i64
:
1011 t1
= tci_read_r32(&tb_ptr
);
1012 tci_write_reg64(t0
, bswap32(t1
));
1015 #if TCG_TARGET_HAS_bswap64_i64
1016 case INDEX_op_bswap64_i64
:
1019 t1
= tci_read_r64(&tb_ptr
);
1020 tci_write_reg64(t0
, bswap64(t1
));
1023 #if TCG_TARGET_HAS_not_i64
1024 case INDEX_op_not_i64
:
1026 t1
= tci_read_r64(&tb_ptr
);
1027 tci_write_reg64(t0
, ~t1
);
1030 #if TCG_TARGET_HAS_neg_i64
1031 case INDEX_op_neg_i64
:
1033 t1
= tci_read_r64(&tb_ptr
);
1034 tci_write_reg64(t0
, -t1
);
1037 #endif /* TCG_TARGET_REG_BITS == 64 */
1039 /* QEMU specific operations. */
1041 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
1042 case INDEX_op_debug_insn_start
:
1046 case INDEX_op_debug_insn_start
:
1050 case INDEX_op_exit_tb
:
1051 next_tb
= *(uint64_t *)tb_ptr
;
1054 case INDEX_op_goto_tb
:
1055 t0
= tci_read_i32(&tb_ptr
);
1056 assert(tb_ptr
== old_code_ptr
+ op_size
);
1057 tb_ptr
+= (int32_t)t0
;
1059 case INDEX_op_qemu_ld8u
:
1061 taddr
= tci_read_ulong(&tb_ptr
);
1062 #ifdef CONFIG_SOFTMMU
1063 tmp8
= helper_ldb_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1065 host_addr
= (tcg_target_ulong
)taddr
;
1066 assert(taddr
== host_addr
);
1067 tmp8
= *(uint8_t *)(host_addr
+ GUEST_BASE
);
1069 tci_write_reg8(t0
, tmp8
);
1071 case INDEX_op_qemu_ld8s
:
1073 taddr
= tci_read_ulong(&tb_ptr
);
1074 #ifdef CONFIG_SOFTMMU
1075 tmp8
= helper_ldb_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1077 host_addr
= (tcg_target_ulong
)taddr
;
1078 assert(taddr
== host_addr
);
1079 tmp8
= *(uint8_t *)(host_addr
+ GUEST_BASE
);
1081 tci_write_reg8s(t0
, tmp8
);
1083 case INDEX_op_qemu_ld16u
:
1085 taddr
= tci_read_ulong(&tb_ptr
);
1086 #ifdef CONFIG_SOFTMMU
1087 tmp16
= helper_ldw_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1089 host_addr
= (tcg_target_ulong
)taddr
;
1090 assert(taddr
== host_addr
);
1091 tmp16
= tswap16(*(uint16_t *)(host_addr
+ GUEST_BASE
));
1093 tci_write_reg16(t0
, tmp16
);
1095 case INDEX_op_qemu_ld16s
:
1097 taddr
= tci_read_ulong(&tb_ptr
);
1098 #ifdef CONFIG_SOFTMMU
1099 tmp16
= helper_ldw_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1101 host_addr
= (tcg_target_ulong
)taddr
;
1102 assert(taddr
== host_addr
);
1103 tmp16
= tswap16(*(uint16_t *)(host_addr
+ GUEST_BASE
));
1105 tci_write_reg16s(t0
, tmp16
);
1107 #if TCG_TARGET_REG_BITS == 64
1108 case INDEX_op_qemu_ld32u
:
1110 taddr
= tci_read_ulong(&tb_ptr
);
1111 #ifdef CONFIG_SOFTMMU
1112 tmp32
= helper_ldl_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1114 host_addr
= (tcg_target_ulong
)taddr
;
1115 assert(taddr
== host_addr
);
1116 tmp32
= tswap32(*(uint32_t *)(host_addr
+ GUEST_BASE
));
1118 tci_write_reg32(t0
, tmp32
);
1120 case INDEX_op_qemu_ld32s
:
1122 taddr
= tci_read_ulong(&tb_ptr
);
1123 #ifdef CONFIG_SOFTMMU
1124 tmp32
= helper_ldl_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1126 host_addr
= (tcg_target_ulong
)taddr
;
1127 assert(taddr
== host_addr
);
1128 tmp32
= tswap32(*(uint32_t *)(host_addr
+ GUEST_BASE
));
1130 tci_write_reg32s(t0
, tmp32
);
1132 #endif /* TCG_TARGET_REG_BITS == 64 */
1133 case INDEX_op_qemu_ld32
:
1135 taddr
= tci_read_ulong(&tb_ptr
);
1136 #ifdef CONFIG_SOFTMMU
1137 tmp32
= helper_ldl_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1139 host_addr
= (tcg_target_ulong
)taddr
;
1140 assert(taddr
== host_addr
);
1141 tmp32
= tswap32(*(uint32_t *)(host_addr
+ GUEST_BASE
));
1143 tci_write_reg32(t0
, tmp32
);
1145 case INDEX_op_qemu_ld64
:
1147 #if TCG_TARGET_REG_BITS == 32
1150 taddr
= tci_read_ulong(&tb_ptr
);
1151 #ifdef CONFIG_SOFTMMU
1152 tmp64
= helper_ldq_mmu(env
, taddr
, tci_read_i(&tb_ptr
));
1154 host_addr
= (tcg_target_ulong
)taddr
;
1155 assert(taddr
== host_addr
);
1156 tmp64
= tswap64(*(uint64_t *)(host_addr
+ GUEST_BASE
));
1158 tci_write_reg(t0
, tmp64
);
1159 #if TCG_TARGET_REG_BITS == 32
1160 tci_write_reg(t1
, tmp64
>> 32);
1163 case INDEX_op_qemu_st8
:
1164 t0
= tci_read_r8(&tb_ptr
);
1165 taddr
= tci_read_ulong(&tb_ptr
);
1166 #ifdef CONFIG_SOFTMMU
1167 t2
= tci_read_i(&tb_ptr
);
1168 helper_stb_mmu(env
, taddr
, t0
, t2
);
1170 host_addr
= (tcg_target_ulong
)taddr
;
1171 assert(taddr
== host_addr
);
1172 *(uint8_t *)(host_addr
+ GUEST_BASE
) = t0
;
1175 case INDEX_op_qemu_st16
:
1176 t0
= tci_read_r16(&tb_ptr
);
1177 taddr
= tci_read_ulong(&tb_ptr
);
1178 #ifdef CONFIG_SOFTMMU
1179 t2
= tci_read_i(&tb_ptr
);
1180 helper_stw_mmu(env
, taddr
, t0
, t2
);
1182 host_addr
= (tcg_target_ulong
)taddr
;
1183 assert(taddr
== host_addr
);
1184 *(uint16_t *)(host_addr
+ GUEST_BASE
) = tswap16(t0
);
1187 case INDEX_op_qemu_st32
:
1188 t0
= tci_read_r32(&tb_ptr
);
1189 taddr
= tci_read_ulong(&tb_ptr
);
1190 #ifdef CONFIG_SOFTMMU
1191 t2
= tci_read_i(&tb_ptr
);
1192 helper_stl_mmu(env
, taddr
, t0
, t2
);
1194 host_addr
= (tcg_target_ulong
)taddr
;
1195 assert(taddr
== host_addr
);
1196 *(uint32_t *)(host_addr
+ GUEST_BASE
) = tswap32(t0
);
1199 case INDEX_op_qemu_st64
:
1200 tmp64
= tci_read_r64(&tb_ptr
);
1201 taddr
= tci_read_ulong(&tb_ptr
);
1202 #ifdef CONFIG_SOFTMMU
1203 t2
= tci_read_i(&tb_ptr
);
1204 helper_stq_mmu(env
, taddr
, tmp64
, t2
);
1206 host_addr
= (tcg_target_ulong
)taddr
;
1207 assert(taddr
== host_addr
);
1208 *(uint64_t *)(host_addr
+ GUEST_BASE
) = tswap64(tmp64
);
1215 assert(tb_ptr
== old_code_ptr
+ op_size
);