2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/* A byte swap is needed whenever guest and host endianness differ. */
#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif
34 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
70 /* check if we really need so many registers :P */
71 static const int tcg_target_reg_alloc_order
[] = {
97 static const int tcg_target_call_iarg_regs
[4] = {
104 static const int tcg_target_call_oarg_regs
[2] = {
/* Address inside the generated prologue that translation blocks return to. */
static uint8_t *tb_ret_addr;
111 static inline uint32_t reloc_lo16_val (void *pc
, tcg_target_long target
)
113 return target
& 0xffff;
116 static inline void reloc_lo16 (void *pc
, tcg_target_long target
)
118 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
119 | reloc_lo16_val(pc
, target
);
122 static inline uint32_t reloc_hi16_val (void *pc
, tcg_target_long target
)
124 return (target
>> 16) & 0xffff;
127 static inline void reloc_hi16 (void *pc
, tcg_target_long target
)
129 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
130 | reloc_hi16_val(pc
, target
);
133 static inline uint32_t reloc_pc16_val (void *pc
, tcg_target_long target
)
137 disp
= target
- (tcg_target_long
) pc
- 4;
138 if (disp
!= (disp
<< 14) >> 14) {
142 return (disp
>> 2) & 0xffff;
145 static inline void reloc_pc16 (void *pc
, tcg_target_long target
)
147 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
148 | reloc_pc16_val(pc
, target
);
151 static inline uint32_t reloc_26_val (void *pc
, tcg_target_long target
)
153 if ((((tcg_target_long
)pc
+ 4) & 0xf0000000) != (target
& 0xf0000000)) {
157 return (target
>> 2) & 0x3ffffff;
160 static inline void reloc_pc26 (void *pc
, tcg_target_long target
)
162 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0x3ffffff)
163 | reloc_26_val(pc
, target
);
166 static void patch_reloc(uint8_t *code_ptr
, int type
,
167 tcg_target_long value
, tcg_target_long addend
)
172 reloc_lo16(code_ptr
, value
);
175 reloc_hi16(code_ptr
, value
);
178 reloc_pc16(code_ptr
, value
);
181 reloc_pc26(code_ptr
, value
);
/* maximum number of register used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    /* The o32 ABI passes the first four argument words in $a0-$a3. */
    return 4;
}
194 /* parse target specific constraints */
195 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
202 ct
->ct
|= TCG_CT_REG
;
203 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
206 ct
->ct
|= TCG_CT_REG
;
207 tcg_regset_clear(ct
->u
.regs
);
208 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_T9
);
210 case 'L': /* qemu_ld output arg constraint */
211 ct
->ct
|= TCG_CT_REG
;
212 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
213 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_V0
);
215 case 'l': /* qemu_ld input arg constraint */
216 ct
->ct
|= TCG_CT_REG
;
217 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
218 #if defined(CONFIG_SOFTMMU)
219 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
220 # if (TARGET_LONG_BITS == 64)
221 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
225 case 'S': /* qemu_st constraint */
226 ct
->ct
|= TCG_CT_REG
;
227 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
228 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
229 #if defined(CONFIG_SOFTMMU)
230 # if (TARGET_LONG_BITS == 32)
231 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A1
);
233 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
234 # if TARGET_LONG_BITS == 64
235 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A3
);
240 ct
->ct
|= TCG_CT_CONST_U16
;
243 ct
->ct
|= TCG_CT_CONST_S16
;
246 /* We are cheating a bit here, using the fact that the register
247 ZERO is also the register number 0. Hence there is no need
248 to check for const_args in each instruction. */
249 ct
->ct
|= TCG_CT_CONST_ZERO
;
259 /* test if a constant matches the constraint */
260 static inline int tcg_target_const_match(tcg_target_long val
,
261 const TCGArgConstraint
*arg_ct
)
265 if (ct
& TCG_CT_CONST
)
267 else if ((ct
& TCG_CT_CONST_ZERO
) && val
== 0)
269 else if ((ct
& TCG_CT_CONST_U16
) && val
== (uint16_t)val
)
271 else if ((ct
& TCG_CT_CONST_S16
) && val
== (int16_t)val
)
/* instruction opcodes */
enum {
    /* I-type: major opcode in bits 31..26 */
    OPC_BEQ      = 0x04 << 26,
    OPC_BNE      = 0x05 << 26,
    OPC_ADDIU    = 0x09 << 26,
    OPC_SLTI     = 0x0A << 26,
    OPC_SLTIU    = 0x0B << 26,
    OPC_ANDI     = 0x0C << 26,
    OPC_ORI      = 0x0D << 26,
    OPC_XORI     = 0x0E << 26,
    OPC_LUI      = 0x0F << 26,
    OPC_LB       = 0x20 << 26,
    OPC_LH       = 0x21 << 26,
    OPC_LW       = 0x23 << 26,
    OPC_LBU      = 0x24 << 26,
    OPC_LHU      = 0x25 << 26,
    OPC_LWU      = 0x27 << 26,
    OPC_SB       = 0x28 << 26,
    OPC_SH       = 0x29 << 26,
    OPC_SW       = 0x2B << 26,

    /* R-type: SPECIAL major opcode, function field in bits 5..0 */
    OPC_SPECIAL  = 0x00 << 26,
    OPC_SLL      = OPC_SPECIAL | 0x00,
    OPC_SRL      = OPC_SPECIAL | 0x02,
    OPC_SRA      = OPC_SPECIAL | 0x03,
    OPC_SLLV     = OPC_SPECIAL | 0x04,
    OPC_SRLV     = OPC_SPECIAL | 0x06,
    OPC_SRAV     = OPC_SPECIAL | 0x07,
    OPC_JR       = OPC_SPECIAL | 0x08,
    OPC_JALR     = OPC_SPECIAL | 0x09,
    OPC_MFHI     = OPC_SPECIAL | 0x10,
    OPC_MFLO     = OPC_SPECIAL | 0x12,
    OPC_MULT     = OPC_SPECIAL | 0x18,
    OPC_MULTU    = OPC_SPECIAL | 0x19,
    OPC_DIV      = OPC_SPECIAL | 0x1A,
    OPC_DIVU     = OPC_SPECIAL | 0x1B,
    OPC_ADDU     = OPC_SPECIAL | 0x21,
    OPC_SUBU     = OPC_SPECIAL | 0x23,
    OPC_AND      = OPC_SPECIAL | 0x24,
    OPC_OR       = OPC_SPECIAL | 0x25,
    OPC_XOR      = OPC_SPECIAL | 0x26,
    OPC_NOR      = OPC_SPECIAL | 0x27,
    OPC_SLT      = OPC_SPECIAL | 0x2A,
    OPC_SLTU     = OPC_SPECIAL | 0x2B,

    /* MIPS32R2 sign-extension instructions (SPECIAL3) */
    OPC_SPECIAL3 = 0x1f << 26,
    OPC_SEB      = OPC_SPECIAL3 | 0x420,
    OPC_SEH      = OPC_SPECIAL3 | 0x620,
};
330 static inline void tcg_out_opc_reg(TCGContext
*s
, int opc
, int rd
, int rs
, int rt
)
335 inst
|= (rs
& 0x1F) << 21;
336 inst
|= (rt
& 0x1F) << 16;
337 inst
|= (rd
& 0x1F) << 11;
344 static inline void tcg_out_opc_imm(TCGContext
*s
, int opc
, int rt
, int rs
, int imm
)
349 inst
|= (rs
& 0x1F) << 21;
350 inst
|= (rt
& 0x1F) << 16;
351 inst
|= (imm
& 0xffff);
358 static inline void tcg_out_opc_br(TCGContext
*s
, int opc
, int rt
, int rs
)
360 /* We pay attention here to not modify the branch target by reading
361 the existing value and using it again. This ensure that caches and
362 memory are kept coherent during retranslation. */
363 uint16_t offset
= (uint16_t)(*(uint32_t *) s
->code_ptr
);
365 tcg_out_opc_imm(s
, opc
, rt
, rs
, offset
);
371 static inline void tcg_out_opc_sa(TCGContext
*s
, int opc
, int rd
, int rt
, int sa
)
376 inst
|= (rt
& 0x1F) << 16;
377 inst
|= (rd
& 0x1F) << 11;
378 inst
|= (sa
& 0x1F) << 6;
383 static inline void tcg_out_nop(TCGContext
*s
)
388 static inline void tcg_out_mov(TCGContext
*s
, TCGType type
,
389 TCGReg ret
, TCGReg arg
)
391 /* Simple reg-reg move, optimising out the 'do nothing' case */
393 tcg_out_opc_reg(s
, OPC_ADDU
, ret
, arg
, TCG_REG_ZERO
);
397 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
398 TCGReg reg
, tcg_target_long arg
)
400 if (arg
== (int16_t)arg
) {
401 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, TCG_REG_ZERO
, arg
);
402 } else if (arg
== (uint16_t)arg
) {
403 tcg_out_opc_imm(s
, OPC_ORI
, reg
, TCG_REG_ZERO
, arg
);
405 tcg_out_opc_imm(s
, OPC_LUI
, reg
, 0, arg
>> 16);
406 tcg_out_opc_imm(s
, OPC_ORI
, reg
, reg
, arg
& 0xffff);
410 static inline void tcg_out_bswap16(TCGContext
*s
, int ret
, int arg
)
412 /* ret and arg can't be register at */
413 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
417 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
418 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0x00ff);
420 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 8);
421 tcg_out_opc_imm(s
, OPC_ANDI
, ret
, ret
, 0xff00);
422 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
425 static inline void tcg_out_bswap16s(TCGContext
*s
, int ret
, int arg
)
427 /* ret and arg can't be register at */
428 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
432 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
433 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff);
435 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
436 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
437 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
440 static inline void tcg_out_bswap32(TCGContext
*s
, int ret
, int arg
)
442 /* ret and arg must be different and can't be register at */
443 if (ret
== arg
|| ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
447 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
449 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 24);
450 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
452 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, arg
, 0xff00);
453 tcg_out_opc_sa(s
, OPC_SLL
, TCG_REG_AT
, TCG_REG_AT
, 8);
454 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
456 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
457 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff00);
458 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
461 static inline void tcg_out_ext8s(TCGContext
*s
, int ret
, int arg
)
463 #ifdef _MIPS_ARCH_MIPS32R2
464 tcg_out_opc_reg(s
, OPC_SEB
, ret
, 0, arg
);
466 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
467 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 24);
471 static inline void tcg_out_ext16s(TCGContext
*s
, int ret
, int arg
)
473 #ifdef _MIPS_ARCH_MIPS32R2
474 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, arg
);
476 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 16);
477 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
481 static inline void tcg_out_ldst(TCGContext
*s
, int opc
, int arg
,
482 int arg1
, tcg_target_long arg2
)
484 if (arg2
== (int16_t) arg2
) {
485 tcg_out_opc_imm(s
, opc
, arg
, arg1
, arg2
);
487 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, arg2
);
488 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, TCG_REG_AT
, arg1
);
489 tcg_out_opc_imm(s
, opc
, arg
, TCG_REG_AT
, 0);
493 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, TCGReg arg
,
494 TCGReg arg1
, tcg_target_long arg2
)
496 tcg_out_ldst(s
, OPC_LW
, arg
, arg1
, arg2
);
499 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, TCGReg arg
,
500 TCGReg arg1
, tcg_target_long arg2
)
502 tcg_out_ldst(s
, OPC_SW
, arg
, arg1
, arg2
);
505 static inline void tcg_out_addi(TCGContext
*s
, int reg
, tcg_target_long val
)
507 if (val
== (int16_t)val
) {
508 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, reg
, val
);
510 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, val
);
511 tcg_out_opc_reg(s
, OPC_ADDU
, reg
, reg
, TCG_REG_AT
);
515 /* Helper routines for marshalling helper function arguments into
516 * the correct registers and stack.
517 * arg_num is where we want to put this argument, and is updated to be ready
518 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
519 * real registers, 4+ on stack.
521 * We provide routines for arguments which are: immediate, 32 bit
522 * value in register, 16 and 8 bit values in register (which must be zero
523 * extended before use) and 64 bit value in a lo:hi register pair.
525 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
526 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
528 if (*arg_num < 4) { \
529 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
531 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
532 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
536 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
537 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
538 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8
, TCGReg arg
)
539 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
540 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
541 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
542 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16
, TCGReg arg
)
543 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
544 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
545 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
546 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32
, uint32_t arg
)
547 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
549 /* We don't use the macro for this one to avoid an unnecessary reg-reg
550 move when storing to the stack. */
551 static inline void tcg_out_call_iarg_reg32(TCGContext
*s
, int *arg_num
,
555 tcg_out_mov(s
, TCG_TYPE_I32
, tcg_target_call_iarg_regs
[*arg_num
], arg
);
557 tcg_out_st(s
, TCG_TYPE_I32
, arg
, TCG_REG_SP
, 4 * (*arg_num
));
562 static inline void tcg_out_call_iarg_reg64(TCGContext
*s
, int *arg_num
,
563 TCGReg arg_low
, TCGReg arg_high
)
565 (*arg_num
) = (*arg_num
+ 1) & ~1;
567 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
568 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
569 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
571 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
572 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
576 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
, int arg1
,
577 int arg2
, int label_index
)
579 TCGLabel
*l
= &s
->labels
[label_index
];
583 tcg_out_opc_br(s
, OPC_BEQ
, arg1
, arg2
);
586 tcg_out_opc_br(s
, OPC_BNE
, arg1
, arg2
);
589 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
590 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
593 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
594 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
597 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
598 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
601 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
602 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
605 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
606 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
609 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
610 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
613 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
614 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
617 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
618 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
625 reloc_pc16(s
->code_ptr
- 4, l
->u
.value
);
627 tcg_out_reloc(s
, s
->code_ptr
- 4, R_MIPS_PC16
, label_index
, 0);
632 /* XXX: we implement it at the target level to avoid having to
633 handle cross basic blocks temporaries */
634 static void tcg_out_brcond2(TCGContext
*s
, TCGCond cond
, int arg1
,
635 int arg2
, int arg3
, int arg4
, int label_index
)
641 tcg_out_brcond(s
, TCG_COND_NE
, arg2
, arg4
, label_index
);
642 tcg_out_brcond(s
, TCG_COND_NE
, arg1
, arg3
, label_index
);
648 tcg_out_brcond(s
, TCG_COND_LT
, arg2
, arg4
, label_index
);
652 tcg_out_brcond(s
, TCG_COND_GT
, arg2
, arg4
, label_index
);
656 tcg_out_brcond(s
, TCG_COND_LTU
, arg2
, arg4
, label_index
);
660 tcg_out_brcond(s
, TCG_COND_GTU
, arg2
, arg4
, label_index
);
666 label_ptr
= s
->code_ptr
;
667 tcg_out_opc_br(s
, OPC_BNE
, arg2
, arg4
);
672 tcg_out_brcond(s
, TCG_COND_EQ
, arg1
, arg3
, label_index
);
676 tcg_out_brcond(s
, TCG_COND_LTU
, arg1
, arg3
, label_index
);
680 tcg_out_brcond(s
, TCG_COND_LEU
, arg1
, arg3
, label_index
);
684 tcg_out_brcond(s
, TCG_COND_GTU
, arg1
, arg3
, label_index
);
688 tcg_out_brcond(s
, TCG_COND_GEU
, arg1
, arg3
, label_index
);
694 reloc_pc16(label_ptr
, (tcg_target_long
) s
->code_ptr
);
697 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, int ret
,
703 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg2
, 1);
704 } else if (arg2
== 0) {
705 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg1
, 1);
707 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
708 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, ret
, 1);
713 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg2
);
714 } else if (arg2
== 0) {
715 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg1
);
717 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
718 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, ret
);
722 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
725 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
728 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
729 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
732 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
733 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
736 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
737 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
740 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
741 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
744 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
747 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
755 /* XXX: we implement it at the target level to avoid having to
756 handle cross basic blocks temporaries */
757 static void tcg_out_setcond2(TCGContext
*s
, TCGCond cond
, int ret
,
758 int arg1
, int arg2
, int arg3
, int arg4
)
762 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_AT
, arg2
, arg4
);
763 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg1
, arg3
);
764 tcg_out_opc_reg(s
, OPC_AND
, ret
, TCG_REG_AT
, TCG_REG_T0
);
767 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_AT
, arg2
, arg4
);
768 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_T0
, arg1
, arg3
);
769 tcg_out_opc_reg(s
, OPC_OR
, ret
, TCG_REG_AT
, TCG_REG_T0
);
773 tcg_out_setcond(s
, TCG_COND_LT
, TCG_REG_AT
, arg2
, arg4
);
777 tcg_out_setcond(s
, TCG_COND_GT
, TCG_REG_AT
, arg2
, arg4
);
781 tcg_out_setcond(s
, TCG_COND_LTU
, TCG_REG_AT
, arg2
, arg4
);
785 tcg_out_setcond(s
, TCG_COND_GTU
, TCG_REG_AT
, arg2
, arg4
);
792 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg2
, arg4
);
797 tcg_out_setcond(s
, TCG_COND_LTU
, ret
, arg1
, arg3
);
801 tcg_out_setcond(s
, TCG_COND_LEU
, ret
, arg1
, arg3
);
805 tcg_out_setcond(s
, TCG_COND_GTU
, ret
, arg1
, arg3
);
809 tcg_out_setcond(s
, TCG_COND_GEU
, ret
, arg1
, arg3
);
815 tcg_out_opc_reg(s
, OPC_AND
, ret
, ret
, TCG_REG_T0
);
816 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
819 #if defined(CONFIG_SOFTMMU)
821 #include "../../softmmu_defs.h"
823 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
825 static const void * const qemu_ld_helpers
[4] = {
832 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
833 uintxx_t val, int mmu_idx) */
834 static const void * const qemu_st_helpers
[4] = {
842 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
845 int addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
846 #if defined(CONFIG_SOFTMMU)
847 void *label1_ptr
, *label2_ptr
;
849 int mem_index
, s_bits
;
851 # if TARGET_LONG_BITS == 64
853 int addr_regh
, addr_memh
;
862 #if defined(CONFIG_SOFTMMU)
863 # if TARGET_LONG_BITS == 64
865 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
880 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
881 data_reg1
= data_regh
;
882 data_reg2
= data_regl
;
884 data_reg1
= data_regl
;
885 data_reg2
= data_regh
;
888 data_reg1
= data_regl
;
891 #if defined(CONFIG_SOFTMMU)
892 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
893 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
894 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
895 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
896 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_meml
);
897 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
898 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
900 # if TARGET_LONG_BITS == 64
901 label3_ptr
= s
->code_ptr
;
902 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
905 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
906 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_memh
);
908 label1_ptr
= s
->code_ptr
;
909 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
912 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
914 label1_ptr
= s
->code_ptr
;
915 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
921 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
922 # if TARGET_LONG_BITS == 64
923 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
925 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
927 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
928 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_ld_helpers
[s_bits
]);
929 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
934 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xff);
937 tcg_out_ext8s(s
, data_reg1
, TCG_REG_V0
);
940 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xffff);
943 tcg_out_ext16s(s
, data_reg1
, TCG_REG_V0
);
946 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
949 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg2
, TCG_REG_V1
);
950 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
956 label2_ptr
= s
->code_ptr
;
957 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
960 /* label1: fast path */
961 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
963 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
964 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
965 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_A0
, addr_regl
);
967 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
968 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_V0
, addr_regl
, GUEST_BASE
);
970 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_V0
, GUEST_BASE
);
971 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_V0
, addr_regl
);
977 tcg_out_opc_imm(s
, OPC_LBU
, data_reg1
, TCG_REG_V0
, 0);
980 tcg_out_opc_imm(s
, OPC_LB
, data_reg1
, TCG_REG_V0
, 0);
983 if (TCG_NEED_BSWAP
) {
984 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
985 tcg_out_bswap16(s
, data_reg1
, TCG_REG_T0
);
987 tcg_out_opc_imm(s
, OPC_LHU
, data_reg1
, TCG_REG_V0
, 0);
991 if (TCG_NEED_BSWAP
) {
992 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
993 tcg_out_bswap16s(s
, data_reg1
, TCG_REG_T0
);
995 tcg_out_opc_imm(s
, OPC_LH
, data_reg1
, TCG_REG_V0
, 0);
999 if (TCG_NEED_BSWAP
) {
1000 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1001 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1003 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1007 if (TCG_NEED_BSWAP
) {
1008 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 4);
1009 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1010 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1011 tcg_out_bswap32(s
, data_reg2
, TCG_REG_T0
);
1013 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1014 tcg_out_opc_imm(s
, OPC_LW
, data_reg2
, TCG_REG_V0
, 4);
1021 #if defined(CONFIG_SOFTMMU)
1022 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1026 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
1029 int addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
1030 #if defined(CONFIG_SOFTMMU)
1031 uint8_t *label1_ptr
, *label2_ptr
;
1033 int mem_index
, s_bits
;
1036 #if TARGET_LONG_BITS == 64
1037 # if defined(CONFIG_SOFTMMU)
1038 uint8_t *label3_ptr
;
1039 int addr_regh
, addr_memh
;
1042 data_regl
= *args
++;
1044 data_regh
= *args
++;
1048 addr_regl
= *args
++;
1049 #if defined(CONFIG_SOFTMMU)
1050 # if TARGET_LONG_BITS == 64
1051 addr_regh
= *args
++;
1052 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
1067 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1068 data_reg1
= data_regh
;
1069 data_reg2
= data_regl
;
1071 data_reg1
= data_regl
;
1072 data_reg2
= data_regh
;
1075 data_reg1
= data_regl
;
1079 #if defined(CONFIG_SOFTMMU)
1080 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
1081 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
1082 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
1083 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1084 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_meml
);
1085 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
1086 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1088 # if TARGET_LONG_BITS == 64
1089 label3_ptr
= s
->code_ptr
;
1090 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1093 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1094 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_memh
);
1096 label1_ptr
= s
->code_ptr
;
1097 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1100 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
1102 label1_ptr
= s
->code_ptr
;
1103 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1109 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1110 # if TARGET_LONG_BITS == 64
1111 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1113 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1117 tcg_out_call_iarg_reg8(s
, &arg_num
, data_regl
);
1120 tcg_out_call_iarg_reg16(s
, &arg_num
, data_regl
);
1123 tcg_out_call_iarg_reg32(s
, &arg_num
, data_regl
);
1126 tcg_out_call_iarg_reg64(s
, &arg_num
, data_regl
, data_regh
);
1131 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1132 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_st_helpers
[s_bits
]);
1133 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1136 label2_ptr
= s
->code_ptr
;
1137 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1140 /* label1: fast path */
1141 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1143 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1144 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1145 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1147 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1148 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_A0
, addr_regl
, GUEST_BASE
);
1150 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_A0
, GUEST_BASE
);
1151 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1158 tcg_out_opc_imm(s
, OPC_SB
, data_reg1
, TCG_REG_A0
, 0);
1161 if (TCG_NEED_BSWAP
) {
1162 tcg_out_bswap16(s
, TCG_REG_T0
, data_reg1
);
1163 tcg_out_opc_imm(s
, OPC_SH
, TCG_REG_T0
, TCG_REG_A0
, 0);
1165 tcg_out_opc_imm(s
, OPC_SH
, data_reg1
, TCG_REG_A0
, 0);
1169 if (TCG_NEED_BSWAP
) {
1170 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1171 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1173 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1177 if (TCG_NEED_BSWAP
) {
1178 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg2
);
1179 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1180 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1181 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 4);
1183 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1184 tcg_out_opc_imm(s
, OPC_SW
, data_reg2
, TCG_REG_A0
, 4);
1191 #if defined(CONFIG_SOFTMMU)
1192 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1196 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1197 const TCGArg
*args
, const int *const_args
)
1200 case INDEX_op_exit_tb
:
1201 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_V0
, args
[0]);
1202 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_AT
, (tcg_target_long
)tb_ret_addr
);
1203 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1206 case INDEX_op_goto_tb
:
1207 if (s
->tb_jmp_offset
) {
1208 /* direct jump method */
1211 /* indirect jump method */
1212 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, (tcg_target_long
)(s
->tb_next
+ args
[0]));
1213 tcg_out_ld(s
, TCG_TYPE_PTR
, TCG_REG_AT
, TCG_REG_AT
, 0);
1214 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1217 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1220 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, args
[0], 0);
1224 tcg_out_opc_reg(s
, OPC_JR
, 0, args
[0], 0);
1228 tcg_out_brcond(s
, TCG_COND_EQ
, TCG_REG_ZERO
, TCG_REG_ZERO
, args
[0]);
1231 case INDEX_op_mov_i32
:
1232 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1234 case INDEX_op_movi_i32
:
1235 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1238 case INDEX_op_ld8u_i32
:
1239 tcg_out_ldst(s
, OPC_LBU
, args
[0], args
[1], args
[2]);
1241 case INDEX_op_ld8s_i32
:
1242 tcg_out_ldst(s
, OPC_LB
, args
[0], args
[1], args
[2]);
1244 case INDEX_op_ld16u_i32
:
1245 tcg_out_ldst(s
, OPC_LHU
, args
[0], args
[1], args
[2]);
1247 case INDEX_op_ld16s_i32
:
1248 tcg_out_ldst(s
, OPC_LH
, args
[0], args
[1], args
[2]);
1250 case INDEX_op_ld_i32
:
1251 tcg_out_ldst(s
, OPC_LW
, args
[0], args
[1], args
[2]);
1253 case INDEX_op_st8_i32
:
1254 tcg_out_ldst(s
, OPC_SB
, args
[0], args
[1], args
[2]);
1256 case INDEX_op_st16_i32
:
1257 tcg_out_ldst(s
, OPC_SH
, args
[0], args
[1], args
[2]);
1259 case INDEX_op_st_i32
:
1260 tcg_out_ldst(s
, OPC_SW
, args
[0], args
[1], args
[2]);
1263 case INDEX_op_add_i32
:
1264 if (const_args
[2]) {
1265 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], args
[2]);
1267 tcg_out_opc_reg(s
, OPC_ADDU
, args
[0], args
[1], args
[2]);
1270 case INDEX_op_add2_i32
:
1271 if (const_args
[4]) {
1272 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], args
[4]);
1274 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, args
[2], args
[4]);
1276 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, TCG_REG_AT
, args
[2]);
1277 if (const_args
[5]) {
1278 tcg_out_opc_imm(s
, OPC_ADDIU
, args
/*
 * NOTE(review): garbled extraction.  Each statement below is shattered
 * across several lines and the original file's line numbers (1280,
 * 1285, ...) are fused into the text.  Structural lines ("} else {",
 * "break;", "}") were lost in extraction.  Bytes are kept exactly as
 * found; only comments have been added.
 *
 * This span is the interior of the per-TCG-opcode emission switch of
 * the MIPS code generator (the enclosing function header is outside
 * this chunk).  Each "case INDEX_op_*" emits the MIPS instruction
 * sequence implementing one TCG opcode via the tcg_out_opc_* helpers.
 */
/* Tail of the 64-bit add (add2) case: ADDU the high halves, fold the
   carry held in TCG_REG_T0 into the high result, then move the low
   result out of the TCG_REG_AT scratch register. */
[1], args
[3], args
[5]);
1280 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[3], args
[5]);
1282 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[1], TCG_REG_T0
);
1283 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
/* sub: a constant subtrahend becomes ADDIU with the negated
   immediate (MIPS has no SUBI); otherwise register SUBU. */
1285 case INDEX_op_sub_i32
:
1286 if (const_args
[2]) {
1287 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], -args
[2]);
1289 tcg_out_opc_reg(s
, OPC_SUBU
, args
[0], args
[1], args
[2]);
/* sub2: 64-bit subtract as two 32-bit halves.  Low half goes to AT;
   SLTU derives the borrow into T0 (AT > args[2] iff a borrow
   occurred), which is then subtracted from the high half. */
1292 case INDEX_op_sub2_i32
:
1293 if (const_args
[4]) {
1294 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], -args
[4]);
1296 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, args
[2], args
[4]);
1298 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, args
[2], TCG_REG_AT
);
1299 if (const_args
[5]) {
1300 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], -args
[5]);
1302 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[3], args
[5]);
1304 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[1], TCG_REG_T0
);
1305 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
/* mul: MULT writes HI/LO; MFLO fetches the low 32-bit product. */
1307 case INDEX_op_mul_i32
:
1308 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[1], args
[2]);
1309 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
/* mulu2: unsigned 32x32->64 multiply; low half via MFLO into
   args[0], high half via MFHI into args[1]. */
1311 case INDEX_op_mulu2_i32
:
1312 tcg_out_opc_reg(s
, OPC_MULTU
, 0, args
[2], args
[3]);
1313 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1314 tcg_out_opc_reg(s
, OPC_MFHI
, args
[1], 0, 0);
/* div/divu: quotient lands in LO (MFLO). */
1316 case INDEX_op_div_i32
:
1317 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1318 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1320 case INDEX_op_divu_i32
:
1321 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1322 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
/* rem/remu: same DIV/DIVU, but the remainder lands in HI (MFHI). */
1324 case INDEX_op_rem_i32
:
1325 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1326 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1328 case INDEX_op_remu_i32
:
1329 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1330 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
/* Bitwise ops: immediate form (ANDI/ORI/XORI) when the constraint
   marked the operand constant, otherwise the register form. */
1333 case INDEX_op_and_i32
:
1334 if (const_args
[2]) {
1335 tcg_out_opc_imm(s
, OPC_ANDI
, args
[0], args
[1], args
[2]);
1337 tcg_out_opc_reg(s
, OPC_AND
, args
[0], args
[1], args
[2]);
1340 case INDEX_op_or_i32
:
1341 if (const_args
[2]) {
1342 tcg_out_opc_imm(s
, OPC_ORI
, args
[0], args
[1], args
[2]);
1344 tcg_out_opc_reg(s
, OPC_OR
, args
[0], args
[1], args
[2]);
1347 case INDEX_op_nor_i32
:
1348 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], args
[1], args
[2]);
/* not: NOR with the hardwired zero register gives bitwise NOT. */
1350 case INDEX_op_not_i32
:
1351 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], TCG_REG_ZERO
, args
[1]);
1353 case INDEX_op_xor_i32
:
1354 if (const_args
[2]) {
1355 tcg_out_opc_imm(s
, OPC_XORI
, args
[0], args
[1], args
[2]);
1357 tcg_out_opc_reg(s
, OPC_XOR
, args
[0], args
[1], args
[2]);
/* Shifts: tcg_out_opc_sa for constant shift amounts (SRA/SLL/SRL),
   variable form (SRAV/SLLV/SRLV) otherwise.  Note the variable form
   passes the shift amount (args[2]) before the value (args[1]),
   matching the MIPS rs=amount, rt=value encoding. */
1361 case INDEX_op_sar_i32
:
1362 if (const_args
[2]) {
1363 tcg_out_opc_sa(s
, OPC_SRA
, args
[0], args
[1], args
[2]);
1365 tcg_out_opc_reg(s
, OPC_SRAV
, args
[0], args
[2], args
[1]);
1368 case INDEX_op_shl_i32
:
1369 if (const_args
[2]) {
1370 tcg_out_opc_sa(s
, OPC_SLL
, args
[0], args
[1], args
[2]);
1372 tcg_out_opc_reg(s
, OPC_SLLV
, args
[0], args
[2], args
[1]);
1375 case INDEX_op_shr_i32
:
1376 if (const_args
[2]) {
1377 tcg_out_opc_sa(s
, OPC_SRL
, args
[0], args
[1], args
[2]);
1379 tcg_out_opc_reg(s
, OPC_SRLV
, args
[0], args
[2], args
[1]);
/* Sign extensions delegate to dedicated helpers. */
1383 case INDEX_op_ext8s_i32
:
1384 tcg_out_ext8s(s
, args
[0], args
[1]);
1386 case INDEX_op_ext16s_i32
:
1387 tcg_out_ext16s(s
, args
[0], args
[1]);
/* Conditional branches / setcond: the condition code is passed first
   (args[2] resp. args[4]/args[3]/args[5]), then operands, then the
   label or destination. */
1390 case INDEX_op_brcond_i32
:
1391 tcg_out_brcond(s
, args
[2], args
[0], args
[1], args
[3]);
1393 case INDEX_op_brcond2_i32
:
1394 tcg_out_brcond2(s
, args
[4], args
[0], args
[1], args
[2], args
[3], args
[5]);
1397 case INDEX_op_setcond_i32
:
1398 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2]);
1400 case INDEX_op_setcond2_i32
:
1401 tcg_out_setcond2(s
, args
[5], args
[0], args
[1], args
[2], args
[3], args
[4]);
/* Guest memory access: last argument encodes the access size as
   log2(bytes) (0=8-bit .. 3=64-bit); "| 4" marks sign-extending
   loads, as seen in the ld8s/ld16s cases. */
1404 case INDEX_op_qemu_ld8u
:
1405 tcg_out_qemu_ld(s
, args
, 0);
1407 case INDEX_op_qemu_ld8s
:
1408 tcg_out_qemu_ld(s
, args
, 0 | 4);
1410 case INDEX_op_qemu_ld16u
:
1411 tcg_out_qemu_ld(s
, args
, 1);
1413 case INDEX_op_qemu_ld16s
:
1414 tcg_out_qemu_ld(s
, args
, 1 | 4);
1416 case INDEX_op_qemu_ld32
:
1417 tcg_out_qemu_ld(s
, args
, 2);
1419 case INDEX_op_qemu_ld64
:
1420 tcg_out_qemu_ld(s
, args
, 3);
1422 case INDEX_op_qemu_st8
:
1423 tcg_out_qemu_st(s
, args
, 0);
1425 case INDEX_op_qemu_st16
:
1426 tcg_out_qemu_st(s
, args
, 1);
1428 case INDEX_op_qemu_st32
:
1429 tcg_out_qemu_st(s
, args
, 2);
1431 case INDEX_op_qemu_st64
:
1432 tcg_out_qemu_st(s
, args
, 3);
/*
 * NOTE(review): garbled extraction — entries are split across lines
 * and original file line numbers are fused into the text.  Bytes kept
 * as found; only comments added.  The table's closing "};" (and the
 * "#else"/"#endif" pair of the TARGET_LONG_BITS conditional) were
 * lost in extraction.
 *
 * Operand-constraint table: one entry per supported TCG opcode, with
 * one constraint string per operand.  The letters ("r", "Z", "I",
 * "J", "i", "L", "l", "S", "C") are defined by this backend's
 * constraint-parsing code, which is outside this chunk — TODO confirm
 * their exact meanings there before relying on them.
 */
1440 static const TCGTargetOpDef mips_op_defs
[] = {
1441 { INDEX_op_exit_tb
, { } },
1442 { INDEX_op_goto_tb
, { } },
1443 { INDEX_op_call
, { "C" } },
1444 { INDEX_op_jmp
, { "r" } },
1445 { INDEX_op_br
, { } },
/* Moves and host loads/stores. */
1447 { INDEX_op_mov_i32
, { "r", "r" } },
1448 { INDEX_op_movi_i32
, { "r" } },
1449 { INDEX_op_ld8u_i32
, { "r", "r" } },
1450 { INDEX_op_ld8s_i32
, { "r", "r" } },
1451 { INDEX_op_ld16u_i32
, { "r", "r" } },
1452 { INDEX_op_ld16s_i32
, { "r", "r" } },
1453 { INDEX_op_ld_i32
, { "r", "r" } },
1454 { INDEX_op_st8_i32
, { "rZ", "r" } },
1455 { INDEX_op_st16_i32
, { "rZ", "r" } },
1456 { INDEX_op_st_i32
, { "rZ", "r" } },
/* Arithmetic. */
1458 { INDEX_op_add_i32
, { "r", "rZ", "rJ" } },
1459 { INDEX_op_mul_i32
, { "r", "rZ", "rZ" } },
1460 { INDEX_op_mulu2_i32
, { "r", "r", "rZ", "rZ" } },
1461 { INDEX_op_div_i32
, { "r", "rZ", "rZ" } },
1462 { INDEX_op_divu_i32
, { "r", "rZ", "rZ" } },
1463 { INDEX_op_rem_i32
, { "r", "rZ", "rZ" } },
1464 { INDEX_op_remu_i32
, { "r", "rZ", "rZ" } },
1465 { INDEX_op_sub_i32
, { "r", "rZ", "rJ" } },
/* Logic. */
1467 { INDEX_op_and_i32
, { "r", "rZ", "rI" } },
1468 { INDEX_op_nor_i32
, { "r", "rZ", "rZ" } },
1469 { INDEX_op_not_i32
, { "r", "rZ" } },
1470 { INDEX_op_or_i32
, { "r", "rZ", "rIZ" } },
1471 { INDEX_op_xor_i32
, { "r", "rZ", "rIZ" } },
/* Shifts. */
1473 { INDEX_op_shl_i32
, { "r", "rZ", "ri" } },
1474 { INDEX_op_shr_i32
, { "r", "rZ", "ri" } },
1475 { INDEX_op_sar_i32
, { "r", "rZ", "ri" } },
/* Sign extensions. */
1477 { INDEX_op_ext8s_i32
, { "r", "rZ" } },
1478 { INDEX_op_ext16s_i32
, { "r", "rZ" } },
/* Conditionals. */
1480 { INDEX_op_brcond_i32
, { "rZ", "rZ" } },
1481 { INDEX_op_setcond_i32
, { "r", "rZ", "rZ" } },
1482 { INDEX_op_setcond2_i32
, { "r", "rZ", "rZ", "rZ", "rZ" } },
/* 64-bit-as-pairs ops. */
1484 { INDEX_op_add2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1485 { INDEX_op_sub2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1486 { INDEX_op_brcond2_i32
, { "rZ", "rZ", "rZ", "rZ" } },
/* Guest memory ops: when the guest address fits one 32-bit register,
   one address operand; the duplicated entries below with an extra
   "lZ"/"SZ" are presumably the #else branch for 64-bit guest
   addresses passed as a register pair (the #else/#endif lines were
   lost in extraction — TODO confirm against upstream). */
1488 #if TARGET_LONG_BITS == 32
1489 { INDEX_op_qemu_ld8u
, { "L", "lZ" } },
1490 { INDEX_op_qemu_ld8s
, { "L", "lZ" } },
1491 { INDEX_op_qemu_ld16u
, { "L", "lZ" } },
1492 { INDEX_op_qemu_ld16s
, { "L", "lZ" } },
1493 { INDEX_op_qemu_ld32
, { "L", "lZ" } },
1494 { INDEX_op_qemu_ld64
, { "L", "L", "lZ" } },
1496 { INDEX_op_qemu_st8
, { "SZ", "SZ" } },
1497 { INDEX_op_qemu_st16
, { "SZ", "SZ" } },
1498 { INDEX_op_qemu_st32
, { "SZ", "SZ" } },
1499 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ" } },
1501 { INDEX_op_qemu_ld8u
, { "L", "lZ", "lZ" } },
1502 { INDEX_op_qemu_ld8s
, { "L", "lZ", "lZ" } },
1503 { INDEX_op_qemu_ld16u
, { "L", "lZ", "lZ" } },
1504 { INDEX_op_qemu_ld16s
, { "L", "lZ", "lZ" } },
1505 { INDEX_op_qemu_ld32
, { "L", "lZ", "lZ" } },
1506 { INDEX_op_qemu_ld64
, { "L", "L", "lZ", "lZ" } },
1508 { INDEX_op_qemu_st8
, { "SZ", "SZ", "SZ" } },
1509 { INDEX_op_qemu_st16
, { "SZ", "SZ", "SZ" } },
1510 { INDEX_op_qemu_st32
, { "SZ", "SZ", "SZ" } },
1511 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ", "SZ" } },
/*
 * Callee-saved registers the prologue must preserve across calls into
 * generated code.  NOTE(review): garbled extraction — the entries
 * between S0 (orig line 1517) and RA (orig line 1527) were lost;
 * presumably the remaining S-registers — confirm against upstream.
 * Bytes kept as found; only comments added.
 */
1516 static int tcg_target_callee_save_regs
[] = {
1517 TCG_REG_S0
, /* used for the global env (TCG_AREG0) */
1527 TCG_REG_RA
, /* should be last for ABI compliance */
/*
 * NOTE(review): garbled extraction — the function's opening brace,
 * local declarations (i, frame_size) and closing brace were lost;
 * bytes kept as found, only comments added.
 */
1530 /* Generate global QEMU prologue and epilogue code */
1531 static void tcg_target_qemu_prologue(TCGContext
*s
)
/* Frame = saved callee-save registers + outgoing static call-arg
   area, rounded up to the target stack alignment. */
1535 /* reserve some stack space */
1536 frame_size
= ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1537 + TCG_STATIC_CALL_ARGS_SIZE
;
1538 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1539 ~(TCG_TARGET_STACK_ALIGN
- 1);
/* Prologue: drop SP, spill each callee-saved register above the
   static call-args area. */
1542 tcg_out_addi(s
, TCG_REG_SP
, -frame_size
);
1543 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1544 tcg_out_st(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1545 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
/* Jump to the code pointer passed in the second argument register.
   The mov of the first argument (env) into TCG_AREG0 is emitted
   after the JR — presumably so it lands in the branch delay slot and
   executes before the jump takes effect; confirm against the MIPS
   delay-slot rules. */
1548 /* Call generated code */
1549 tcg_out_opc_reg(s
, OPC_JR
, 0, tcg_target_call_iarg_regs
[1], 0);
1550 tcg_out_mov(s
, TCG_TYPE_PTR
, TCG_AREG0
, tcg_target_call_iarg_regs
[0]);
/* Epilogue starts here; exit_tb jumps back to this address. */
1551 tb_ret_addr
= s
->code_ptr
;
/* Restore callee-saved registers ... */
1554 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1555 tcg_out_ld(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1556 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
/* ... then return via RA, with the SP restore after the JR —
   presumably again the delay slot; confirm. */
1559 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_RA
, 0);
1560 tcg_out_addi(s
, TCG_REG_SP
, frame_size
);
/*
 * One-time backend initialization: declare which host registers are
 * available / call-clobbered / reserved, register the opcode
 * constraint table, and set up the frame used for TCG temp spills.
 * NOTE(review): garbled extraction — the call-clobbered register list
 * (orig lines 1567-1582) and the closing brace were lost; bytes kept
 * as found, only comments added.
 */
1563 static void tcg_target_init(TCGContext
*s
)
/* All 32 GPRs are usable for 32-bit values. */
1565 tcg_regset_set(tcg_target_available_regs
[TCG_TYPE_I32
], 0xffffffff);
/* Call-clobbered set (contents lost in extraction). */
1566 tcg_regset_set(tcg_target_call_clobber_regs
,
/* Registers the allocator must never hand out. */
1583 tcg_regset_clear(s
->reserved_regs
);
1584 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ZERO
); /* zero register */
1585 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K0
); /* kernel use only */
1586 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K1
); /* kernel use only */
1587 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_AT
); /* internal use */
1588 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_T0
); /* internal use */
1589 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_RA
); /* return address */
1590 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_SP
); /* stack pointer */
/* Publish the per-opcode constraint table to the TCG core. */
1592 tcg_add_target_add_op_defs(mips_op_defs
);
/* Spill area for TCG temporaries lives in env's temp_buf. */
1593 tcg_set_frame(s
, TCG_AREG0
, offsetof(CPUArchState
, temp_buf
),
1594 CPU_TEMP_BUF_NLONGS
* sizeof(long));