2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/* A byte swap is needed whenever host and target endianness differ. */
#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif
34 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
70 /* check if we really need so many registers :P */
71 static const TCGReg tcg_target_reg_alloc_order
[] = {
97 static const TCGReg tcg_target_call_iarg_regs
[4] = {
104 static const TCGReg tcg_target_call_oarg_regs
[2] = {
/* Address that exit_tb jumps back to (set in the prologue). */
static uint8_t *tb_ret_addr;
111 static inline uint32_t reloc_lo16_val (void *pc
, tcg_target_long target
)
113 return target
& 0xffff;
116 static inline void reloc_lo16 (void *pc
, tcg_target_long target
)
118 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
119 | reloc_lo16_val(pc
, target
);
122 static inline uint32_t reloc_hi16_val (void *pc
, tcg_target_long target
)
124 return (target
>> 16) & 0xffff;
127 static inline void reloc_hi16 (void *pc
, tcg_target_long target
)
129 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
130 | reloc_hi16_val(pc
, target
);
133 static inline uint32_t reloc_pc16_val (void *pc
, tcg_target_long target
)
137 disp
= target
- (tcg_target_long
) pc
- 4;
138 if (disp
!= (disp
<< 14) >> 14) {
142 return (disp
>> 2) & 0xffff;
145 static inline void reloc_pc16 (void *pc
, tcg_target_long target
)
147 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
148 | reloc_pc16_val(pc
, target
);
151 static inline uint32_t reloc_26_val (void *pc
, tcg_target_long target
)
153 if ((((tcg_target_long
)pc
+ 4) & 0xf0000000) != (target
& 0xf0000000)) {
157 return (target
>> 2) & 0x3ffffff;
160 static inline void reloc_pc26 (void *pc
, tcg_target_long target
)
162 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0x3ffffff)
163 | reloc_26_val(pc
, target
);
166 static void patch_reloc(uint8_t *code_ptr
, int type
,
167 tcg_target_long value
, tcg_target_long addend
)
172 reloc_lo16(code_ptr
, value
);
175 reloc_hi16(code_ptr
, value
);
178 reloc_pc16(code_ptr
, value
);
181 reloc_pc26(code_ptr
, value
);
188 /* parse target specific constraints */
189 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
196 ct
->ct
|= TCG_CT_REG
;
197 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
200 ct
->ct
|= TCG_CT_REG
;
201 tcg_regset_clear(ct
->u
.regs
);
202 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_T9
);
204 case 'L': /* qemu_ld output arg constraint */
205 ct
->ct
|= TCG_CT_REG
;
206 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
207 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_V0
);
209 case 'l': /* qemu_ld input arg constraint */
210 ct
->ct
|= TCG_CT_REG
;
211 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
212 #if defined(CONFIG_SOFTMMU)
213 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
214 # if (TARGET_LONG_BITS == 64)
215 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
219 case 'S': /* qemu_st constraint */
220 ct
->ct
|= TCG_CT_REG
;
221 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
222 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
223 #if defined(CONFIG_SOFTMMU)
224 # if (TARGET_LONG_BITS == 32)
225 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A1
);
227 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
228 # if TARGET_LONG_BITS == 64
229 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A3
);
234 ct
->ct
|= TCG_CT_CONST_U16
;
237 ct
->ct
|= TCG_CT_CONST_S16
;
240 /* We are cheating a bit here, using the fact that the register
241 ZERO is also the register number 0. Hence there is no need
242 to check for const_args in each instruction. */
243 ct
->ct
|= TCG_CT_CONST_ZERO
;
253 /* test if a constant matches the constraint */
254 static inline int tcg_target_const_match(tcg_target_long val
,
255 const TCGArgConstraint
*arg_ct
)
259 if (ct
& TCG_CT_CONST
)
261 else if ((ct
& TCG_CT_CONST_ZERO
) && val
== 0)
263 else if ((ct
& TCG_CT_CONST_U16
) && val
== (uint16_t)val
)
265 else if ((ct
& TCG_CT_CONST_S16
) && val
== (int16_t)val
)
/* instruction opcodes */
enum {
    OPC_BEQ      = 0x04 << 26,
    OPC_BNE      = 0x05 << 26,
    OPC_BLEZ     = 0x06 << 26,
    OPC_BGTZ     = 0x07 << 26,
    OPC_ADDIU    = 0x09 << 26,
    OPC_SLTI     = 0x0A << 26,
    OPC_SLTIU    = 0x0B << 26,
    OPC_ANDI     = 0x0C << 26,
    OPC_ORI      = 0x0D << 26,
    OPC_XORI     = 0x0E << 26,
    OPC_LUI      = 0x0F << 26,
    OPC_LB       = 0x20 << 26,
    OPC_LH       = 0x21 << 26,
    OPC_LW       = 0x23 << 26,
    OPC_LBU      = 0x24 << 26,
    OPC_LHU      = 0x25 << 26,
    OPC_LWU      = 0x27 << 26,
    OPC_SB       = 0x28 << 26,
    OPC_SH       = 0x29 << 26,
    OPC_SW       = 0x2B << 26,

    OPC_SPECIAL  = 0x00 << 26,
    OPC_SLL      = OPC_SPECIAL | 0x00,
    OPC_SRL      = OPC_SPECIAL | 0x02,
    OPC_ROTR     = OPC_SPECIAL | (0x01 << 21) | 0x02,
    OPC_SRA      = OPC_SPECIAL | 0x03,
    OPC_SLLV     = OPC_SPECIAL | 0x04,
    OPC_SRLV     = OPC_SPECIAL | 0x06,
    OPC_ROTRV    = OPC_SPECIAL | (0x01 <<  6) | 0x06,
    OPC_SRAV     = OPC_SPECIAL | 0x07,
    OPC_JR       = OPC_SPECIAL | 0x08,
    OPC_JALR     = OPC_SPECIAL | 0x09,
    OPC_MOVZ     = OPC_SPECIAL | 0x0A,
    OPC_MOVN     = OPC_SPECIAL | 0x0B,
    OPC_MFHI     = OPC_SPECIAL | 0x10,
    OPC_MFLO     = OPC_SPECIAL | 0x12,
    OPC_MULT     = OPC_SPECIAL | 0x18,
    OPC_MULTU    = OPC_SPECIAL | 0x19,
    OPC_DIV      = OPC_SPECIAL | 0x1A,
    OPC_DIVU     = OPC_SPECIAL | 0x1B,
    OPC_ADDU     = OPC_SPECIAL | 0x21,
    OPC_SUBU     = OPC_SPECIAL | 0x23,
    OPC_AND      = OPC_SPECIAL | 0x24,
    OPC_OR       = OPC_SPECIAL | 0x25,
    OPC_XOR      = OPC_SPECIAL | 0x26,
    OPC_NOR      = OPC_SPECIAL | 0x27,
    OPC_SLT      = OPC_SPECIAL | 0x2A,
    OPC_SLTU     = OPC_SPECIAL | 0x2B,

    OPC_REGIMM   = 0x01 << 26,
    OPC_BLTZ     = OPC_REGIMM | (0x00 << 16),
    OPC_BGEZ     = OPC_REGIMM | (0x01 << 16),

    OPC_SPECIAL3 = 0x1f << 26,
    OPC_INS      = OPC_SPECIAL3 | 0x004,
    OPC_WSBH     = OPC_SPECIAL3 | 0x0a0,
    OPC_SEB      = OPC_SPECIAL3 | 0x420,
    OPC_SEH      = OPC_SPECIAL3 | 0x620,
};
336 static inline void tcg_out_opc_reg(TCGContext
*s
, int opc
,
337 TCGReg rd
, TCGReg rs
, TCGReg rt
)
342 inst
|= (rs
& 0x1F) << 21;
343 inst
|= (rt
& 0x1F) << 16;
344 inst
|= (rd
& 0x1F) << 11;
351 static inline void tcg_out_opc_imm(TCGContext
*s
, int opc
,
352 TCGReg rt
, TCGReg rs
, TCGArg imm
)
357 inst
|= (rs
& 0x1F) << 21;
358 inst
|= (rt
& 0x1F) << 16;
359 inst
|= (imm
& 0xffff);
366 static inline void tcg_out_opc_br(TCGContext
*s
, int opc
,
367 TCGReg rt
, TCGReg rs
)
369 /* We pay attention here to not modify the branch target by reading
370 the existing value and using it again. This ensure that caches and
371 memory are kept coherent during retranslation. */
372 uint16_t offset
= (uint16_t)(*(uint32_t *) s
->code_ptr
);
374 tcg_out_opc_imm(s
, opc
, rt
, rs
, offset
);
380 static inline void tcg_out_opc_sa(TCGContext
*s
, int opc
,
381 TCGReg rd
, TCGReg rt
, TCGArg sa
)
386 inst
|= (rt
& 0x1F) << 16;
387 inst
|= (rd
& 0x1F) << 11;
388 inst
|= (sa
& 0x1F) << 6;
393 static inline void tcg_out_nop(TCGContext
*s
)
398 static inline void tcg_out_mov(TCGContext
*s
, TCGType type
,
399 TCGReg ret
, TCGReg arg
)
401 /* Simple reg-reg move, optimising out the 'do nothing' case */
403 tcg_out_opc_reg(s
, OPC_ADDU
, ret
, arg
, TCG_REG_ZERO
);
407 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
408 TCGReg reg
, tcg_target_long arg
)
410 if (arg
== (int16_t)arg
) {
411 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, TCG_REG_ZERO
, arg
);
412 } else if (arg
== (uint16_t)arg
) {
413 tcg_out_opc_imm(s
, OPC_ORI
, reg
, TCG_REG_ZERO
, arg
);
415 tcg_out_opc_imm(s
, OPC_LUI
, reg
, 0, arg
>> 16);
416 tcg_out_opc_imm(s
, OPC_ORI
, reg
, reg
, arg
& 0xffff);
420 static inline void tcg_out_bswap16(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
422 #if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
423 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
425 /* ret and arg can't be register at */
426 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
430 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
431 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 8);
432 tcg_out_opc_imm(s
, OPC_ANDI
, ret
, ret
, 0xff00);
433 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
437 static inline void tcg_out_bswap16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
439 #if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
440 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
441 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, ret
);
443 /* ret and arg can't be register at */
444 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
448 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
449 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
450 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
451 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
455 static inline void tcg_out_bswap32(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
457 #if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
458 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
459 tcg_out_opc_sa(s
, OPC_ROTR
, ret
, ret
, 16);
461 /* ret and arg must be different and can't be register at */
462 if (ret
== arg
|| ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
466 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
468 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 24);
469 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
471 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, arg
, 0xff00);
472 tcg_out_opc_sa(s
, OPC_SLL
, TCG_REG_AT
, TCG_REG_AT
, 8);
473 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
475 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
476 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff00);
477 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
481 static inline void tcg_out_ext8s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
483 #if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
484 tcg_out_opc_reg(s
, OPC_SEB
, ret
, 0, arg
);
486 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
487 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 24);
491 static inline void tcg_out_ext16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
493 #if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
494 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, arg
);
496 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 16);
497 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
501 static inline void tcg_out_ldst(TCGContext
*s
, int opc
, TCGArg arg
,
502 TCGReg arg1
, TCGArg arg2
)
504 if (arg2
== (int16_t) arg2
) {
505 tcg_out_opc_imm(s
, opc
, arg
, arg1
, arg2
);
507 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, arg2
);
508 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, TCG_REG_AT
, arg1
);
509 tcg_out_opc_imm(s
, opc
, arg
, TCG_REG_AT
, 0);
513 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, TCGReg arg
,
514 TCGReg arg1
, tcg_target_long arg2
)
516 tcg_out_ldst(s
, OPC_LW
, arg
, arg1
, arg2
);
519 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, TCGReg arg
,
520 TCGReg arg1
, tcg_target_long arg2
)
522 tcg_out_ldst(s
, OPC_SW
, arg
, arg1
, arg2
);
525 static inline void tcg_out_addi(TCGContext
*s
, TCGReg reg
, TCGArg val
)
527 if (val
== (int16_t)val
) {
528 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, reg
, val
);
530 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, val
);
531 tcg_out_opc_reg(s
, OPC_ADDU
, reg
, reg
, TCG_REG_AT
);
535 /* Helper routines for marshalling helper function arguments into
536 * the correct registers and stack.
537 * arg_num is where we want to put this argument, and is updated to be ready
538 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
539 * real registers, 4+ on stack.
541 * We provide routines for arguments which are: immediate, 32 bit
542 * value in register, 16 and 8 bit values in register (which must be zero
543 * extended before use) and 64 bit value in a lo:hi register pair.
545 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
546 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
548 if (*arg_num < 4) { \
549 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
551 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
552 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
556 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
557 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
558 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8
, TCGReg arg
)
559 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
560 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
561 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
562 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16
, TCGReg arg
)
563 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
564 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
565 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
566 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32
, TCGArg arg
)
567 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
569 /* We don't use the macro for this one to avoid an unnecessary reg-reg
570 move when storing to the stack. */
571 static inline void tcg_out_call_iarg_reg32(TCGContext
*s
, int *arg_num
,
575 tcg_out_mov(s
, TCG_TYPE_I32
, tcg_target_call_iarg_regs
[*arg_num
], arg
);
577 tcg_out_st(s
, TCG_TYPE_I32
, arg
, TCG_REG_SP
, 4 * (*arg_num
));
582 static inline void tcg_out_call_iarg_reg64(TCGContext
*s
, int *arg_num
,
583 TCGReg arg_low
, TCGReg arg_high
)
585 (*arg_num
) = (*arg_num
+ 1) & ~1;
587 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
588 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
589 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
591 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
592 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
596 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
597 TCGArg arg2
, int label_index
)
599 TCGLabel
*l
= &s
->labels
[label_index
];
603 tcg_out_opc_br(s
, OPC_BEQ
, arg1
, arg2
);
606 tcg_out_opc_br(s
, OPC_BNE
, arg1
, arg2
);
610 tcg_out_opc_br(s
, OPC_BLTZ
, 0, arg1
);
612 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
613 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
617 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
618 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
622 tcg_out_opc_br(s
, OPC_BGEZ
, 0, arg1
);
624 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
625 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
629 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
630 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
634 tcg_out_opc_br(s
, OPC_BLEZ
, 0, arg1
);
636 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
637 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
641 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
642 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
646 tcg_out_opc_br(s
, OPC_BGTZ
, 0, arg1
);
648 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
649 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
653 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
654 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
661 reloc_pc16(s
->code_ptr
- 4, l
->u
.value
);
663 tcg_out_reloc(s
, s
->code_ptr
- 4, R_MIPS_PC16
, label_index
, 0);
668 /* XXX: we implement it at the target level to avoid having to
669 handle cross basic blocks temporaries */
670 static void tcg_out_brcond2(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
671 TCGArg arg2
, TCGArg arg3
, TCGArg arg4
,
678 tcg_out_brcond(s
, TCG_COND_NE
, arg2
, arg4
, label_index
);
679 tcg_out_brcond(s
, TCG_COND_NE
, arg1
, arg3
, label_index
);
685 tcg_out_brcond(s
, TCG_COND_LT
, arg2
, arg4
, label_index
);
689 tcg_out_brcond(s
, TCG_COND_GT
, arg2
, arg4
, label_index
);
693 tcg_out_brcond(s
, TCG_COND_LTU
, arg2
, arg4
, label_index
);
697 tcg_out_brcond(s
, TCG_COND_GTU
, arg2
, arg4
, label_index
);
703 label_ptr
= s
->code_ptr
;
704 tcg_out_opc_br(s
, OPC_BNE
, arg2
, arg4
);
709 tcg_out_brcond(s
, TCG_COND_EQ
, arg1
, arg3
, label_index
);
713 tcg_out_brcond(s
, TCG_COND_LTU
, arg1
, arg3
, label_index
);
717 tcg_out_brcond(s
, TCG_COND_LEU
, arg1
, arg3
, label_index
);
721 tcg_out_brcond(s
, TCG_COND_GTU
, arg1
, arg3
, label_index
);
725 tcg_out_brcond(s
, TCG_COND_GEU
, arg1
, arg3
, label_index
);
731 reloc_pc16(label_ptr
, (tcg_target_long
) s
->code_ptr
);
734 static void tcg_out_movcond(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
735 TCGArg c1
, TCGArg c2
, TCGArg v
)
740 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, c2
);
741 } else if (c2
== 0) {
742 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, c1
);
744 tcg_out_opc_reg(s
, OPC_XOR
, TCG_REG_AT
, c1
, c2
);
745 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
750 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, c2
);
751 } else if (c2
== 0) {
752 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, c1
);
754 tcg_out_opc_reg(s
, OPC_XOR
, TCG_REG_AT
, c1
, c2
);
755 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
759 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c1
, c2
);
760 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
763 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c1
, c2
);
764 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
767 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c1
, c2
);
768 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
771 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c1
, c2
);
772 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
775 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c2
, c1
);
776 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
779 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c2
, c1
);
780 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
783 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c2
, c1
);
784 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
787 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c2
, c1
);
788 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
796 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
797 TCGArg arg1
, TCGArg arg2
)
802 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg2
, 1);
803 } else if (arg2
== 0) {
804 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg1
, 1);
806 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
807 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, ret
, 1);
812 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg2
);
813 } else if (arg2
== 0) {
814 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg1
);
816 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
817 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, ret
);
821 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
824 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
827 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
828 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
831 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
832 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
835 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
836 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
839 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
840 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
843 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
846 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
854 /* XXX: we implement it at the target level to avoid having to
855 handle cross basic blocks temporaries */
856 static void tcg_out_setcond2(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
857 TCGArg arg1
, TCGArg arg2
, TCGArg arg3
, TCGArg arg4
)
861 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_AT
, arg2
, arg4
);
862 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg1
, arg3
);
863 tcg_out_opc_reg(s
, OPC_AND
, ret
, TCG_REG_AT
, TCG_REG_T0
);
866 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_AT
, arg2
, arg4
);
867 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_T0
, arg1
, arg3
);
868 tcg_out_opc_reg(s
, OPC_OR
, ret
, TCG_REG_AT
, TCG_REG_T0
);
872 tcg_out_setcond(s
, TCG_COND_LT
, TCG_REG_AT
, arg2
, arg4
);
876 tcg_out_setcond(s
, TCG_COND_GT
, TCG_REG_AT
, arg2
, arg4
);
880 tcg_out_setcond(s
, TCG_COND_LTU
, TCG_REG_AT
, arg2
, arg4
);
884 tcg_out_setcond(s
, TCG_COND_GTU
, TCG_REG_AT
, arg2
, arg4
);
891 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg2
, arg4
);
896 tcg_out_setcond(s
, TCG_COND_LTU
, ret
, arg1
, arg3
);
900 tcg_out_setcond(s
, TCG_COND_LEU
, ret
, arg1
, arg3
);
904 tcg_out_setcond(s
, TCG_COND_GTU
, ret
, arg1
, arg3
);
908 tcg_out_setcond(s
, TCG_COND_GEU
, ret
, arg1
, arg3
);
914 tcg_out_opc_reg(s
, OPC_AND
, ret
, ret
, TCG_REG_T0
);
915 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
   int mmu_idx) */
static const void * const qemu_ld_helpers[4] = {
    helper_ldb_mmu,
    helper_ldw_mmu,
    helper_ldl_mmu,
    helper_ldq_mmu,
};

/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
   uintxx_t val, int mmu_idx) */
static const void * const qemu_st_helpers[4] = {
    helper_stb_mmu,
    helper_stw_mmu,
    helper_stl_mmu,
    helper_stq_mmu,
};

#endif
941 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
944 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
945 #if defined(CONFIG_SOFTMMU)
946 void *label1_ptr
, *label2_ptr
;
948 int mem_index
, s_bits
;
950 # if TARGET_LONG_BITS == 64
962 #if defined(CONFIG_SOFTMMU)
963 # if TARGET_LONG_BITS == 64
965 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
980 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
981 data_reg1
= data_regh
;
982 data_reg2
= data_regl
;
984 data_reg1
= data_regl
;
985 data_reg2
= data_regh
;
988 data_reg1
= data_regl
;
991 #if defined(CONFIG_SOFTMMU)
992 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
993 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
994 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
995 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
996 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_meml
);
997 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
998 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1000 # if TARGET_LONG_BITS == 64
1001 label3_ptr
= s
->code_ptr
;
1002 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1005 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1006 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_memh
);
1008 label1_ptr
= s
->code_ptr
;
1009 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1012 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
1014 label1_ptr
= s
->code_ptr
;
1015 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1021 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1022 # if TARGET_LONG_BITS == 64
1023 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1025 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1027 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1028 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_ld_helpers
[s_bits
]);
1029 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1034 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xff);
1037 tcg_out_ext8s(s
, data_reg1
, TCG_REG_V0
);
1040 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xffff);
1043 tcg_out_ext16s(s
, data_reg1
, TCG_REG_V0
);
1046 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
1049 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg2
, TCG_REG_V1
);
1050 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
1056 label2_ptr
= s
->code_ptr
;
1057 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1060 /* label1: fast path */
1061 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1063 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1064 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1065 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_A0
, addr_regl
);
1067 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1068 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_V0
, addr_regl
, GUEST_BASE
);
1070 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_V0
, GUEST_BASE
);
1071 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_V0
, addr_regl
);
1077 tcg_out_opc_imm(s
, OPC_LBU
, data_reg1
, TCG_REG_V0
, 0);
1080 tcg_out_opc_imm(s
, OPC_LB
, data_reg1
, TCG_REG_V0
, 0);
1083 if (TCG_NEED_BSWAP
) {
1084 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1085 tcg_out_bswap16(s
, data_reg1
, TCG_REG_T0
);
1087 tcg_out_opc_imm(s
, OPC_LHU
, data_reg1
, TCG_REG_V0
, 0);
1091 if (TCG_NEED_BSWAP
) {
1092 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1093 tcg_out_bswap16s(s
, data_reg1
, TCG_REG_T0
);
1095 tcg_out_opc_imm(s
, OPC_LH
, data_reg1
, TCG_REG_V0
, 0);
1099 if (TCG_NEED_BSWAP
) {
1100 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1101 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1103 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1107 if (TCG_NEED_BSWAP
) {
1108 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 4);
1109 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1110 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1111 tcg_out_bswap32(s
, data_reg2
, TCG_REG_T0
);
1113 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1114 tcg_out_opc_imm(s
, OPC_LW
, data_reg2
, TCG_REG_V0
, 4);
1121 #if defined(CONFIG_SOFTMMU)
1122 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1126 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
1129 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
1130 #if defined(CONFIG_SOFTMMU)
1131 uint8_t *label1_ptr
, *label2_ptr
;
1133 int mem_index
, s_bits
;
1136 #if TARGET_LONG_BITS == 64
1137 # if defined(CONFIG_SOFTMMU)
1138 uint8_t *label3_ptr
;
1143 data_regl
= *args
++;
1145 data_regh
= *args
++;
1149 addr_regl
= *args
++;
1150 #if defined(CONFIG_SOFTMMU)
1151 # if TARGET_LONG_BITS == 64
1152 addr_regh
= *args
++;
1153 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
1168 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1169 data_reg1
= data_regh
;
1170 data_reg2
= data_regl
;
1172 data_reg1
= data_regl
;
1173 data_reg2
= data_regh
;
1176 data_reg1
= data_regl
;
1180 #if defined(CONFIG_SOFTMMU)
1181 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
1182 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
1183 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
1184 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1185 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_meml
);
1186 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
1187 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1189 # if TARGET_LONG_BITS == 64
1190 label3_ptr
= s
->code_ptr
;
1191 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1194 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1195 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_memh
);
1197 label1_ptr
= s
->code_ptr
;
1198 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1201 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
1203 label1_ptr
= s
->code_ptr
;
1204 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1210 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1211 # if TARGET_LONG_BITS == 64
1212 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1214 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1218 tcg_out_call_iarg_reg8(s
, &arg_num
, data_regl
);
1221 tcg_out_call_iarg_reg16(s
, &arg_num
, data_regl
);
1224 tcg_out_call_iarg_reg32(s
, &arg_num
, data_regl
);
1227 tcg_out_call_iarg_reg64(s
, &arg_num
, data_regl
, data_regh
);
1232 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1233 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_st_helpers
[s_bits
]);
1234 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1237 label2_ptr
= s
->code_ptr
;
1238 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1241 /* label1: fast path */
1242 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1244 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1245 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1246 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1248 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1249 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_A0
, addr_regl
, GUEST_BASE
);
1251 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_A0
, GUEST_BASE
);
1252 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1259 tcg_out_opc_imm(s
, OPC_SB
, data_reg1
, TCG_REG_A0
, 0);
1262 if (TCG_NEED_BSWAP
) {
1263 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_T0
, data_reg1
, 0xffff);
1264 tcg_out_bswap16(s
, TCG_REG_T0
, TCG_REG_T0
);
1265 tcg_out_opc_imm(s
, OPC_SH
, TCG_REG_T0
, TCG_REG_A0
, 0);
1267 tcg_out_opc_imm(s
, OPC_SH
, data_reg1
, TCG_REG_A0
, 0);
1271 if (TCG_NEED_BSWAP
) {
1272 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1273 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1275 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1279 if (TCG_NEED_BSWAP
) {
1280 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg2
);
1281 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1282 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1283 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 4);
1285 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1286 tcg_out_opc_imm(s
, OPC_SW
, data_reg2
, TCG_REG_A0
, 4);
1293 #if defined(CONFIG_SOFTMMU)
1294 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1298 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1299 const TCGArg
*args
, const int *const_args
)
1302 case INDEX_op_exit_tb
:
1303 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_V0
, args
[0]);
1304 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_AT
, (tcg_target_long
)tb_ret_addr
);
1305 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1308 case INDEX_op_goto_tb
:
1309 if (s
->tb_jmp_offset
) {
1310 /* direct jump method */
1313 /* indirect jump method */
1314 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, (tcg_target_long
)(s
->tb_next
+ args
[0]));
1315 tcg_out_ld(s
, TCG_TYPE_PTR
, TCG_REG_AT
, TCG_REG_AT
, 0);
1316 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1319 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1322 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, args
[0], 0);
1326 tcg_out_brcond(s
, TCG_COND_EQ
, TCG_REG_ZERO
, TCG_REG_ZERO
, args
[0]);
1329 case INDEX_op_mov_i32
:
1330 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1332 case INDEX_op_movi_i32
:
1333 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1336 case INDEX_op_ld8u_i32
:
1337 tcg_out_ldst(s
, OPC_LBU
, args
[0], args
[1], args
[2]);
1339 case INDEX_op_ld8s_i32
:
1340 tcg_out_ldst(s
, OPC_LB
, args
[0], args
[1], args
[2]);
1342 case INDEX_op_ld16u_i32
:
1343 tcg_out_ldst(s
, OPC_LHU
, args
[0], args
[1], args
[2]);
1345 case INDEX_op_ld16s_i32
:
1346 tcg_out_ldst(s
, OPC_LH
, args
[0], args
[1], args
[2]);
1348 case INDEX_op_ld_i32
:
1349 tcg_out_ldst(s
, OPC_LW
, args
[0], args
[1], args
[2]);
1351 case INDEX_op_st8_i32
:
1352 tcg_out_ldst(s
, OPC_SB
, args
[0], args
[1], args
[2]);
1354 case INDEX_op_st16_i32
:
1355 tcg_out_ldst(s
, OPC_SH
, args
[0], args
[1], args
[2]);
1357 case INDEX_op_st_i32
:
1358 tcg_out_ldst(s
, OPC_SW
, args
[0], args
[1], args
[2]);
1361 case INDEX_op_add_i32
:
1362 if (const_args
[2]) {
1363 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], args
[2]);
1365 tcg_out_opc_reg(s
, OPC_ADDU
, args
[0], args
[1], args
[2]);
1368 case INDEX_op_add2_i32
:
1369 if (const_args
[4]) {
1370 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], args
[4]);
1372 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, args
[2], args
[4]);
1374 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, TCG_REG_AT
, args
[2]);
1375 if (const_args
[5]) {
1376 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], args
[5]);
1378 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[3], args
[5]);
1380 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[1], TCG_REG_T0
);
1381 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1383 case INDEX_op_sub_i32
:
1384 if (const_args
[2]) {
1385 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], -args
[2]);
1387 tcg_out_opc_reg(s
, OPC_SUBU
, args
[0], args
[1], args
[2]);
1390 case INDEX_op_sub2_i32
:
1391 if (const_args
[4]) {
1392 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], -args
[4]);
1394 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, args
[2], args
[4]);
1396 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, args
[2], TCG_REG_AT
);
1397 if (const_args
[5]) {
1398 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], -args
[5]);
1400 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[3], args
[5]);
1402 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[1], TCG_REG_T0
);
1403 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1405 case INDEX_op_mul_i32
:
1406 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[1], args
[2]);
1407 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1409 case INDEX_op_mulu2_i32
:
1410 tcg_out_opc_reg(s
, OPC_MULTU
, 0, args
[2], args
[3]);
1411 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1412 tcg_out_opc_reg(s
, OPC_MFHI
, args
[1], 0, 0);
1414 case INDEX_op_div_i32
:
1415 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1416 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1418 case INDEX_op_divu_i32
:
1419 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1420 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1422 case INDEX_op_rem_i32
:
1423 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1424 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1426 case INDEX_op_remu_i32
:
1427 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1428 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1431 case INDEX_op_and_i32
:
1432 if (const_args
[2]) {
1433 tcg_out_opc_imm(s
, OPC_ANDI
, args
[0], args
[1], args
[2]);
1435 tcg_out_opc_reg(s
, OPC_AND
, args
[0], args
[1], args
[2]);
1438 case INDEX_op_or_i32
:
1439 if (const_args
[2]) {
1440 tcg_out_opc_imm(s
, OPC_ORI
, args
[0], args
[1], args
[2]);
1442 tcg_out_opc_reg(s
, OPC_OR
, args
[0], args
[1], args
[2]);
1445 case INDEX_op_nor_i32
:
1446 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], args
[1], args
[2]);
1448 case INDEX_op_not_i32
:
1449 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], TCG_REG_ZERO
, args
[1]);
1451 case INDEX_op_xor_i32
:
1452 if (const_args
[2]) {
1453 tcg_out_opc_imm(s
, OPC_XORI
, args
[0], args
[1], args
[2]);
1455 tcg_out_opc_reg(s
, OPC_XOR
, args
[0], args
[1], args
[2]);
1459 case INDEX_op_sar_i32
:
1460 if (const_args
[2]) {
1461 tcg_out_opc_sa(s
, OPC_SRA
, args
[0], args
[1], args
[2]);
1463 tcg_out_opc_reg(s
, OPC_SRAV
, args
[0], args
[2], args
[1]);
1466 case INDEX_op_shl_i32
:
1467 if (const_args
[2]) {
1468 tcg_out_opc_sa(s
, OPC_SLL
, args
[0], args
[1], args
[2]);
1470 tcg_out_opc_reg(s
, OPC_SLLV
, args
[0], args
[2], args
[1]);
1473 case INDEX_op_shr_i32
:
1474 if (const_args
[2]) {
1475 tcg_out_opc_sa(s
, OPC_SRL
, args
[0], args
[1], args
[2]);
1477 tcg_out_opc_reg(s
, OPC_SRLV
, args
[0], args
[2], args
[1]);
1480 case INDEX_op_rotl_i32
:
1481 if (const_args
[2]) {
1482 tcg_out_opc_sa(s
, OPC_ROTR
, args
[0], args
[1], 0x20 - args
[2]);
1484 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_AT
, 32);
1485 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, TCG_REG_AT
, args
[2]);
1486 tcg_out_opc_reg(s
, OPC_ROTRV
, args
[0], TCG_REG_AT
, args
[1]);
1489 case INDEX_op_rotr_i32
:
1490 if (const_args
[2]) {
1491 tcg_out_opc_sa(s
, OPC_ROTR
, args
[0], args
[1], args
[2]);
1493 tcg_out_opc_reg(s
, OPC_ROTRV
, args
[0], args
[2], args
[1]);
1497 /* The bswap routines do not work on non-R2 CPU. In that case
1498 we let TCG generating the corresponding code. */
1499 case INDEX_op_bswap16_i32
:
1500 tcg_out_bswap16(s
, args
[0], args
[1]);
1502 case INDEX_op_bswap32_i32
:
1503 tcg_out_bswap32(s
, args
[0], args
[1]);
1506 case INDEX_op_ext8s_i32
:
1507 tcg_out_ext8s(s
, args
[0], args
[1]);
1509 case INDEX_op_ext16s_i32
:
1510 tcg_out_ext16s(s
, args
[0], args
[1]);
1513 case INDEX_op_deposit_i32
:
1514 tcg_out_opc_imm(s
, OPC_INS
, args
[0], args
[2],
1515 ((args
[3] + args
[4] - 1) << 11) | (args
[3] << 6));
1518 case INDEX_op_brcond_i32
:
1519 tcg_out_brcond(s
, args
[2], args
[0], args
[1], args
[3]);
1521 case INDEX_op_brcond2_i32
:
1522 tcg_out_brcond2(s
, args
[4], args
[0], args
[1], args
[2], args
[3], args
[5]);
1525 case INDEX_op_movcond_i32
:
1526 tcg_out_movcond(s
, args
[5], args
[0], args
[1], args
[2], args
[3]);
1529 case INDEX_op_setcond_i32
:
1530 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2]);
1532 case INDEX_op_setcond2_i32
:
1533 tcg_out_setcond2(s
, args
[5], args
[0], args
[1], args
[2], args
[3], args
[4]);
1536 case INDEX_op_qemu_ld8u
:
1537 tcg_out_qemu_ld(s
, args
, 0);
1539 case INDEX_op_qemu_ld8s
:
1540 tcg_out_qemu_ld(s
, args
, 0 | 4);
1542 case INDEX_op_qemu_ld16u
:
1543 tcg_out_qemu_ld(s
, args
, 1);
1545 case INDEX_op_qemu_ld16s
:
1546 tcg_out_qemu_ld(s
, args
, 1 | 4);
1548 case INDEX_op_qemu_ld32
:
1549 tcg_out_qemu_ld(s
, args
, 2);
1551 case INDEX_op_qemu_ld64
:
1552 tcg_out_qemu_ld(s
, args
, 3);
1554 case INDEX_op_qemu_st8
:
1555 tcg_out_qemu_st(s
, args
, 0);
1557 case INDEX_op_qemu_st16
:
1558 tcg_out_qemu_st(s
, args
, 1);
1560 case INDEX_op_qemu_st32
:
1561 tcg_out_qemu_st(s
, args
, 2);
1563 case INDEX_op_qemu_st64
:
1564 tcg_out_qemu_st(s
, args
, 3);
1572 static const TCGTargetOpDef mips_op_defs
[] = {
1573 { INDEX_op_exit_tb
, { } },
1574 { INDEX_op_goto_tb
, { } },
1575 { INDEX_op_call
, { "C" } },
1576 { INDEX_op_br
, { } },
1578 { INDEX_op_mov_i32
, { "r", "r" } },
1579 { INDEX_op_movi_i32
, { "r" } },
1580 { INDEX_op_ld8u_i32
, { "r", "r" } },
1581 { INDEX_op_ld8s_i32
, { "r", "r" } },
1582 { INDEX_op_ld16u_i32
, { "r", "r" } },
1583 { INDEX_op_ld16s_i32
, { "r", "r" } },
1584 { INDEX_op_ld_i32
, { "r", "r" } },
1585 { INDEX_op_st8_i32
, { "rZ", "r" } },
1586 { INDEX_op_st16_i32
, { "rZ", "r" } },
1587 { INDEX_op_st_i32
, { "rZ", "r" } },
1589 { INDEX_op_add_i32
, { "r", "rZ", "rJ" } },
1590 { INDEX_op_mul_i32
, { "r", "rZ", "rZ" } },
1591 { INDEX_op_mulu2_i32
, { "r", "r", "rZ", "rZ" } },
1592 { INDEX_op_div_i32
, { "r", "rZ", "rZ" } },
1593 { INDEX_op_divu_i32
, { "r", "rZ", "rZ" } },
1594 { INDEX_op_rem_i32
, { "r", "rZ", "rZ" } },
1595 { INDEX_op_remu_i32
, { "r", "rZ", "rZ" } },
1596 { INDEX_op_sub_i32
, { "r", "rZ", "rJ" } },
1598 { INDEX_op_and_i32
, { "r", "rZ", "rI" } },
1599 { INDEX_op_nor_i32
, { "r", "rZ", "rZ" } },
1600 { INDEX_op_not_i32
, { "r", "rZ" } },
1601 { INDEX_op_or_i32
, { "r", "rZ", "rIZ" } },
1602 { INDEX_op_xor_i32
, { "r", "rZ", "rIZ" } },
1604 { INDEX_op_shl_i32
, { "r", "rZ", "ri" } },
1605 { INDEX_op_shr_i32
, { "r", "rZ", "ri" } },
1606 { INDEX_op_sar_i32
, { "r", "rZ", "ri" } },
1607 { INDEX_op_rotr_i32
, { "r", "rZ", "ri" } },
1608 { INDEX_op_rotl_i32
, { "r", "rZ", "ri" } },
1610 { INDEX_op_bswap16_i32
, { "r", "r" } },
1611 { INDEX_op_bswap32_i32
, { "r", "r" } },
1613 { INDEX_op_ext8s_i32
, { "r", "rZ" } },
1614 { INDEX_op_ext16s_i32
, { "r", "rZ" } },
1616 { INDEX_op_deposit_i32
, { "r", "0", "rZ" } },
1618 { INDEX_op_brcond_i32
, { "rZ", "rZ" } },
1619 { INDEX_op_movcond_i32
, { "r", "rZ", "rZ", "rZ", "0" } },
1620 { INDEX_op_setcond_i32
, { "r", "rZ", "rZ" } },
1621 { INDEX_op_setcond2_i32
, { "r", "rZ", "rZ", "rZ", "rZ" } },
1623 { INDEX_op_add2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1624 { INDEX_op_sub2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1625 { INDEX_op_brcond2_i32
, { "rZ", "rZ", "rZ", "rZ" } },
1627 #if TARGET_LONG_BITS == 32
1628 { INDEX_op_qemu_ld8u
, { "L", "lZ" } },
1629 { INDEX_op_qemu_ld8s
, { "L", "lZ" } },
1630 { INDEX_op_qemu_ld16u
, { "L", "lZ" } },
1631 { INDEX_op_qemu_ld16s
, { "L", "lZ" } },
1632 { INDEX_op_qemu_ld32
, { "L", "lZ" } },
1633 { INDEX_op_qemu_ld64
, { "L", "L", "lZ" } },
1635 { INDEX_op_qemu_st8
, { "SZ", "SZ" } },
1636 { INDEX_op_qemu_st16
, { "SZ", "SZ" } },
1637 { INDEX_op_qemu_st32
, { "SZ", "SZ" } },
1638 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ" } },
1640 { INDEX_op_qemu_ld8u
, { "L", "lZ", "lZ" } },
1641 { INDEX_op_qemu_ld8s
, { "L", "lZ", "lZ" } },
1642 { INDEX_op_qemu_ld16u
, { "L", "lZ", "lZ" } },
1643 { INDEX_op_qemu_ld16s
, { "L", "lZ", "lZ" } },
1644 { INDEX_op_qemu_ld32
, { "L", "lZ", "lZ" } },
1645 { INDEX_op_qemu_ld64
, { "L", "L", "lZ", "lZ" } },
1647 { INDEX_op_qemu_st8
, { "SZ", "SZ", "SZ" } },
1648 { INDEX_op_qemu_st16
, { "SZ", "SZ", "SZ" } },
1649 { INDEX_op_qemu_st32
, { "SZ", "SZ", "SZ" } },
1650 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ", "SZ" } },
/* Registers the prologue spills and the epilogue reloads.  The
   prologue/epilogue store each entry at SP + TCG_STATIC_CALL_ARGS_SIZE
   + i * 4, so the order of this array fixes the stack frame layout.
   NOTE(review): the interior entries (original lines 1657-1664,
   presumably the remaining callee-saved S registers) were lost in
   extraction, as was the closing brace -- restore them from the
   upstream file before building. */
1655 static int tcg_target_callee_save_regs
[] = {
1656 TCG_REG_S0
, /* used for the global env (TCG_AREG0) */
1665 TCG_REG_RA
, /* should be last for ABI compliance */
1668 /* Generate global QEMU prologue and epilogue code */
1669 static void tcg_target_qemu_prologue(TCGContext
*s
)
1673 /* reserve some stack space, also for TCG temps. */
1674 frame_size
= ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1675 + TCG_STATIC_CALL_ARGS_SIZE
1676 + CPU_TEMP_BUF_NLONGS
* sizeof(long);
1677 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1678 ~(TCG_TARGET_STACK_ALIGN
- 1);
1679 tcg_set_frame(s
, TCG_REG_SP
, ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1680 + TCG_STATIC_CALL_ARGS_SIZE
,
1681 CPU_TEMP_BUF_NLONGS
* sizeof(long));
1684 tcg_out_addi(s
, TCG_REG_SP
, -frame_size
);
1685 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1686 tcg_out_st(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1687 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1690 /* Call generated code */
1691 tcg_out_opc_reg(s
, OPC_JR
, 0, tcg_target_call_iarg_regs
[1], 0);
1692 tcg_out_mov(s
, TCG_TYPE_PTR
, TCG_AREG0
, tcg_target_call_iarg_regs
[0]);
1693 tb_ret_addr
= s
->code_ptr
;
1696 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1697 tcg_out_ld(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1698 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1701 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_RA
, 0);
1702 tcg_out_addi(s
, TCG_REG_SP
, frame_size
);
/* Target-specific TCG initialization: declare the register file,
   the call-clobbered set, and the reserved registers, then register
   the opcode constraint table. */
1705 static void tcg_target_init(TCGContext
*s
)
/* All 32 GPRs may hold 32-bit TCG values. */
1707 tcg_regset_set(tcg_target_available_regs
[TCG_TYPE_I32
], 0xffffffff);
/* NOTE(review): the call-clobber register mask (original lines
   1709-1724, presumably the caller-saved V/A/T registers) was lost in
   extraction -- restore it from the upstream file before building. */
1708 tcg_regset_set(tcg_target_call_clobber_regs
,
/* Registers the allocator must never hand out; AT and T0 are used as
   scratch by tcg_out_op and the prologue. */
1725 tcg_regset_clear(s
->reserved_regs
);
1726 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ZERO
); /* zero register */
1727 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K0
); /* kernel use only */
1728 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K1
); /* kernel use only */
1729 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_AT
); /* internal use */
1730 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_T0
); /* internal use */
1731 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_RA
); /* return address */
1732 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_SP
); /* stack pointer */
1733 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_GP
); /* global pointer */
/* Hand the constraint table to the core; it must end with { -1 }. */
1735 tcg_add_target_add_op_defs(mips_op_defs
);