2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 * THE SOFTWARE.
27 #include "tcg-be-null.h"
29 #if defined(HOST_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
30 # define TCG_NEED_BSWAP 0
32 # define TCG_NEED_BSWAP 1
36 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
72 /* check if we really need so many registers :P */
73 static const TCGReg tcg_target_reg_alloc_order
[] = {
99 static const TCGReg tcg_target_call_iarg_regs
[4] = {
106 static const TCGReg tcg_target_call_oarg_regs
[2] = {
111 static tcg_insn_unit
*tb_ret_addr
;
113 static inline uint32_t reloc_pc16_val(tcg_insn_unit
*pc
, tcg_insn_unit
*target
)
115 /* Let the compiler perform the right-shift as part of the arithmetic. */
116 ptrdiff_t disp
= target
- (pc
+ 1);
117 assert(disp
== (int16_t)disp
);
118 return disp
& 0xffff;
121 static inline void reloc_pc16(tcg_insn_unit
*pc
, tcg_insn_unit
*target
)
123 *pc
= deposit32(*pc
, 0, 16, reloc_pc16_val(pc
, target
));
126 static inline uint32_t reloc_26_val(tcg_insn_unit
*pc
, tcg_insn_unit
*target
)
128 assert((((uintptr_t)pc
^ (uintptr_t)target
) & 0xf0000000) == 0);
129 return ((uintptr_t)target
>> 2) & 0x3ffffff;
132 static inline void reloc_26(tcg_insn_unit
*pc
, tcg_insn_unit
*target
)
134 *pc
= deposit32(*pc
, 0, 26, reloc_26_val(pc
, target
));
137 static void patch_reloc(tcg_insn_unit
*code_ptr
, int type
,
138 intptr_t value
, intptr_t addend
)
140 assert(type
== R_MIPS_PC16
);
142 reloc_pc16(code_ptr
, (tcg_insn_unit
*)value
);
145 /* parse target specific constraints */
146 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
153 ct
->ct
|= TCG_CT_REG
;
154 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
156 case 'L': /* qemu_ld output arg constraint */
157 ct
->ct
|= TCG_CT_REG
;
158 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
159 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_V0
);
161 case 'l': /* qemu_ld input arg constraint */
162 ct
->ct
|= TCG_CT_REG
;
163 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
164 #if defined(CONFIG_SOFTMMU)
165 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
166 # if (TARGET_LONG_BITS == 64)
167 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
171 case 'S': /* qemu_st constraint */
172 ct
->ct
|= TCG_CT_REG
;
173 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
174 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
175 #if defined(CONFIG_SOFTMMU)
176 # if (TARGET_LONG_BITS == 32)
177 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A1
);
179 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
180 # if TARGET_LONG_BITS == 64
181 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A3
);
186 ct
->ct
|= TCG_CT_CONST_U16
;
189 ct
->ct
|= TCG_CT_CONST_S16
;
192 /* We are cheating a bit here, using the fact that the register
193 ZERO is also the register number 0. Hence there is no need
194 to check for const_args in each instruction. */
195 ct
->ct
|= TCG_CT_CONST_ZERO
;
205 /* test if a constant matches the constraint */
206 static inline int tcg_target_const_match(tcg_target_long val
, TCGType type
,
207 const TCGArgConstraint
*arg_ct
)
211 if (ct
& TCG_CT_CONST
)
213 else if ((ct
& TCG_CT_CONST_ZERO
) && val
== 0)
215 else if ((ct
& TCG_CT_CONST_U16
) && val
== (uint16_t)val
)
217 else if ((ct
& TCG_CT_CONST_S16
) && val
== (int16_t)val
)
223 /* instruction opcodes */
225 OPC_BEQ
= 0x04 << 26,
226 OPC_BNE
= 0x05 << 26,
227 OPC_BLEZ
= 0x06 << 26,
228 OPC_BGTZ
= 0x07 << 26,
229 OPC_ADDIU
= 0x09 << 26,
230 OPC_SLTI
= 0x0A << 26,
231 OPC_SLTIU
= 0x0B << 26,
232 OPC_ANDI
= 0x0C << 26,
233 OPC_ORI
= 0x0D << 26,
234 OPC_XORI
= 0x0E << 26,
235 OPC_LUI
= 0x0F << 26,
239 OPC_LBU
= 0x24 << 26,
240 OPC_LHU
= 0x25 << 26,
241 OPC_LWU
= 0x27 << 26,
246 OPC_SPECIAL
= 0x00 << 26,
247 OPC_SLL
= OPC_SPECIAL
| 0x00,
248 OPC_SRL
= OPC_SPECIAL
| 0x02,
249 OPC_ROTR
= OPC_SPECIAL
| (0x01 << 21) | 0x02,
250 OPC_SRA
= OPC_SPECIAL
| 0x03,
251 OPC_SLLV
= OPC_SPECIAL
| 0x04,
252 OPC_SRLV
= OPC_SPECIAL
| 0x06,
253 OPC_ROTRV
= OPC_SPECIAL
| (0x01 << 6) | 0x06,
254 OPC_SRAV
= OPC_SPECIAL
| 0x07,
255 OPC_JR
= OPC_SPECIAL
| 0x08,
256 OPC_JALR
= OPC_SPECIAL
| 0x09,
257 OPC_MOVZ
= OPC_SPECIAL
| 0x0A,
258 OPC_MOVN
= OPC_SPECIAL
| 0x0B,
259 OPC_MFHI
= OPC_SPECIAL
| 0x10,
260 OPC_MFLO
= OPC_SPECIAL
| 0x12,
261 OPC_MULT
= OPC_SPECIAL
| 0x18,
262 OPC_MULTU
= OPC_SPECIAL
| 0x19,
263 OPC_DIV
= OPC_SPECIAL
| 0x1A,
264 OPC_DIVU
= OPC_SPECIAL
| 0x1B,
265 OPC_ADDU
= OPC_SPECIAL
| 0x21,
266 OPC_SUBU
= OPC_SPECIAL
| 0x23,
267 OPC_AND
= OPC_SPECIAL
| 0x24,
268 OPC_OR
= OPC_SPECIAL
| 0x25,
269 OPC_XOR
= OPC_SPECIAL
| 0x26,
270 OPC_NOR
= OPC_SPECIAL
| 0x27,
271 OPC_SLT
= OPC_SPECIAL
| 0x2A,
272 OPC_SLTU
= OPC_SPECIAL
| 0x2B,
274 OPC_REGIMM
= 0x01 << 26,
275 OPC_BLTZ
= OPC_REGIMM
| (0x00 << 16),
276 OPC_BGEZ
= OPC_REGIMM
| (0x01 << 16),
278 OPC_SPECIAL2
= 0x1c << 26,
279 OPC_MUL
= OPC_SPECIAL2
| 0x002,
281 OPC_SPECIAL3
= 0x1f << 26,
282 OPC_INS
= OPC_SPECIAL3
| 0x004,
283 OPC_WSBH
= OPC_SPECIAL3
| 0x0a0,
284 OPC_SEB
= OPC_SPECIAL3
| 0x420,
285 OPC_SEH
= OPC_SPECIAL3
| 0x620,
291 static inline void tcg_out_opc_reg(TCGContext
*s
, int opc
,
292 TCGReg rd
, TCGReg rs
, TCGReg rt
)
297 inst
|= (rs
& 0x1F) << 21;
298 inst
|= (rt
& 0x1F) << 16;
299 inst
|= (rd
& 0x1F) << 11;
306 static inline void tcg_out_opc_imm(TCGContext
*s
, int opc
,
307 TCGReg rt
, TCGReg rs
, TCGArg imm
)
312 inst
|= (rs
& 0x1F) << 21;
313 inst
|= (rt
& 0x1F) << 16;
314 inst
|= (imm
& 0xffff);
321 static inline void tcg_out_opc_br(TCGContext
*s
, int opc
,
322 TCGReg rt
, TCGReg rs
)
324 /* We pay attention here to not modify the branch target by reading
325 the existing value and using it again. This ensure that caches and
326 memory are kept coherent during retranslation. */
327 uint16_t offset
= (uint16_t)*s
->code_ptr
;
329 tcg_out_opc_imm(s
, opc
, rt
, rs
, offset
);
335 static inline void tcg_out_opc_sa(TCGContext
*s
, int opc
,
336 TCGReg rd
, TCGReg rt
, TCGArg sa
)
341 inst
|= (rt
& 0x1F) << 16;
342 inst
|= (rd
& 0x1F) << 11;
343 inst
|= (sa
& 0x1F) << 6;
348 static inline void tcg_out_nop(TCGContext
*s
)
353 static inline void tcg_out_mov(TCGContext
*s
, TCGType type
,
354 TCGReg ret
, TCGReg arg
)
356 /* Simple reg-reg move, optimising out the 'do nothing' case */
358 tcg_out_opc_reg(s
, OPC_ADDU
, ret
, arg
, TCG_REG_ZERO
);
362 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
363 TCGReg reg
, tcg_target_long arg
)
365 if (arg
== (int16_t)arg
) {
366 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, TCG_REG_ZERO
, arg
);
367 } else if (arg
== (uint16_t)arg
) {
368 tcg_out_opc_imm(s
, OPC_ORI
, reg
, TCG_REG_ZERO
, arg
);
370 tcg_out_opc_imm(s
, OPC_LUI
, reg
, 0, arg
>> 16);
371 tcg_out_opc_imm(s
, OPC_ORI
, reg
, reg
, arg
& 0xffff);
375 static inline void tcg_out_bswap16(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
377 if (use_mips32r2_instructions
) {
378 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
380 /* ret and arg can't be register at */
381 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
385 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
386 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 8);
387 tcg_out_opc_imm(s
, OPC_ANDI
, ret
, ret
, 0xff00);
388 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
392 static inline void tcg_out_bswap16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
394 if (use_mips32r2_instructions
) {
395 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
396 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, ret
);
398 /* ret and arg can't be register at */
399 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
403 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
404 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
405 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
406 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
410 static inline void tcg_out_bswap32(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
412 if (use_mips32r2_instructions
) {
413 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
414 tcg_out_opc_sa(s
, OPC_ROTR
, ret
, ret
, 16);
416 /* ret and arg must be different and can't be register at */
417 if (ret
== arg
|| ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
421 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
423 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 24);
424 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
426 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, arg
, 0xff00);
427 tcg_out_opc_sa(s
, OPC_SLL
, TCG_REG_AT
, TCG_REG_AT
, 8);
428 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
430 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
431 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff00);
432 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
436 static inline void tcg_out_ext8s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
438 if (use_mips32r2_instructions
) {
439 tcg_out_opc_reg(s
, OPC_SEB
, ret
, 0, arg
);
441 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
442 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 24);
446 static inline void tcg_out_ext16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
448 if (use_mips32r2_instructions
) {
449 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, arg
);
451 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 16);
452 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
456 static inline void tcg_out_ldst(TCGContext
*s
, int opc
, TCGArg arg
,
457 TCGReg arg1
, TCGArg arg2
)
459 if (arg2
== (int16_t) arg2
) {
460 tcg_out_opc_imm(s
, opc
, arg
, arg1
, arg2
);
462 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, arg2
);
463 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, TCG_REG_AT
, arg1
);
464 tcg_out_opc_imm(s
, opc
, arg
, TCG_REG_AT
, 0);
468 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, TCGReg arg
,
469 TCGReg arg1
, intptr_t arg2
)
471 tcg_out_ldst(s
, OPC_LW
, arg
, arg1
, arg2
);
474 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, TCGReg arg
,
475 TCGReg arg1
, intptr_t arg2
)
477 tcg_out_ldst(s
, OPC_SW
, arg
, arg1
, arg2
);
480 static inline void tcg_out_addi(TCGContext
*s
, TCGReg reg
, TCGArg val
)
482 if (val
== (int16_t)val
) {
483 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, reg
, val
);
485 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, val
);
486 tcg_out_opc_reg(s
, OPC_ADDU
, reg
, reg
, TCG_REG_AT
);
490 /* Helper routines for marshalling helper function arguments into
491 * the correct registers and stack.
492 * arg_num is where we want to put this argument, and is updated to be ready
493 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
494 * real registers, 4+ on stack.
496 * We provide routines for arguments which are: immediate, 32 bit
497 * value in register, 16 and 8 bit values in register (which must be zero
498 * extended before use) and 64 bit value in a lo:hi register pair.
500 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
501 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
503 if (*arg_num < 4) { \
504 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
506 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
507 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
511 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
512 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
513 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8
, TCGReg arg
)
514 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
515 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
516 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
517 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16
, TCGReg arg
)
518 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
519 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
520 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
521 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32
, TCGArg arg
)
522 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
524 /* We don't use the macro for this one to avoid an unnecessary reg-reg
525 move when storing to the stack. */
526 static inline void tcg_out_call_iarg_reg32(TCGContext
*s
, int *arg_num
,
530 tcg_out_mov(s
, TCG_TYPE_I32
, tcg_target_call_iarg_regs
[*arg_num
], arg
);
532 tcg_out_st(s
, TCG_TYPE_I32
, arg
, TCG_REG_SP
, 4 * (*arg_num
));
537 static inline void tcg_out_call_iarg_reg64(TCGContext
*s
, int *arg_num
,
538 TCGReg arg_low
, TCGReg arg_high
)
540 (*arg_num
) = (*arg_num
+ 1) & ~1;
542 #if defined(HOST_WORDS_BIGENDIAN)
543 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
544 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
546 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
547 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
551 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
552 TCGArg arg2
, int label_index
)
554 TCGLabel
*l
= &s
->labels
[label_index
];
558 tcg_out_opc_br(s
, OPC_BEQ
, arg1
, arg2
);
561 tcg_out_opc_br(s
, OPC_BNE
, arg1
, arg2
);
565 tcg_out_opc_br(s
, OPC_BLTZ
, 0, arg1
);
567 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
568 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
572 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
573 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
577 tcg_out_opc_br(s
, OPC_BGEZ
, 0, arg1
);
579 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
580 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
584 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
585 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
589 tcg_out_opc_br(s
, OPC_BLEZ
, 0, arg1
);
591 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
592 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
596 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
597 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
601 tcg_out_opc_br(s
, OPC_BGTZ
, 0, arg1
);
603 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
604 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
608 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
609 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
616 reloc_pc16(s
->code_ptr
- 1, l
->u
.value_ptr
);
618 tcg_out_reloc(s
, s
->code_ptr
- 1, R_MIPS_PC16
, label_index
, 0);
623 /* XXX: we implement it at the target level to avoid having to
624 handle cross basic blocks temporaries */
625 static void tcg_out_brcond2(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
626 TCGArg arg2
, TCGArg arg3
, TCGArg arg4
,
629 tcg_insn_unit
*label_ptr
;
633 tcg_out_brcond(s
, TCG_COND_NE
, arg2
, arg4
, label_index
);
634 tcg_out_brcond(s
, TCG_COND_NE
, arg1
, arg3
, label_index
);
640 tcg_out_brcond(s
, TCG_COND_LT
, arg2
, arg4
, label_index
);
644 tcg_out_brcond(s
, TCG_COND_GT
, arg2
, arg4
, label_index
);
648 tcg_out_brcond(s
, TCG_COND_LTU
, arg2
, arg4
, label_index
);
652 tcg_out_brcond(s
, TCG_COND_GTU
, arg2
, arg4
, label_index
);
658 label_ptr
= s
->code_ptr
;
659 tcg_out_opc_br(s
, OPC_BNE
, arg2
, arg4
);
664 tcg_out_brcond(s
, TCG_COND_EQ
, arg1
, arg3
, label_index
);
668 tcg_out_brcond(s
, TCG_COND_LTU
, arg1
, arg3
, label_index
);
672 tcg_out_brcond(s
, TCG_COND_LEU
, arg1
, arg3
, label_index
);
676 tcg_out_brcond(s
, TCG_COND_GTU
, arg1
, arg3
, label_index
);
680 tcg_out_brcond(s
, TCG_COND_GEU
, arg1
, arg3
, label_index
);
686 reloc_pc16(label_ptr
, s
->code_ptr
);
689 static void tcg_out_movcond(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
690 TCGArg c1
, TCGArg c2
, TCGArg v
)
695 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, c2
);
696 } else if (c2
== 0) {
697 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, c1
);
699 tcg_out_opc_reg(s
, OPC_XOR
, TCG_REG_AT
, c1
, c2
);
700 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
705 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, c2
);
706 } else if (c2
== 0) {
707 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, c1
);
709 tcg_out_opc_reg(s
, OPC_XOR
, TCG_REG_AT
, c1
, c2
);
710 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
714 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c1
, c2
);
715 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
718 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c1
, c2
);
719 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
722 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c1
, c2
);
723 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
726 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c1
, c2
);
727 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
730 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c2
, c1
);
731 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
734 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c2
, c1
);
735 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
738 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c2
, c1
);
739 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
742 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c2
, c1
);
743 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
751 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
752 TCGArg arg1
, TCGArg arg2
)
757 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg2
, 1);
758 } else if (arg2
== 0) {
759 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg1
, 1);
761 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
762 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, ret
, 1);
767 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg2
);
768 } else if (arg2
== 0) {
769 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg1
);
771 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
772 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, ret
);
776 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
779 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
782 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
783 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
786 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
787 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
790 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
791 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
794 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
795 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
798 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
801 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
809 /* XXX: we implement it at the target level to avoid having to
810 handle cross basic blocks temporaries */
811 static void tcg_out_setcond2(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
812 TCGArg arg1
, TCGArg arg2
, TCGArg arg3
, TCGArg arg4
)
816 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_AT
, arg2
, arg4
);
817 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg1
, arg3
);
818 tcg_out_opc_reg(s
, OPC_AND
, ret
, TCG_REG_AT
, TCG_REG_T0
);
821 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_AT
, arg2
, arg4
);
822 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_T0
, arg1
, arg3
);
823 tcg_out_opc_reg(s
, OPC_OR
, ret
, TCG_REG_AT
, TCG_REG_T0
);
827 tcg_out_setcond(s
, TCG_COND_LT
, TCG_REG_AT
, arg2
, arg4
);
831 tcg_out_setcond(s
, TCG_COND_GT
, TCG_REG_AT
, arg2
, arg4
);
835 tcg_out_setcond(s
, TCG_COND_LTU
, TCG_REG_AT
, arg2
, arg4
);
839 tcg_out_setcond(s
, TCG_COND_GTU
, TCG_REG_AT
, arg2
, arg4
);
846 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg2
, arg4
);
851 tcg_out_setcond(s
, TCG_COND_LTU
, ret
, arg1
, arg3
);
855 tcg_out_setcond(s
, TCG_COND_LEU
, ret
, arg1
, arg3
);
859 tcg_out_setcond(s
, TCG_COND_GTU
, ret
, arg1
, arg3
);
863 tcg_out_setcond(s
, TCG_COND_GEU
, ret
, arg1
, arg3
);
869 tcg_out_opc_reg(s
, OPC_AND
, ret
, ret
, TCG_REG_T0
);
870 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
873 #if defined(CONFIG_SOFTMMU)
874 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
876 static const void * const qemu_ld_helpers
[4] = {
883 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
884 uintxx_t val, int mmu_idx) */
885 static const void * const qemu_st_helpers
[4] = {
893 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
896 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
897 #if defined(CONFIG_SOFTMMU)
898 tcg_insn_unit
*label1_ptr
, *label2_ptr
;
900 int mem_index
, s_bits
;
902 # if TARGET_LONG_BITS == 64
903 tcg_insn_unit
*label3_ptr
;
914 #if defined(CONFIG_SOFTMMU)
915 # if TARGET_LONG_BITS == 64
917 # if defined(HOST_WORDS_BIGENDIAN)
932 #if defined(HOST_WORDS_BIGENDIAN)
933 data_reg1
= data_regh
;
934 data_reg2
= data_regl
;
936 data_reg1
= data_regl
;
937 data_reg2
= data_regh
;
940 data_reg1
= data_regl
;
943 #if defined(CONFIG_SOFTMMU)
944 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
945 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
946 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
947 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
948 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_meml
);
949 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
950 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
952 # if TARGET_LONG_BITS == 64
953 label3_ptr
= s
->code_ptr
;
954 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
957 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
958 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_memh
);
960 label1_ptr
= s
->code_ptr
;
961 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
964 reloc_pc16(label3_ptr
, s
->code_ptr
);
966 label1_ptr
= s
->code_ptr
;
967 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
973 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
974 # if TARGET_LONG_BITS == 64
975 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
977 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
979 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
980 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_ld_helpers
[s_bits
]);
981 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
986 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xff);
989 tcg_out_ext8s(s
, data_reg1
, TCG_REG_V0
);
992 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xffff);
995 tcg_out_ext16s(s
, data_reg1
, TCG_REG_V0
);
998 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
1001 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg2
, TCG_REG_V1
);
1002 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
1008 label2_ptr
= s
->code_ptr
;
1009 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1012 /* label1: fast path */
1013 reloc_pc16(label1_ptr
, s
->code_ptr
);
1015 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1016 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1017 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_A0
, addr_regl
);
1019 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1020 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_V0
, addr_regl
, GUEST_BASE
);
1022 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_V0
, GUEST_BASE
);
1023 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_V0
, addr_regl
);
1029 tcg_out_opc_imm(s
, OPC_LBU
, data_reg1
, TCG_REG_V0
, 0);
1032 tcg_out_opc_imm(s
, OPC_LB
, data_reg1
, TCG_REG_V0
, 0);
1035 if (TCG_NEED_BSWAP
) {
1036 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1037 tcg_out_bswap16(s
, data_reg1
, TCG_REG_T0
);
1039 tcg_out_opc_imm(s
, OPC_LHU
, data_reg1
, TCG_REG_V0
, 0);
1043 if (TCG_NEED_BSWAP
) {
1044 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1045 tcg_out_bswap16s(s
, data_reg1
, TCG_REG_T0
);
1047 tcg_out_opc_imm(s
, OPC_LH
, data_reg1
, TCG_REG_V0
, 0);
1051 if (TCG_NEED_BSWAP
) {
1052 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1053 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1055 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1059 if (TCG_NEED_BSWAP
) {
1060 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 4);
1061 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1062 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1063 tcg_out_bswap32(s
, data_reg2
, TCG_REG_T0
);
1065 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1066 tcg_out_opc_imm(s
, OPC_LW
, data_reg2
, TCG_REG_V0
, 4);
1073 #if defined(CONFIG_SOFTMMU)
1074 reloc_pc16(label2_ptr
, s
->code_ptr
);
1078 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
1081 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
1082 #if defined(CONFIG_SOFTMMU)
1083 tcg_insn_unit
*label1_ptr
, *label2_ptr
;
1085 int mem_index
, s_bits
;
1088 #if TARGET_LONG_BITS == 64
1089 # if defined(CONFIG_SOFTMMU)
1090 tcg_insn_unit
*label3_ptr
;
1095 data_regl
= *args
++;
1097 data_regh
= *args
++;
1101 addr_regl
= *args
++;
1102 #if defined(CONFIG_SOFTMMU)
1103 # if TARGET_LONG_BITS == 64
1104 addr_regh
= *args
++;
1105 # if defined(HOST_WORDS_BIGENDIAN)
1120 #if defined(HOST_WORDS_BIGENDIAN)
1121 data_reg1
= data_regh
;
1122 data_reg2
= data_regl
;
1124 data_reg1
= data_regl
;
1125 data_reg2
= data_regh
;
1128 data_reg1
= data_regl
;
1132 #if defined(CONFIG_SOFTMMU)
1133 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
1134 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
1135 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
1136 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1137 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_meml
);
1138 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
1139 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1141 # if TARGET_LONG_BITS == 64
1142 label3_ptr
= s
->code_ptr
;
1143 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1146 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1147 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_memh
);
1149 label1_ptr
= s
->code_ptr
;
1150 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1153 reloc_pc16(label3_ptr
, s
->code_ptr
);
1155 label1_ptr
= s
->code_ptr
;
1156 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1162 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1163 # if TARGET_LONG_BITS == 64
1164 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1166 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1170 tcg_out_call_iarg_reg8(s
, &arg_num
, data_regl
);
1173 tcg_out_call_iarg_reg16(s
, &arg_num
, data_regl
);
1176 tcg_out_call_iarg_reg32(s
, &arg_num
, data_regl
);
1179 tcg_out_call_iarg_reg64(s
, &arg_num
, data_regl
, data_regh
);
1184 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1185 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_st_helpers
[s_bits
]);
1186 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1189 label2_ptr
= s
->code_ptr
;
1190 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1193 /* label1: fast path */
1194 reloc_pc16(label1_ptr
, s
->code_ptr
);
1196 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1197 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1198 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1200 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1201 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_A0
, addr_regl
, GUEST_BASE
);
1203 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_A0
, GUEST_BASE
);
1204 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1211 tcg_out_opc_imm(s
, OPC_SB
, data_reg1
, TCG_REG_A0
, 0);
1214 if (TCG_NEED_BSWAP
) {
1215 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_T0
, data_reg1
, 0xffff);
1216 tcg_out_bswap16(s
, TCG_REG_T0
, TCG_REG_T0
);
1217 tcg_out_opc_imm(s
, OPC_SH
, TCG_REG_T0
, TCG_REG_A0
, 0);
1219 tcg_out_opc_imm(s
, OPC_SH
, data_reg1
, TCG_REG_A0
, 0);
1223 if (TCG_NEED_BSWAP
) {
1224 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1225 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1227 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1231 if (TCG_NEED_BSWAP
) {
1232 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg2
);
1233 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1234 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1235 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 4);
1237 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1238 tcg_out_opc_imm(s
, OPC_SW
, data_reg2
, TCG_REG_A0
, 4);
1245 #if defined(CONFIG_SOFTMMU)
1246 reloc_pc16(label2_ptr
, s
->code_ptr
);
1250 static void tcg_out_call(TCGContext
*s
, tcg_insn_unit
*target
)
1252 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_T9
, (intptr_t)target
);
1253 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1257 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1258 const TCGArg
*args
, const int *const_args
)
1261 case INDEX_op_exit_tb
:
1262 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_V0
, args
[0]);
1263 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, (uintptr_t)tb_ret_addr
);
1264 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1267 case INDEX_op_goto_tb
:
1268 if (s
->tb_jmp_offset
) {
1269 /* direct jump method */
1272 /* indirect jump method */
1273 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
,
1274 (uintptr_t)(s
->tb_next
+ args
[0]));
1275 tcg_out_ld(s
, TCG_TYPE_PTR
, TCG_REG_AT
, TCG_REG_AT
, 0);
1276 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1279 s
->tb_next_offset
[args
[0]] = tcg_current_code_size(s
);
1282 assert(const_args
[0]);
1283 tcg_out_call(s
, (tcg_insn_unit
*)(intptr_t)args
[0]);
1286 tcg_out_brcond(s
, TCG_COND_EQ
, TCG_REG_ZERO
, TCG_REG_ZERO
, args
[0]);
1289 case INDEX_op_mov_i32
:
1290 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1292 case INDEX_op_movi_i32
:
1293 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1296 case INDEX_op_ld8u_i32
:
1297 tcg_out_ldst(s
, OPC_LBU
, args
[0], args
[1], args
[2]);
1299 case INDEX_op_ld8s_i32
:
1300 tcg_out_ldst(s
, OPC_LB
, args
[0], args
[1], args
[2]);
1302 case INDEX_op_ld16u_i32
:
1303 tcg_out_ldst(s
, OPC_LHU
, args
[0], args
[1], args
[2]);
1305 case INDEX_op_ld16s_i32
:
1306 tcg_out_ldst(s
, OPC_LH
, args
[0], args
[1], args
[2]);
1308 case INDEX_op_ld_i32
:
1309 tcg_out_ldst(s
, OPC_LW
, args
[0], args
[1], args
[2]);
1311 case INDEX_op_st8_i32
:
1312 tcg_out_ldst(s
, OPC_SB
, args
[0], args
[1], args
[2]);
1314 case INDEX_op_st16_i32
:
1315 tcg_out_ldst(s
, OPC_SH
, args
[0], args
[1], args
[2]);
1317 case INDEX_op_st_i32
:
1318 tcg_out_ldst(s
, OPC_SW
, args
[0], args
[1], args
[2]);
1321 case INDEX_op_add_i32
:
1322 if (const_args
[2]) {
1323 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], args
[2]);
1325 tcg_out_opc_reg(s
, OPC_ADDU
, args
[0], args
[1], args
[2]);
1328 case INDEX_op_add2_i32
:
1329 if (const_args
[4]) {
1330 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], args
[4]);
1332 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, args
[2], args
[4]);
1334 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, TCG_REG_AT
, args
[2]);
1335 if (const_args
[5]) {
1336 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], args
[5]);
1338 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[3], args
[5]);
1340 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[1], TCG_REG_T0
);
1341 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1343 case INDEX_op_sub_i32
:
1344 if (const_args
[2]) {
1345 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], -args
[2]);
1347 tcg_out_opc_reg(s
, OPC_SUBU
, args
[0], args
[1], args
[2]);
1350 case INDEX_op_sub2_i32
:
1351 if (const_args
[4]) {
1352 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], -args
[4]);
1354 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, args
[2], args
[4]);
1356 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, args
[2], TCG_REG_AT
);
1357 if (const_args
[5]) {
1358 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], -args
[5]);
1360 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[3], args
[5]);
1362 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[1], TCG_REG_T0
);
1363 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1365 case INDEX_op_mul_i32
:
1366 if (use_mips32_instructions
) {
1367 tcg_out_opc_reg(s
, OPC_MUL
, args
[0], args
[1], args
[2]);
1369 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[1], args
[2]);
1370 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1373 case INDEX_op_muls2_i32
:
1374 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[2], args
[3]);
1375 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1376 tcg_out_opc_reg(s
, OPC_MFHI
, args
[1], 0, 0);
1378 case INDEX_op_mulu2_i32
:
1379 tcg_out_opc_reg(s
, OPC_MULTU
, 0, args
[2], args
[3]);
1380 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1381 tcg_out_opc_reg(s
, OPC_MFHI
, args
[1], 0, 0);
1383 case INDEX_op_mulsh_i32
:
1384 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[1], args
[2]);
1385 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1387 case INDEX_op_muluh_i32
:
1388 tcg_out_opc_reg(s
, OPC_MULTU
, 0, args
[1], args
[2]);
1389 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1391 case INDEX_op_div_i32
:
1392 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1393 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1395 case INDEX_op_divu_i32
:
1396 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1397 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1399 case INDEX_op_rem_i32
:
1400 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1401 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1403 case INDEX_op_remu_i32
:
1404 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1405 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1408 case INDEX_op_and_i32
:
1409 if (const_args
[2]) {
1410 tcg_out_opc_imm(s
, OPC_ANDI
, args
[0], args
[1], args
[2]);
1412 tcg_out_opc_reg(s
, OPC_AND
, args
[0], args
[1], args
[2]);
1415 case INDEX_op_or_i32
:
1416 if (const_args
[2]) {
1417 tcg_out_opc_imm(s
, OPC_ORI
, args
[0], args
[1], args
[2]);
1419 tcg_out_opc_reg(s
, OPC_OR
, args
[0], args
[1], args
[2]);
1422 case INDEX_op_nor_i32
:
1423 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], args
[1], args
[2]);
1425 case INDEX_op_not_i32
:
1426 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], TCG_REG_ZERO
, args
[1]);
1428 case INDEX_op_xor_i32
:
1429 if (const_args
[2]) {
1430 tcg_out_opc_imm(s
, OPC_XORI
, args
[0], args
[1], args
[2]);
1432 tcg_out_opc_reg(s
, OPC_XOR
, args
[0], args
[1], args
[2]);
1436 case INDEX_op_sar_i32
:
1437 if (const_args
[2]) {
1438 tcg_out_opc_sa(s
, OPC_SRA
, args
[0], args
[1], args
[2]);
1440 tcg_out_opc_reg(s
, OPC_SRAV
, args
[0], args
[2], args
[1]);
1443 case INDEX_op_shl_i32
:
1444 if (const_args
[2]) {
1445 tcg_out_opc_sa(s
, OPC_SLL
, args
[0], args
[1], args
[2]);
1447 tcg_out_opc_reg(s
, OPC_SLLV
, args
[0], args
[2], args
[1]);
1450 case INDEX_op_shr_i32
:
1451 if (const_args
[2]) {
1452 tcg_out_opc_sa(s
, OPC_SRL
, args
[0], args
[1], args
[2]);
1454 tcg_out_opc_reg(s
, OPC_SRLV
, args
[0], args
[2], args
[1]);
1457 case INDEX_op_rotl_i32
:
1458 if (const_args
[2]) {
1459 tcg_out_opc_sa(s
, OPC_ROTR
, args
[0], args
[1], 0x20 - args
[2]);
1461 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_AT
, 32);
1462 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, TCG_REG_AT
, args
[2]);
1463 tcg_out_opc_reg(s
, OPC_ROTRV
, args
[0], TCG_REG_AT
, args
[1]);
1466 case INDEX_op_rotr_i32
:
1467 if (const_args
[2]) {
1468 tcg_out_opc_sa(s
, OPC_ROTR
, args
[0], args
[1], args
[2]);
1470 tcg_out_opc_reg(s
, OPC_ROTRV
, args
[0], args
[2], args
[1]);
1474 case INDEX_op_bswap16_i32
:
1475 tcg_out_opc_reg(s
, OPC_WSBH
, args
[0], 0, args
[1]);
1477 case INDEX_op_bswap32_i32
:
1478 tcg_out_opc_reg(s
, OPC_WSBH
, args
[0], 0, args
[1]);
1479 tcg_out_opc_sa(s
, OPC_ROTR
, args
[0], args
[0], 16);
1482 case INDEX_op_ext8s_i32
:
1483 tcg_out_opc_reg(s
, OPC_SEB
, args
[0], 0, args
[1]);
1485 case INDEX_op_ext16s_i32
:
1486 tcg_out_opc_reg(s
, OPC_SEH
, args
[0], 0, args
[1]);
1489 case INDEX_op_deposit_i32
:
1490 tcg_out_opc_imm(s
, OPC_INS
, args
[0], args
[2],
1491 ((args
[3] + args
[4] - 1) << 11) | (args
[3] << 6));
1494 case INDEX_op_brcond_i32
:
1495 tcg_out_brcond(s
, args
[2], args
[0], args
[1], args
[3]);
1497 case INDEX_op_brcond2_i32
:
1498 tcg_out_brcond2(s
, args
[4], args
[0], args
[1], args
[2], args
[3], args
[5]);
1501 case INDEX_op_movcond_i32
:
1502 tcg_out_movcond(s
, args
[5], args
[0], args
[1], args
[2], args
[3]);
1505 case INDEX_op_setcond_i32
:
1506 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2]);
1508 case INDEX_op_setcond2_i32
:
1509 tcg_out_setcond2(s
, args
[5], args
[0], args
[1], args
[2], args
[3], args
[4]);
1512 case INDEX_op_qemu_ld8u
:
1513 tcg_out_qemu_ld(s
, args
, 0);
1515 case INDEX_op_qemu_ld8s
:
1516 tcg_out_qemu_ld(s
, args
, 0 | 4);
1518 case INDEX_op_qemu_ld16u
:
1519 tcg_out_qemu_ld(s
, args
, 1);
1521 case INDEX_op_qemu_ld16s
:
1522 tcg_out_qemu_ld(s
, args
, 1 | 4);
1524 case INDEX_op_qemu_ld32
:
1525 tcg_out_qemu_ld(s
, args
, 2);
1527 case INDEX_op_qemu_ld64
:
1528 tcg_out_qemu_ld(s
, args
, 3);
1530 case INDEX_op_qemu_st8
:
1531 tcg_out_qemu_st(s
, args
, 0);
1533 case INDEX_op_qemu_st16
:
1534 tcg_out_qemu_st(s
, args
, 1);
1536 case INDEX_op_qemu_st32
:
1537 tcg_out_qemu_st(s
, args
, 2);
1539 case INDEX_op_qemu_st64
:
1540 tcg_out_qemu_st(s
, args
, 3);
1548 static const TCGTargetOpDef mips_op_defs
[] = {
1549 { INDEX_op_exit_tb
, { } },
1550 { INDEX_op_goto_tb
, { } },
1551 { INDEX_op_call
, { "i" } },
1552 { INDEX_op_br
, { } },
1554 { INDEX_op_mov_i32
, { "r", "r" } },
1555 { INDEX_op_movi_i32
, { "r" } },
1556 { INDEX_op_ld8u_i32
, { "r", "r" } },
1557 { INDEX_op_ld8s_i32
, { "r", "r" } },
1558 { INDEX_op_ld16u_i32
, { "r", "r" } },
1559 { INDEX_op_ld16s_i32
, { "r", "r" } },
1560 { INDEX_op_ld_i32
, { "r", "r" } },
1561 { INDEX_op_st8_i32
, { "rZ", "r" } },
1562 { INDEX_op_st16_i32
, { "rZ", "r" } },
1563 { INDEX_op_st_i32
, { "rZ", "r" } },
1565 { INDEX_op_add_i32
, { "r", "rZ", "rJ" } },
1566 { INDEX_op_mul_i32
, { "r", "rZ", "rZ" } },
1567 { INDEX_op_muls2_i32
, { "r", "r", "rZ", "rZ" } },
1568 { INDEX_op_mulu2_i32
, { "r", "r", "rZ", "rZ" } },
1569 { INDEX_op_mulsh_i32
, { "r", "rZ", "rZ" } },
1570 { INDEX_op_muluh_i32
, { "r", "rZ", "rZ" } },
1571 { INDEX_op_div_i32
, { "r", "rZ", "rZ" } },
1572 { INDEX_op_divu_i32
, { "r", "rZ", "rZ" } },
1573 { INDEX_op_rem_i32
, { "r", "rZ", "rZ" } },
1574 { INDEX_op_remu_i32
, { "r", "rZ", "rZ" } },
1575 { INDEX_op_sub_i32
, { "r", "rZ", "rJ" } },
1577 { INDEX_op_and_i32
, { "r", "rZ", "rI" } },
1578 { INDEX_op_nor_i32
, { "r", "rZ", "rZ" } },
1579 { INDEX_op_not_i32
, { "r", "rZ" } },
1580 { INDEX_op_or_i32
, { "r", "rZ", "rIZ" } },
1581 { INDEX_op_xor_i32
, { "r", "rZ", "rIZ" } },
1583 { INDEX_op_shl_i32
, { "r", "rZ", "ri" } },
1584 { INDEX_op_shr_i32
, { "r", "rZ", "ri" } },
1585 { INDEX_op_sar_i32
, { "r", "rZ", "ri" } },
1586 { INDEX_op_rotr_i32
, { "r", "rZ", "ri" } },
1587 { INDEX_op_rotl_i32
, { "r", "rZ", "ri" } },
1589 { INDEX_op_bswap16_i32
, { "r", "r" } },
1590 { INDEX_op_bswap32_i32
, { "r", "r" } },
1592 { INDEX_op_ext8s_i32
, { "r", "rZ" } },
1593 { INDEX_op_ext16s_i32
, { "r", "rZ" } },
1595 { INDEX_op_deposit_i32
, { "r", "0", "rZ" } },
1597 { INDEX_op_brcond_i32
, { "rZ", "rZ" } },
1598 { INDEX_op_movcond_i32
, { "r", "rZ", "rZ", "rZ", "0" } },
1599 { INDEX_op_setcond_i32
, { "r", "rZ", "rZ" } },
1600 { INDEX_op_setcond2_i32
, { "r", "rZ", "rZ", "rZ", "rZ" } },
1602 { INDEX_op_add2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1603 { INDEX_op_sub2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1604 { INDEX_op_brcond2_i32
, { "rZ", "rZ", "rZ", "rZ" } },
1606 #if TARGET_LONG_BITS == 32
1607 { INDEX_op_qemu_ld8u
, { "L", "lZ" } },
1608 { INDEX_op_qemu_ld8s
, { "L", "lZ" } },
1609 { INDEX_op_qemu_ld16u
, { "L", "lZ" } },
1610 { INDEX_op_qemu_ld16s
, { "L", "lZ" } },
1611 { INDEX_op_qemu_ld32
, { "L", "lZ" } },
1612 { INDEX_op_qemu_ld64
, { "L", "L", "lZ" } },
1614 { INDEX_op_qemu_st8
, { "SZ", "SZ" } },
1615 { INDEX_op_qemu_st16
, { "SZ", "SZ" } },
1616 { INDEX_op_qemu_st32
, { "SZ", "SZ" } },
1617 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ" } },
1619 { INDEX_op_qemu_ld8u
, { "L", "lZ", "lZ" } },
1620 { INDEX_op_qemu_ld8s
, { "L", "lZ", "lZ" } },
1621 { INDEX_op_qemu_ld16u
, { "L", "lZ", "lZ" } },
1622 { INDEX_op_qemu_ld16s
, { "L", "lZ", "lZ" } },
1623 { INDEX_op_qemu_ld32
, { "L", "lZ", "lZ" } },
1624 { INDEX_op_qemu_ld64
, { "L", "L", "lZ", "lZ" } },
1626 { INDEX_op_qemu_st8
, { "SZ", "SZ", "SZ" } },
1627 { INDEX_op_qemu_st16
, { "SZ", "SZ", "SZ" } },
1628 { INDEX_op_qemu_st32
, { "SZ", "SZ", "SZ" } },
1629 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ", "SZ" } },
1634 static int tcg_target_callee_save_regs
[] = {
1635 TCG_REG_S0
, /* used for the global env (TCG_AREG0) */
1644 TCG_REG_RA
, /* should be last for ABI compliance */
1647 /* The Linux kernel doesn't provide any information about the available
1648 instruction set. Probe it using a signal handler. */
1652 #ifndef use_movnz_instructions
1653 bool use_movnz_instructions
= false;
1656 #ifndef use_mips32_instructions
1657 bool use_mips32_instructions
= false;
1660 #ifndef use_mips32r2_instructions
1661 bool use_mips32r2_instructions
= false;
/*
 * Flag tested after each instruction probe (use_* = !got_sigill);
 * volatile sig_atomic_t because it is presumably written from the
 * SIGILL handler installed by tcg_target_detect_isa().
 */
static volatile sig_atomic_t got_sigill;
1666 static void sigill_handler(int signo
, siginfo_t
*si
, void *data
)
1668 /* Skip the faulty instruction */
1669 ucontext_t
*uc
= (ucontext_t
*)data
;
1670 uc
->uc_mcontext
.pc
+= 4;
1675 static void tcg_target_detect_isa(void)
1677 struct sigaction sa_old
, sa_new
;
1679 memset(&sa_new
, 0, sizeof(sa_new
));
1680 sa_new
.sa_flags
= SA_SIGINFO
;
1681 sa_new
.sa_sigaction
= sigill_handler
;
1682 sigaction(SIGILL
, &sa_new
, &sa_old
);
1684 /* Probe for movn/movz, necessary to implement movcond. */
1685 #ifndef use_movnz_instructions
1687 asm volatile(".set push\n"
1689 "movn $zero, $zero, $zero\n"
1690 "movz $zero, $zero, $zero\n"
1693 use_movnz_instructions
= !got_sigill
;
1696 /* Probe for MIPS32 instructions. As no subsetting is allowed
1697 by the specification, it is only necessary to probe for one
1698 of the instructions. */
1699 #ifndef use_mips32_instructions
1701 asm volatile(".set push\n"
1703 "mul $zero, $zero\n"
1706 use_mips32_instructions
= !got_sigill
;
1709 /* Probe for MIPS32r2 instructions if MIPS32 instructions are
1710 available. As no subsetting is allowed by the specification,
1711 it is only necessary to probe for one of the instructions. */
1712 #ifndef use_mips32r2_instructions
1713 if (use_mips32_instructions
) {
1715 asm volatile(".set push\n"
1717 "seb $zero, $zero\n"
1720 use_mips32r2_instructions
= !got_sigill
;
1724 sigaction(SIGILL
, &sa_old
, NULL
);
1727 /* Generate global QEMU prologue and epilogue code */
1728 static void tcg_target_qemu_prologue(TCGContext
*s
)
1732 /* reserve some stack space, also for TCG temps. */
1733 frame_size
= ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1734 + TCG_STATIC_CALL_ARGS_SIZE
1735 + CPU_TEMP_BUF_NLONGS
* sizeof(long);
1736 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1737 ~(TCG_TARGET_STACK_ALIGN
- 1);
1738 tcg_set_frame(s
, TCG_REG_SP
, ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1739 + TCG_STATIC_CALL_ARGS_SIZE
,
1740 CPU_TEMP_BUF_NLONGS
* sizeof(long));
1743 tcg_out_addi(s
, TCG_REG_SP
, -frame_size
);
1744 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1745 tcg_out_st(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1746 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1749 /* Call generated code */
1750 tcg_out_opc_reg(s
, OPC_JR
, 0, tcg_target_call_iarg_regs
[1], 0);
1751 tcg_out_mov(s
, TCG_TYPE_PTR
, TCG_AREG0
, tcg_target_call_iarg_regs
[0]);
1752 tb_ret_addr
= s
->code_ptr
;
1755 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1756 tcg_out_ld(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1757 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1760 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_RA
, 0);
1761 tcg_out_addi(s
, TCG_REG_SP
, frame_size
);
1764 static void tcg_target_init(TCGContext
*s
)
1766 tcg_target_detect_isa();
1767 tcg_regset_set(tcg_target_available_regs
[TCG_TYPE_I32
], 0xffffffff);
1768 tcg_regset_set(tcg_target_call_clobber_regs
,
1785 tcg_regset_clear(s
->reserved_regs
);
1786 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ZERO
); /* zero register */
1787 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K0
); /* kernel use only */
1788 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K1
); /* kernel use only */
1789 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_AT
); /* internal use */
1790 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_T0
); /* internal use */
1791 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_RA
); /* return address */
1792 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_SP
); /* stack pointer */
1793 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_GP
); /* global pointer */
1795 tcg_add_target_add_op_defs(mips_op_defs
);