/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {

static const int tcg_target_reg_alloc_order[] = {

static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_O0,
    TCG_REG_O1,
    TCG_REG_O2,
    TCG_REG_O3,
    TCG_REG_O4,
    TCG_REG_O5,
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_O0,
    TCG_REG_O1,
};
static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
{
    return (val << ((sizeof(tcg_target_long) * 8 - bits))
            >> (sizeof(tcg_target_long) * 8 - bits)) == val;
}
static inline int check_fit_i32(uint32_t val, unsigned int bits)
{
    return ((val << (32 - bits)) >> (32 - bits)) == val;
}
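/* Both helpers rely on the same shift trick: shifting the value up so that
   only the low BITS bits survive and then shifting it back reproduces the
   value exactly when it fits in a BITS-bit immediate.  check_fit_tl operates
   on the signed tcg_target_long, so the right shift is arithmetic and the
   test is for a *signed* fit (e.g. check_fit_tl(-4096, 13) holds while
   check_fit_tl(0x1fff, 13) does not, matching the simm13 field); check_fit_i32
   uses an unsigned value and therefore tests for an unsigned fit.  */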
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    switch (type) {
    case R_SPARC_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_SPARC_WDISP22:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 22))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff) | value;
        break;
    case R_SPARC_WDISP19:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 19))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x7ffff) | value;
        break;
    default:
        tcg_abort();
    }
}
/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        /* the softmmu helper arguments are passed in %o0-%o2 */
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O0);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O1);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O2);
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S11;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S13;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;

    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11))
        return 1;
    else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13))
        return 1;
    else
        return 0;
}
#define INSN_OP(x)  ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x)  ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)
#define INSN_ASI(x) ((x) << 5)

#define INSN_IMM11(x) ((1 << 13) | ((x) & 0x7ff))
#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF19(x) (((x) >> 2) & 0x07ffff)
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)

#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
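/* These macros place the individual fields of a 32-bit SPARC instruction
   word (op, op2/op3, rd, rs1, rs2 or simm13, ASI, branch condition and
   displacement).  The opcode templates defined below are ORed with them
   by the emitters; for example ARITH_ADD | INSN_RD(rd) | INSN_RS1(rs1) |
   INSN_RS2(rs2) encodes "add %rs1, %rs2, %rd", and substituting
   INSN_IMM13(x) for INSN_RS2(x) selects the immediate form.  */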
#define BA (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))

#define MOVCC_ICC (1 << 18)
#define MOVCC_XCC (1 << 18 | 1 << 12)

#define ARITH_ADD (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_ADDCC (INSN_OP(2) | INSN_OP3(0x10))
#define ARITH_AND (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_OR (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
#define ARITH_XOR (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB (INSN_OP(2) | INSN_OP3(0x04))
#define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x08))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))
#define ARITH_MOVCC (INSN_OP(2) | INSN_OP3(0x2c))

#define SHIFT_SLL (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

#define RDY (INSN_OP(2) | INSN_OP3(0x28) | INSN_RS1(0))
#define WRY (INSN_OP(2) | INSN_OP3(0x30) | INSN_RD(0))
#define JMPL (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI (INSN_OP(0) | INSN_OP2(0x4))
#define CALL INSN_OP(1)
#define LDUB (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW (INSN_OP(3) | INSN_OP3(0x08))
#define LDX (INSN_OP(3) | INSN_OP3(0x0b))
#define STB (INSN_OP(3) | INSN_OP3(0x05))
#define STH (INSN_OP(3) | INSN_OP3(0x06))
#define STW (INSN_OP(3) | INSN_OP3(0x04))
#define STX (INSN_OP(3) | INSN_OP3(0x0e))
#define LDUBA (INSN_OP(3) | INSN_OP3(0x11))
#define LDSBA (INSN_OP(3) | INSN_OP3(0x19))
#define LDUHA (INSN_OP(3) | INSN_OP3(0x12))
#define LDSHA (INSN_OP(3) | INSN_OP3(0x1a))
#define LDUWA (INSN_OP(3) | INSN_OP3(0x10))
#define LDSWA (INSN_OP(3) | INSN_OP3(0x18))
#define LDXA (INSN_OP(3) | INSN_OP3(0x1b))
#define STBA (INSN_OP(3) | INSN_OP3(0x15))
#define STHA (INSN_OP(3) | INSN_OP3(0x16))
#define STWA (INSN_OP(3) | INSN_OP3(0x14))
#define STXA (INSN_OP(3) | INSN_OP3(0x1e))
#ifndef ASI_PRIMARY_LITTLE
#define ASI_PRIMARY_LITTLE 0x88
#endif
static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
                                 int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_RS2(rs2));
}
static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1,
                                  uint32_t offset, int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_IMM13(offset));
}
static void tcg_out_arithc(TCGContext *s, int rd, int rs1,
                           int val2, int val2const, int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1)
              | (val2const ? INSN_IMM13(val2) : INSN_RS2(val2)));
}
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_arith(s, ret, arg, TCG_REG_G0, ARITH_OR);
}
static inline void tcg_out_sethi(TCGContext *s, int ret, uint32_t arg)
{
    tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));
}
static inline void tcg_out_movi_imm13(TCGContext *s, int ret, uint32_t arg)
{
    tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);
}
static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
{
    if (check_fit_tl(arg, 13))
        tcg_out_movi_imm13(s, ret, arg);
    else {
        tcg_out_sethi(s, ret, arg);
        if (arg & 0x3ff)
            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
    }
}
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    /* All 32-bit constants, as well as 64-bit constants with
       no high bits set go through movi_imm32.  */
    if (TCG_TARGET_REG_BITS == 32
        || type == TCG_TYPE_I32
        || (arg & ~(tcg_target_long)0xffffffff) == 0) {
        tcg_out_movi_imm32(s, ret, arg);
    } else if (check_fit_tl(arg, 13)) {
        /* A 13-bit constant sign-extended to 64-bits.  */
        tcg_out_movi_imm13(s, ret, arg);
    } else if (check_fit_tl(arg, 32)) {
        /* A 32-bit constant sign-extended to 64-bits.  */
        tcg_out_sethi(s, ret, ~arg);
        tcg_out_arithi(s, ret, ret, (arg & 0x3ff) | -0x400, ARITH_XOR);
    } else {
        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> (TCG_TARGET_REG_BITS / 2));
        tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
        tcg_out_movi_imm32(s, ret, arg);
        tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
    }
}
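/* Two of the branches above deserve a note.  For a negative constant that
   fits in 32 bits, SETHI loads the top 22 bits of ~arg (zero-extended), and
   XOR-ing with the negative 13-bit immediate ((arg & 0x3ff) | -0x400) flips
   every bit from bit 10 upward, which restores the intended low 32 bits and
   sets the upper 32 bits to all-ones.  For an arbitrary 64-bit constant the
   two halves are built separately (the high half in the reserved scratch
   register %i4), the high half is shifted left by 32 with SLLX, and the two
   are ORed together.  */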
static inline void tcg_out_ld_raw(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    tcg_out_sethi(s, ret, arg);
    tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
}
static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    if (!check_fit_tl(arg, 10))
        tcg_out_movi(s, TCG_TYPE_PTR, ret, arg & ~0x3ffULL);
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_out32(s, LDX | INSN_RD(ret) | INSN_RS1(ret) |
                  INSN_IMM13(arg & 0x3ff));
    } else {
        tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
                  INSN_IMM13(arg & 0x3ff));
    }
}
static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset, int op)
{
    if (check_fit_tl(offset, 13))
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
                  INSN_IMM13(offset));
    else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
                  INSN_RS2(addr));
    }
}
static inline void tcg_out_ldst_asi(TCGContext *s, int ret, int addr,
                                    int offset, int op, int asi)
{
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
    tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
              INSN_ASI(asi) | INSN_RS2(addr));
}
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, ret, arg1, arg2, LDUW);
    else
        tcg_out_ldst(s, ret, arg1, arg2, LDX);
}
static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, arg, arg1, arg2, STW);
    else
        tcg_out_ldst(s, arg, arg1, arg2, STX);
}
static inline void tcg_out_sety(TCGContext *s, int rs)
{
    tcg_out32(s, WRY | INSN_RS1(TCG_REG_G0) | INSN_RS2(rs));
}
static inline void tcg_out_rdy(TCGContext *s, int rd)
{
    tcg_out32(s, RDY | INSN_RD(rd));
}
static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0) {
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
        else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, val);
            tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_ADD);
        }
    }
}
static inline void tcg_out_andi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0) {
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, reg, reg, val, ARITH_AND);
        else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, val);
            tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_AND);
        }
    }
}
static void tcg_out_div32(TCGContext *s, int rd, int rs1,
                          int val2, int val2const, int uns)
{
    /* Load Y with the sign/zero extension of RS1 to 64-bits.  */
    if (uns) {
        tcg_out_sety(s, TCG_REG_G0);
    } else {
        tcg_out_arithi(s, TCG_REG_I5, rs1, 31, SHIFT_SRA);
        tcg_out_sety(s, TCG_REG_I5);
    }

    tcg_out_arithc(s, rd, rs1, val2, val2const,
                   uns ? ARITH_UDIV : ARITH_SDIV);
}
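/* The 32-bit UDIV/SDIV instructions divide the 64-bit quantity (Y:rs1) by
   the second operand, so Y must first hold zero (unsigned) or a copy of the
   sign bit of rs1 (signed); that is what the WRY above prepares.  */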
static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out_sethi(s, TCG_REG_G0, 0);
}
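/* "sethi 0, %g0" is the architecturally defined nop encoding on SPARC,
   which is why tcg_out_nop simply reuses tcg_out_sethi with %g0.  */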
static void tcg_out_branch_i32(TCGContext *s, int opc, int label_index)
{
    int32_t val;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2)
                      | INSN_OFF22(l->u.value - (unsigned long)s->code_ptr)));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP22, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2) | 0));
    }
}
#if TCG_TARGET_REG_BITS == 64
static void tcg_out_branch_i64(TCGContext *s, int opc, int label_index)
{
    int32_t val;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
                      (0x5 << 19) |
                      INSN_OFF19(l->u.value - (unsigned long)s->code_ptr)));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP19, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x1) |
                      (0x5 << 19) | 0));
    }
}
#endif
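/* The 64-bit variant emits a BPcc instruction (op2 = 1): the extra
   (0x5 << 19) sets cc1:cc0 = 10 to select %xcc plus the predict-taken bit,
   and the 19-bit word displacement is either filled in immediately or
   patched later through the R_SPARC_WDISP19 relocation.  */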
static const uint8_t tcg_cond_to_bcond[10] = {
    [TCG_COND_EQ] = COND_E,
    [TCG_COND_NE] = COND_NE,
    [TCG_COND_LT] = COND_L,
    [TCG_COND_GE] = COND_GE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_G,
    [TCG_COND_LTU] = COND_CS,
    [TCG_COND_GEU] = COND_CC,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_GU,
};
static void tcg_out_cmp(TCGContext *s, TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_arithc(s, TCG_REG_G0, c1, c2, c2const, ARITH_SUBCC);
}
static void tcg_out_brcond_i32(TCGContext *s, int cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
                               int label_index)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i32(s, tcg_cond_to_bcond[cond], label_index);
    tcg_out_nop(s);
}
#if TCG_TARGET_REG_BITS == 64
static void tcg_out_brcond_i64(TCGContext *s, int cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
                               int label_index)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i64(s, tcg_cond_to_bcond[cond], label_index);
    tcg_out_nop(s);
}
#endif
static void tcg_out_brcond2_i32(TCGContext *s, int cond,
                                TCGArg al, TCGArg ah,
                                TCGArg bl, int blconst,
                                TCGArg bh, int bhconst, int label_dest)
{
    int cc, label_next = gen_new_label();

    tcg_out_cmp(s, ah, bh, bhconst);

    /* Note that we fill one of the delay slots with the second compare.  */
    switch (cond) {
    case TCG_COND_EQ:
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_EQ], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        break;

    case TCG_COND_NE:
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        tcg_out_cmp(s, al, bl, blconst);
        tcg_out_branch_i32(s, cc, label_dest);
        break;

    default:
        /* ??? One could fairly easily special-case 64-bit unsigned
           compares against 32-bit zero-extended constants.  For instance,
           we know that (unsigned)AH < 0 is false and need not emit it.
           Similarly, (unsigned)AH > 0 being true implies AH != 0, so the
           second branch will never be taken.  */
        cc = INSN_COND(tcg_cond_to_bcond[cond], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        tcg_out_nop(s);
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = INSN_COND(tcg_cond_to_bcond[tcg_unsigned_cond(cond)], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        break;
    }
    tcg_out_nop(s);

    tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
}
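/* Double-word branch strategy used above: compare the high words first and
   branch if they already decide the condition; if they are unequal but do
   not decide it, fall through to label_next (condition false); only when
   the high words are equal is the low-word comparison emitted, with the
   unsigned flavour of the condition.  */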
static void tcg_out_setcond_i32(TCGContext *s, int cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)
{
    TCGArg t;

    /* For 32-bit comparisons, we can play games with ADDX/SUBX.  */
    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* X ^ Y is zero exactly when X == Y; test that result against 0.  */
        tcg_out_arithc(s, ret, c1, c2, c2const, ARITH_XOR);
        c1 = TCG_REG_G0, c2 = ret, c2const = 0;
        cond = (cond == TCG_COND_EQ ? TCG_COND_LEU : TCG_COND_LTU);
        break;

    case TCG_COND_GTU:
    case TCG_COND_LEU:
        /* A non-zero constant must be moved into a register first so that
           it can become the first operand of the swapped comparison.  */
        if (c2const && c2 != 0) {
            tcg_out_movi_imm13(s, TCG_REG_I5, c2);
            c2 = TCG_REG_I5;
        }
        t = c1, c1 = c2, c2 = t, c2const = 0;
        cond = tcg_swap_cond(cond);
        break;

    default:
        tcg_out_cmp(s, c1, c2, c2const);
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        tcg_out_movi_imm13(s, ret, 0);
        tcg_out32 (s, ARITH_MOVCC | INSN_RD(ret)
                   | INSN_RS1(tcg_cond_to_bcond[cond])
                   | MOVCC_ICC | INSN_IMM11(1));
#else
        t = gen_new_label();
        tcg_out_branch_i32(s, INSN_COND(tcg_cond_to_bcond[cond], 1), t);
        tcg_out_movi_imm13(s, ret, 1);
        tcg_out_movi_imm13(s, ret, 0);
        tcg_out_label(s, t, (tcg_target_long)s->code_ptr);
#endif
        return;
    }

    tcg_out_cmp(s, c1, c2, c2const);
    if (cond == TCG_COND_LTU) {
        tcg_out_arithi(s, ret, TCG_REG_G0, 0, ARITH_ADDX);
    } else {
        tcg_out_arithi(s, ret, TCG_REG_G0, -1, ARITH_SUBX);
    }
}
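/* The ADDX/SUBX endgame relies on the carry flag left by SUBCC: after
   "subcc c1, c2, %g0" the carry is set exactly when c1 < c2 unsigned, so
   "addx %g0, 0, ret" yields the LTU result directly, while
   "subx %g0, -1, ret" computes 1 - carry, i.e. the complementary result
   used for the remaining (GEU/LEU-derived) conditions.  */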
#if TCG_TARGET_REG_BITS == 64
static void tcg_out_setcond_i64(TCGContext *s, int cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_cmp(s, c1, c2, c2const);
    tcg_out_movi_imm13(s, ret, 0);
    tcg_out32 (s, ARITH_MOVCC | INSN_RD(ret)
               | INSN_RS1(tcg_cond_to_bcond[cond])
               | MOVCC_XCC | INSN_IMM11(1));
}
#endif
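/* In the MOVcc encoding the condition field occupies bits 14..17, the same
   range that INSN_RS1() shifts into, which is why tcg_cond_to_bcond[cond]
   is passed through INSN_RS1 here; MOVCC_XCC selects the 64-bit condition
   codes and INSN_IMM11(1) supplies the value moved when the condition
   holds, on top of the zero written beforehand.  */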
static void tcg_out_setcond2_i32(TCGContext *s, int cond, TCGArg ret,
                                 TCGArg al, TCGArg ah,
                                 TCGArg bl, int blconst,
                                 TCGArg bh, int bhconst)
{
    int lab;

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_setcond_i32(s, TCG_COND_EQ, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_EQ, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_AND);
        break;

    case TCG_COND_NE:
        tcg_out_setcond_i32(s, TCG_COND_NE, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_NE, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_OR);
        break;

    default:
        lab = gen_new_label();

        tcg_out_cmp(s, ah, bh, bhconst);
        tcg_out_branch_i32(s, INSN_COND(tcg_cond_to_bcond[cond], 1), lab);
        tcg_out_movi_imm13(s, ret, 1);
        tcg_out_branch_i32(s, INSN_COND(COND_NE, 1), lab);
        tcg_out_movi_imm13(s, ret, 0);

        tcg_out_setcond_i32(s, tcg_unsigned_cond(cond), ret, al, bl, blconst);

        tcg_out_label(s, lab, (tcg_target_long)s->code_ptr);
        break;
    }
}
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
              INSN_IMM13(-TCG_TARGET_STACK_MINFRAME));
    tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I0) |
              INSN_RS2(TCG_REG_G0));
    tcg_out_nop(s);
}
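/* The prologue is a single SAVE that opens a fresh register window and
   reserves the minimum stack frame, followed by a JMPL through %i0 (the
   first argument, i.e. the translated-block entry point, renamed from %o0
   by SAVE) with a nop in the delay slot.  The matching epilogue is the
   JMPL %i7+8 / RESTORE pair emitted for INDEX_op_exit_tb below.  */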
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static const void * const qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static const void * const qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
#if TARGET_LONG_BITS == 32
#define TARGET_LD_OP LDUW
#else
#define TARGET_LD_OP LDX
#endif

#if TARGET_PHYS_ADDR_BITS == 32
#define TARGET_ADDEND_LD_OP LDUW
#else
#define TARGET_ADDEND_LD_OP LDX
#endif
#if TCG_TARGET_REG_BITS == 64
#define HOST_LD_OP LDX
#define HOST_ST_OP STX
#define HOST_SLL_OP SHIFT_SLLX
#define HOST_SRA_OP SHIFT_SRAX
#else
#define HOST_LD_OP LDUW
#define HOST_ST_OP STW
#define HOST_SLL_OP SHIFT_SLL
#define HOST_SRA_OP SHIFT_SRA
#endif
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;

    arg0 = TCG_REG_O0;
    arg1 = TCG_REG_O1;
    arg2 = TCG_REG_O2;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
                   SHIFT_SRL);

    /* and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    /* and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_read));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);
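    /* Softmmu fast path: arg1 now points at the TLB entry for this page
       ((addr >> TARGET_PAGE_BITS) masked to the TLB size, scaled and added
       to env), arg0 holds the address with the in-page bits that must be
       zero for an aligned access, and the tag loaded into arg2 is compared
       against arg0.  On a match the branch patched in at label1 below skips
       the helper call and the access completes inline via the TLB addend.  */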
    /* will become:
       be label1
        or
       be,pt %xcc label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out_nop(s);

    /* mov (delay slot) */
    tcg_out_mov(s, arg0, addr_reg);

    /* mov */
    tcg_out_movi(s, TCG_TYPE_I32, arg1, mem_index);

    /* XXX: move that code at the end of the TB */
    /* qemu_ld_helper[s_bits](arg0, arg1) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_ld_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers; the store sits in the CALL's delay slot and the
       reload runs after the helper returns.  */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* data_reg = sign_extend(arg0) */
    switch (opc) {
    case 0 | 4:
        /* sll arg0, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, arg0, (int)sizeof(tcg_target_long) * 8 - 8,
                       HOST_SLL_OP);
        /* sra data_reg, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 8, HOST_SRA_OP);
        break;
    case 1 | 4:
        /* sll arg0, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, arg0,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SLL_OP);
        /* sra data_reg, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SRA_OP);
        break;
    case 2 | 4:
        /* sll arg0, 32, data_reg */
        tcg_out_arithi(s, data_reg, arg0, 32, HOST_SLL_OP);
        /* sra data_reg, 32, data_reg */
        tcg_out_arithi(s, data_reg, data_reg, 32, HOST_SRA_OP);
        break;
    case 0:
    case 1:
    case 2:
    case 3:
    default:
        /* mov */
        tcg_out_mov(s, data_reg, arg0);
        break;
    }

    /* will become:
       ba label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out_nop(s);

    /* nop (delay slot) */
    tcg_out_nop(s);

    /* label1: */
#if TARGET_LONG_BITS == 32
    /* be label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#else
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));
#endif

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_read), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
#else
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
#endif
#else
    arg0 = addr_reg;
#endif

    switch (opc) {
    case 0:
        /* ldub [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUB);
        break;
    case 0 | 4:
        /* ldsb [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSB);
        break;
    case 1:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUH);
#else
        /* lduha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 1 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSH);
#else
        /* ldsha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2:
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUW);
#else
        /* lduwa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2 | 4:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSW);
#else
        /* ldswa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 3:
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldx [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDX);
#else
        /* ldxa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDXA, ASI_PRIMARY_LITTLE);
#endif
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
#endif
}
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc;

    arg0 = TCG_REG_O0;
    arg1 = TCG_REG_O1;
    arg2 = TCG_REG_O2;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
                   SHIFT_SRL);

    /* and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
                   ARITH_AND);

    /* and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_write));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    /* will become:
       be label1
        or
       be,pt %xcc label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out_nop(s);

    /* mov (delay slot) */
    tcg_out_mov(s, arg0, addr_reg);

    /* mov */
    tcg_out_mov(s, arg1, data_reg);

    /* mov */
    tcg_out_movi(s, TCG_TYPE_I32, arg2, mem_index);

    /* XXX: move that code at the end of the TB */
    /* qemu_st_helper[s_bits](arg0, arg1, arg2) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_st_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* will become:
       ba label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out_nop(s);

    /* nop (delay slot) */
    tcg_out_nop(s);

    /* label1: */
#if TARGET_LONG_BITS == 32
    /* be label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
#else
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));
#endif

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_write), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
#else
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
#endif
#else
    arg0 = addr_reg;
#endif

    switch (opc) {
    case 0:
        /* stb data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STB);
        break;
    case 1:
#ifdef TARGET_WORDS_BIGENDIAN
        /* sth data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STH);
#else
        /* stha data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STHA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 2:
#ifdef TARGET_WORDS_BIGENDIAN
        /* stw data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STW);
#else
        /* stwa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STWA, ASI_PRIMARY_LITTLE);
#endif
        break;
    case 3:
#ifdef TARGET_WORDS_BIGENDIAN
        /* stx data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STX);
#else
        /* stxa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STXA, ASI_PRIMARY_LITTLE);
#endif
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
#endif
}
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
        tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
                  INSN_IMM13(8));
        tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
                  INSN_RS2(TCG_REG_G0));
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out_sethi(s, TCG_REG_I5, args[0] & 0xffffe000);
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_IMM13((args[0] & 0x1fff)));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
        } else {
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
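    /* With the direct jump method the sethi/jmpl pair above is rewritten
       later by tb_set_jmp_target() once the destination TB is known, and
       tb_jmp_offset records where to patch; the indirect method instead
       reloads the destination from the tb_next array on every execution.
       In both cases tb_next_offset marks the end of the jump sequence so
       the chain can be reset.  */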
    case INDEX_op_call:
        if (const_args[0])
            tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
                                   - (tcg_target_ulong)s->code_ptr) >> 2)
                                 & 0x3fffffff));
        else {
            tcg_out_ld_ptr(s, TCG_REG_I5,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
           global registers */
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_ST_OP);
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_LD_OP);
        break;
    case INDEX_op_jmp:
    case INDEX_op_br:
        tcg_out_branch_i32(s, COND_A, args[0]);
        tcg_out_nop(s);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

#if TCG_TARGET_REG_BITS == 64
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)
#else
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32)
#endif

    OP_32_64(ld8u):
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        break;
    OP_32_64(ld8s):
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        break;
    OP_32_64(ld16u):
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        break;
    OP_32_64(ld16s):
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
        break;
    case INDEX_op_ld_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_ld32u_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        break;
    OP_32_64(st8):
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        break;
    OP_32_64(st16):
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
        break;
    case INDEX_op_st_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_st32_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
        break;

    case INDEX_op_shl_i32:
        c = SHIFT_SLL;
        goto gen_arith;
    case INDEX_op_shr_i32:
        c = SHIFT_SRL;
        goto gen_arith;
    case INDEX_op_sar_i32:
        c = SHIFT_SRA;
        goto gen_arith;
    case INDEX_op_mul_i32:
        c = ARITH_UMUL;
        goto gen_arith;

    case INDEX_op_div_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 0);
        break;
    case INDEX_op_divu_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 1);
        break;

    case INDEX_op_rem_i32:
    case INDEX_op_remu_i32:
        tcg_out_div32(s, TCG_REG_I5, args[1], args[2], const_args[2],
                      opc == INDEX_op_remu_i32);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
                       ARITH_UMUL);
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond_i32(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond_i32(s, args[3], args[0], args[1],
                            args[2], const_args[2]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2_i32(s, args[4], args[0], args[1],
                            args[2], const_args[2],
                            args[3], const_args[3], args[5]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2_i32(s, args[5], args[0], args[1], args[2],
                             args[3], const_args[3],
                             args[4], const_args[4]);
        break;
    case INDEX_op_add2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
                       ARITH_ADDCC);
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
                       ARITH_ADDX);
        break;
    case INDEX_op_sub2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
                       ARITH_SUBCC);
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
                       ARITH_SUBX);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_arithc(s, args[0], args[2], args[3], const_args[3],
                       ARITH_UMUL);
        tcg_out_rdy(s, args[1]);
        break;
#endif

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
        break;
    case INDEX_op_shl_i64:
        c = SHIFT_SLLX;
        goto gen_arith;
    case INDEX_op_shr_i64:
        c = SHIFT_SRLX;
        goto gen_arith;
    case INDEX_op_sar_i64:
        c = SHIFT_SRAX;
        goto gen_arith;
    case INDEX_op_mul_i64:
        c = ARITH_MULX;
        goto gen_arith;
    case INDEX_op_div_i64:
        c = ARITH_SDIVX;
        goto gen_arith;
    case INDEX_op_divu_i64:
        c = ARITH_UDIVX;
        goto gen_arith;
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i64:
        tcg_out_arithc(s, TCG_REG_I5, args[1], args[2], const_args[2],
                       opc == INDEX_op_rem_i64 ? ARITH_SDIVX : ARITH_UDIVX);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
                       ARITH_MULX);
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);
        break;
    case INDEX_op_ext32s_i64:
        if (const_args[1]) {
            tcg_out_movi(s, TCG_TYPE_I64, args[0], (int32_t)args[1]);
        } else {
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRA);
        }
        break;
    case INDEX_op_ext32u_i64:
        if (const_args[1]) {
            tcg_out_movi_imm32(s, args[0], args[1]);
        } else {
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRL);
        }
        break;

    case INDEX_op_brcond_i64:
        tcg_out_brcond_i64(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond_i64(s, args[3], args[0], args[1],
                            args[2], const_args[2]);
        break;

    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;
#endif

    gen_arith:
        tcg_out_arithc(s, args[0], args[1], args[2], const_args[2], c);
        break;

    gen_arith1:
        tcg_out_arithc(s, args[0], TCG_REG_G0, args[1], const_args[1], c);
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
}
static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div_i32, { "r", "r", "rJ" } },
    { INDEX_op_divu_i32, { "r", "r", "rJ" } },
    { INDEX_op_rem_i32, { "r", "r", "rJ" } },
    { INDEX_op_remu_i32, { "r", "r", "rJ" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_neg_i32, { "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "rJ" } },
    { INDEX_op_setcond_i32, { "r", "r", "rJ" } },

#if TCG_TARGET_REG_BITS == 32
    { INDEX_op_brcond2_i32, { "r", "r", "rJ", "rJ" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_add2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "rJ" } },
#endif

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },
    { INDEX_op_qemu_ld64, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div_i64, { "r", "r", "rJ" } },
    { INDEX_op_divu_i64, { "r", "r", "rJ" } },
    { INDEX_op_rem_i64, { "r", "r", "rJ" } },
    { INDEX_op_remu_i64, { "r", "r", "rJ" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_neg_i64, { "r", "rJ" } },

    { INDEX_op_ext32s_i64, { "r", "ri" } },
    { INDEX_op_ext32u_i64, { "r", "ri" } },

    { INDEX_op_brcond_i64, { "r", "rJ" } },
    { INDEX_op_setcond_i64, { "r", "r", "rJ" } },
#endif
    { -1 },
};
void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
#endif
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_G1) |
                     (1 << TCG_REG_G2) |
                     (1 << TCG_REG_G3) |
                     (1 << TCG_REG_G4) |
                     (1 << TCG_REG_G5) |
                     (1 << TCG_REG_G6) |
                     (1 << TCG_REG_G7) |
                     (1 << TCG_REG_O0) |
                     (1 << TCG_REG_O1) |
                     (1 << TCG_REG_O2) |
                     (1 << TCG_REG_O3) |
                     (1 << TCG_REG_O4) |
                     (1 << TCG_REG_O5) |
                     (1 << TCG_REG_O7));
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I4); // for internal use
#endif
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); // for internal use
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
    tcg_add_target_add_op_defs(sparc_op_defs);
}