2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/* Register description tables for the SPARC backend.
   NOTE(review): the initializer contents of all four tables are missing
   from this extract; only the declarations survive.  */
/* Human-readable register names, indexed by TCG register number.  */
25 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
/* Order in which the register allocator hands out registers.  */
60 static const int tcg_target_reg_alloc_order
[] = {
/* Registers carrying the first six integer call arguments
   (SPARC calling convention — confirm against tcg-target.h).  */
76 static const int tcg_target_call_iarg_regs
[6] = {
/* Registers carrying integer call results (up to two).  */
85 static const int tcg_target_call_oarg_regs
[2] = {
90 static inline int check_fit_tl(tcg_target_long val
, unsigned int bits
)
92 return (val
<< ((sizeof(tcg_target_long
) * 8 - bits
))
93 >> (sizeof(tcg_target_long
) * 8 - bits
)) == val
;
/*
 * Return non-zero if the 32-bit value VAL survives truncation to a
 * field of width BITS (shift out the high bits, shift back, compare).
 * NOTE(review): assumes 1 <= bits <= 32; bits == 0 would shift by 32.
 */
static inline int check_fit_i32(uint32_t val, unsigned int bits)
{
    const unsigned int drop = 32 - bits;

    return ((val << drop) >> drop) == val;
}
/* Patch a relocation of the given TYPE at CODE_PTR with VALUE/ADDEND.
   NOTE(review): the switch skeleton (the R_SPARC_32 case label, breaks,
   default, and some value adjustments) is missing from this extract.  */
101 static void patch_reloc(uint8_t *code_ptr
, int type
,
102 tcg_target_long value
, tcg_target_long addend
)
/* Absolute 32-bit relocation: the value must fit in 32 bits.  */
107 if (value
!= (uint32_t)value
)
109 *(uint32_t *)code_ptr
= value
;
/* 22-bit PC-relative branch displacement.  */
111 case R_SPARC_WDISP22
:
112 value
-= (long)code_ptr
;
/* The (word) displacement must fit in 22 bits.  */
114 if (!check_fit_tl(value
, 22))
/* Merge the displacement into the low 22 bits of the instruction.  */
116 *(uint32_t *)code_ptr
= ((*(uint32_t *)code_ptr
) & ~0x3fffff) | value
;
123 /* maximum number of register used for input function arguments */
/* NOTE(review): the body (presumably "return 6;" to match
   tcg_target_call_iarg_regs[6]) is missing from this extract.  */
124 static inline int tcg_target_get_call_iarg_regs_count(int flags
)
129 /* parse target specific constraints */
/* Translate one constraint letter from *pct_str into ct->ct flags.
   NOTE(review): the switch skeleton and the 'r'/'I'/'J' case labels are
   missing from this extract; only the case bodies survive.  */
130 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
137 case 'L': /* qemu_ld/st constraint */
138 ct
->ct
|= TCG_CT_REG
;
139 tcg_regset_set32(ct
->u
.regs
, 0, 0xffffffff);
/* Reserve I0/I1: they are clobbered around the qemu_ld/st helper call.  */
140 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_I0
);
141 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_I1
);
/* 11-bit signed immediate constraint.  */
144 ct
->ct
|= TCG_CT_CONST_S11
;
/* 13-bit signed immediate constraint (SPARC simm13).  */
147 ct
->ct
|= TCG_CT_CONST_S13
;
157 /* test if a constant matches the constraint */
/* NOTE(review): the "int ct = arg_ct->ct;" setup and the return
   statements of each branch are missing from this extract.  */
158 static inline int tcg_target_const_match(tcg_target_long val
,
159 const TCGArgConstraint
*arg_ct
)
/* Any constant is acceptable.  */
164 if (ct
& TCG_CT_CONST
)
/* 11-bit signed immediate.  */
166 else if ((ct
& TCG_CT_CONST_S11
) && check_fit_tl(val
, 11))
/* 13-bit signed immediate (simm13).  */
168 else if ((ct
& TCG_CT_CONST_S13
) && check_fit_tl(val
, 13))
/* SPARC instruction field encoders: op/op2/op3/opf select the format,
   rd/rs1/rs2 are register fields, asi the alternate-space identifier.  */
174 #define INSN_OP(x) ((x) << 30)
175 #define INSN_OP2(x) ((x) << 22)
176 #define INSN_OP3(x) ((x) << 19)
177 #define INSN_OPF(x) ((x) << 5)
178 #define INSN_RD(x) ((x) << 25)
179 #define INSN_RS1(x) ((x) << 14)
180 #define INSN_RS2(x) (x)
181 #define INSN_ASI(x) ((x) << 5)
/* 13-bit signed immediate (bit 13 set selects the immediate form);
   22-bit word-aligned branch offset.  */
183 #define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
184 #define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)
/* Conditional-branch condition and annul fields.  */
186 #define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
/* "ba" — branch always.  */
203 #define BA (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))
/* Format-3 ALU operations.  */
205 #define ARITH_ADD (INSN_OP(2) | INSN_OP3(0x00))
206 #define ARITH_AND (INSN_OP(2) | INSN_OP3(0x01))
207 #define ARITH_OR (INSN_OP(2) | INSN_OP3(0x02))
208 #define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
209 #define ARITH_XOR (INSN_OP(2) | INSN_OP3(0x03))
210 #define ARITH_SUB (INSN_OP(2) | INSN_OP3(0x04))
211 #define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
212 #define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x10))
213 #define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
214 #define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
215 #define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
216 #define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
217 #define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
218 #define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
219 #define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))
/* 32-bit shifts; the X variants (bit 12 set) are 64-bit shifts.  */
221 #define SHIFT_SLL (INSN_OP(2) | INSN_OP3(0x25))
222 #define SHIFT_SRL (INSN_OP(2) | INSN_OP3(0x26))
223 #define SHIFT_SRA (INSN_OP(2) | INSN_OP3(0x27))
225 #define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
226 #define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
227 #define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))
/* Control and window-management instructions.  */
229 #define WRY (INSN_OP(2) | INSN_OP3(0x30))
230 #define JMPL (INSN_OP(2) | INSN_OP3(0x38))
231 #define SAVE (INSN_OP(2) | INSN_OP3(0x3c))
232 #define RESTORE (INSN_OP(2) | INSN_OP3(0x3d))
233 #define SETHI (INSN_OP(0) | INSN_OP2(0x4))
234 #define CALL INSN_OP(1)
/* Loads and stores (format 3, op = 3).  */
235 #define LDUB (INSN_OP(3) | INSN_OP3(0x01))
236 #define LDSB (INSN_OP(3) | INSN_OP3(0x09))
237 #define LDUH (INSN_OP(3) | INSN_OP3(0x02))
238 #define LDSH (INSN_OP(3) | INSN_OP3(0x0a))
239 #define LDUW (INSN_OP(3) | INSN_OP3(0x00))
240 #define LDSW (INSN_OP(3) | INSN_OP3(0x08))
241 #define LDX (INSN_OP(3) | INSN_OP3(0x0b))
242 #define STB (INSN_OP(3) | INSN_OP3(0x05))
243 #define STH (INSN_OP(3) | INSN_OP3(0x06))
244 #define STW (INSN_OP(3) | INSN_OP3(0x04))
245 #define STX (INSN_OP(3) | INSN_OP3(0x0e))
/* Alternate-space variants, used for little-endian accesses.  */
246 #define LDUBA (INSN_OP(3) | INSN_OP3(0x11))
247 #define LDSBA (INSN_OP(3) | INSN_OP3(0x19))
248 #define LDUHA (INSN_OP(3) | INSN_OP3(0x12))
249 #define LDSHA (INSN_OP(3) | INSN_OP3(0x1a))
250 #define LDUWA (INSN_OP(3) | INSN_OP3(0x10))
251 #define LDSWA (INSN_OP(3) | INSN_OP3(0x18))
252 #define LDXA (INSN_OP(3) | INSN_OP3(0x1b))
253 #define STBA (INSN_OP(3) | INSN_OP3(0x15))
254 #define STHA (INSN_OP(3) | INSN_OP3(0x16))
255 #define STWA (INSN_OP(3) | INSN_OP3(0x14))
256 #define STXA (INSN_OP(3) | INSN_OP3(0x1e))
/* Fallback ASI value for primary little-endian address space.  */
258 #ifndef ASI_PRIMARY_LITTLE
259 #define ASI_PRIMARY_LITTLE 0x88
/* Emit a register-register ALU instruction: rd = rs1 OP rs2.
   NOTE(review): the "int op" parameter and the INSN_RS2(rs2) tail of
   the tcg_out32 call are missing from this extract.  */
262 static inline void tcg_out_arith(TCGContext
*s
, int rd
, int rs1
, int rs2
,
265 tcg_out32(s
, op
| INSN_RD(rd
) | INSN_RS1(rs1
) |
/* Emit an ALU instruction with a 13-bit signed immediate:
   rd = rs1 OP simm13(offset).
   NOTE(review): the "int op" parameter and the INSN_IMM13(offset) tail
   of the tcg_out32 call are missing from this extract.  */
269 static inline void tcg_out_arithi(TCGContext
*s
, int rd
, int rs1
, int offset
,
272 tcg_out32(s
, op
| INSN_RD(rd
) | INSN_RS1(rs1
) |
276 static inline void tcg_out_mov(TCGContext
*s
, int ret
, int arg
)
278 tcg_out_arith(s
, ret
, arg
, TCG_REG_G0
, ARITH_OR
);
281 static inline void tcg_out_sethi(TCGContext
*s
, int ret
, uint32_t arg
)
283 tcg_out32(s
, SETHI
| INSN_RD(ret
) | ((arg
& 0xfffffc00) >> 10));
286 static inline void tcg_out_movi_imm13(TCGContext
*s
, int ret
, uint32_t arg
)
288 tcg_out_arithi(s
, ret
, TCG_REG_G0
, arg
, ARITH_OR
);
/* Load an arbitrary 32-bit constant into RET: either a single
   "or %g0, simm13" when it fits, or sethi + or of the low 10 bits.
   NOTE(review): the else branch's braces and a guard around the final
   "or" are missing from this extract.  */
291 static inline void tcg_out_movi_imm32(TCGContext
*s
, int ret
, uint32_t arg
)
293 if (check_fit_i32(arg
, 13))
294 tcg_out_movi_imm13(s
, ret
, arg
);
296 tcg_out_sethi(s
, ret
, arg
);
/* OR in the low 10 bits that sethi cleared.  */
298 tcg_out_arithi(s
, ret
, ret
, arg
& 0x3ff, ARITH_OR
);
/* Load an arbitrary tcg_target_long constant.  On 64-bit sparc v9 a
   value with bits set above 32 is built in two 32-bit halves using
   TCG_REG_I4 as scratch; otherwise a plain 32-bit movi suffices.
   NOTE(review): the #else/#endif structure is missing from this
   extract.  */
302 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
303 int ret
, tcg_target_long arg
)
305 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
306 if (!check_fit_tl(arg
, 32) && (arg
& ~0xffffffffULL
) != 0) {
/* High half into scratch I4, shifted into place...  */
307 tcg_out_movi_imm32(s
, TCG_REG_I4
, arg
>> 32);
308 tcg_out_arithi(s
, TCG_REG_I4
, TCG_REG_I4
, 32, SHIFT_SLLX
);
/* ...low half into ret, then combine.  */
309 tcg_out_movi_imm32(s
, ret
, arg
);
310 tcg_out_arith(s
, ret
, ret
, TCG_REG_I4
, ARITH_OR
);
/* Fits in 32 bits: a single 32-bit constant load.  */
313 tcg_out_movi_imm32(s
, ret
, arg
);
/* Load a 32-bit value from an absolute address: sethi the high bits
   into RET, then "lduw [ret + low10], ret".
   NOTE(review): the "tcg_target_long arg" parameter line is missing
   from this extract.  */
316 static inline void tcg_out_ld_raw(TCGContext
*s
, int ret
,
319 tcg_out_sethi(s
, ret
, arg
);
320 tcg_out32(s
, LDUW
| INSN_RD(ret
) | INSN_RS1(ret
) |
321 INSN_IMM13(arg
& 0x3ff));
/* Load a pointer-sized value from an absolute address: LDX on 64-bit
   sparc v9, LDUW otherwise.
   NOTE(review): the "tcg_target_long arg" parameter, the else branch
   of the fit check, and the #else/#endif lines are missing from this
   extract.  */
324 static inline void tcg_out_ld_ptr(TCGContext
*s
, int ret
,
/* If the address does not fit in 10 bits, materialize the aligned
   base in RET first.  */
327 if (!check_fit_tl(arg
, 10))
328 tcg_out_movi(s
, TCG_TYPE_PTR
, ret
, arg
& ~0x3ffULL
);
329 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
330 tcg_out32(s
, LDX
| INSN_RD(ret
) | INSN_RS1(ret
) |
331 INSN_IMM13(arg
& 0x3ff));
333 tcg_out32(s
, LDUW
| INSN_RD(ret
) | INSN_RS1(ret
) |
334 INSN_IMM13(arg
& 0x3ff));
/* Emit load/store OP of RET at [ADDR + OFFSET].  A 13-bit offset is
   encoded directly; otherwise it is materialized in scratch TCG_REG_I5
   and used as an index register.
   NOTE(review): the INSN_IMM13/INSN_RS2 tails of both tcg_out32 calls
   and the else keyword are missing from this extract.  */
338 static inline void tcg_out_ldst(TCGContext
*s
, int ret
, int addr
, int offset
, int op
)
340 if (check_fit_tl(offset
, 13))
341 tcg_out32(s
, op
| INSN_RD(ret
) | INSN_RS1(addr
) |
/* Offset too large: load it into I5 and use register addressing.  */
344 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_I5
, offset
);
345 tcg_out32(s
, op
| INSN_RD(ret
) | INSN_RS1(TCG_REG_I5
) |
/* Emit an alternate-space load/store (used for byte-swapped accesses):
   OFFSET goes through scratch TCG_REG_I5, ADDR is the index register,
   and ASI selects the address space.  */
350 static inline void tcg_out_ldst_asi(TCGContext
*s
, int ret
, int addr
,
351 int offset
, int op
, int asi
)
353 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_I5
, offset
);
354 tcg_out32(s
, op
| INSN_RD(ret
) | INSN_RS1(TCG_REG_I5
) |
355 INSN_ASI(asi
) | INSN_RS2(addr
));
/* TCG load hook: 32-bit types use LDUW, everything else LDX.
   NOTE(review): braces/else are missing from this extract.  */
358 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, int ret
,
359 int arg1
, tcg_target_long arg2
)
361 if (type
== TCG_TYPE_I32
)
362 tcg_out_ldst(s
, ret
, arg1
, arg2
, LDUW
);
364 tcg_out_ldst(s
, ret
, arg1
, arg2
, LDX
);
/* TCG store hook: 32-bit types use STW, everything else STX.
   NOTE(review): braces/else are missing from this extract.  */
367 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, int arg
,
368 int arg1
, tcg_target_long arg2
)
370 if (type
== TCG_TYPE_I32
)
371 tcg_out_ldst(s
, arg
, arg1
, arg2
, STW
);
373 tcg_out_ldst(s
, arg
, arg1
, arg2
, STX
);
/* Write VAL to the %y register (used before 32-bit div).  Only 0 and
   -1 are supported as immediates; anything else is reported as
   unimplemented.
   NOTE(review): the else keyword and any abort after the fprintf are
   missing from this extract.  */
376 static inline void tcg_out_sety(TCGContext
*s
, tcg_target_long val
)
378 if (val
== 0 || val
== -1)
379 tcg_out32(s
, WRY
| INSN_IMM13(val
));
381 fprintf(stderr
, "unimplemented sety %ld\n", (long)val
);
/* Add constant VAL to REG in place: immediate add when VAL fits in
   simm13, otherwise via scratch TCG_REG_I5.
   NOTE(review): the guard around val == 0 and the else keyword are
   missing from this extract.  */
384 static inline void tcg_out_addi(TCGContext
*s
, int reg
, tcg_target_long val
)
387 if (check_fit_tl(val
, 13))
388 tcg_out_arithi(s
, reg
, reg
, val
, ARITH_ADD
);
/* Large constant: materialize in I5 and add register-register.  */
390 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_I5
, val
);
391 tcg_out_arith(s
, reg
, reg
, TCG_REG_I5
, ARITH_ADD
);
396 static inline void tcg_out_nop(TCGContext
*s
)
398 tcg_out_sethi(s
, TCG_REG_G0
, 0);
/* Emit a conditional branch (condition code OPC) to LABEL_INDEX.  If
   the label is already resolved, encode the 22-bit displacement
   directly; otherwise emit a relocation (R_SPARC_WDISP22) to be
   patched later.
   NOTE(review): the "if (l->has_value)"/else structure and the local
   declaration of val are missing from this extract; val appears
   computed but the emitted insn uses the expression inline.  */
401 static void tcg_out_branch(TCGContext
*s
, int opc
, int label_index
)
404 TCGLabel
*l
= &s
->labels
[label_index
];
407 val
= l
->u
.value
- (tcg_target_long
)s
->code_ptr
;
/* Resolved label: branch with immediate displacement.  */
408 tcg_out32(s
, (INSN_OP(0) | INSN_COND(opc
, 0) | INSN_OP2(0x2)
409 | INSN_OFF22(l
->u
.value
- (unsigned long)s
->code_ptr
)));
/* Unresolved label: record a WDISP22 relocation, emit placeholder.  */
411 tcg_out_reloc(s
, s
->code_ptr
, R_SPARC_WDISP22
, label_index
, 0);
412 tcg_out32(s
, (INSN_OP(0) | INSN_COND(opc
, 0) | INSN_OP2(0x2) | 0));
/* Map TCG comparison conditions to SPARC branch condition codes
   (unsigned comparisons use carry set/clear).  */
416 static const uint8_t tcg_cond_to_bcond
[10] = {
417 [TCG_COND_EQ
] = COND_E
,
418 [TCG_COND_NE
] = COND_NE
,
419 [TCG_COND_LT
] = COND_L
,
420 [TCG_COND_GE
] = COND_GE
,
421 [TCG_COND_LE
] = COND_LE
,
422 [TCG_COND_GT
] = COND_G
,
423 [TCG_COND_LTU
] = COND_CS
,
424 [TCG_COND_GEU
] = COND_CC
,
425 [TCG_COND_LEU
] = COND_LEU
,
426 [TCG_COND_GTU
] = COND_GU
,
/* Compare ARG1 against ARG2 (setting only the condition codes) and
   branch on COND to label_index.  Comparing against constant 0 uses
   orcc instead of subcc.
   NOTE(review): the label_index parameter line, braces/else, and the
   trailing nop for the delay slot are missing from this extract.  */
429 static void tcg_out_brcond(TCGContext
*s
, int cond
,
430 TCGArg arg1
, TCGArg arg2
, int const_arg2
,
433 if (const_arg2
&& arg2
== 0)
434 /* orcc %g0, r, %g0 */
435 tcg_out_arith(s
, TCG_REG_G0
, TCG_REG_G0
, arg1
, ARITH_ORCC
);
437 /* subcc r1, r2, %g0 */
438 tcg_out_arith(s
, TCG_REG_G0
, arg1
, arg2
, ARITH_SUBCC
);
439 tcg_out_branch(s
, tcg_cond_to_bcond
[cond
], label_index
);
443 /* Generate global QEMU prologue and epilogue code */
/* save to open a register window and carve the minimal stack frame,
   then jump to the translated-code entry point passed in %i0.
   NOTE(review): the delay-slot nop after the jmpl is missing from this
   extract.  */
444 void tcg_target_qemu_prologue(TCGContext
*s
)
446 tcg_out32(s
, SAVE
| INSN_RD(TCG_REG_O6
) | INSN_RS1(TCG_REG_O6
) |
447 INSN_IMM13(-TCG_TARGET_STACK_MINFRAME
));
448 tcg_out32(s
, JMPL
| INSN_RD(TCG_REG_G0
) | INSN_RS1(TCG_REG_I0
) |
449 INSN_RS2(TCG_REG_G0
));
/* Softmmu slow-path helpers, one per access size (1/2/4/8 bytes).
   NOTE(review): the table initializers are missing from this
   extract.  */
453 #if defined(CONFIG_SOFTMMU)
454 extern void __ldb_mmu(void);
455 extern void __ldw_mmu(void);
456 extern void __ldl_mmu(void);
457 extern void __ldq_mmu(void);
459 extern void __stb_mmu(void);
460 extern void __stw_mmu(void);
461 extern void __stl_mmu(void);
462 extern void __stq_mmu(void);
/* Indexed by s_bits (log2 of the access size).  */
465 static const void * const qemu_ld_helpers
[4] = {
472 static const void * const qemu_st_helpers
[4] = {
/* Emit a guest-memory load.  With CONFIG_SOFTMMU: inline TLB lookup,
   fast path does the access directly, slow path calls the
   qemu_ld_helpers[s_bits] routine and sign-extends the result as
   needed.  opc encodes the size (low bits) and signedness (bit 2).
   NOTE(review): many structural lines (argument unpacking, switch
   skeleton, branch emissions, #else/#endif pairs) are missing from
   this extract.  */
480 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
483 int addr_reg
, data_reg
, r0
, r1
, mem_index
, s_bits
, ld_op
;
484 #if defined(CONFIG_SOFTMMU)
485 uint8_t *label1_ptr
, *label2_ptr
;
496 #if TARGET_LONG_BITS == 32
502 #if defined(CONFIG_SOFTMMU)
/* TLB lookup: index = (vaddr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE-1),
   compare the page-aligned address against tlb_table[...].addr_read.  */
503 /* srl addr_reg, x, r1 */
504 tcg_out_arithi(s
, r1
, addr_reg
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
,
506 /* and addr_reg, x, r0 */
507 tcg_out_arithi(s
, r0
, addr_reg
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1),
511 tcg_out_arithi(s
, r1
, r1
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
,
515 tcg_out_arithi(s
, r1
, r1
, offsetof(CPUState
, tlb_table
[mem_index
][0].addr_read
),
518 /* ld [env + r1], r1 */
519 tcg_out_ldst(s
, r1
, TCG_AREG0
, r1
, ld_op
);
521 /* subcc r0, r1, %g0 */
522 tcg_out_arith(s
, TCG_REG_G0
, r0
, r1
, ARITH_SUBCC
);
/* Remember where the TLB-hit branch goes; patched below.  */
526 label1_ptr
= s
->code_ptr
;
529 /* mov (delay slot)*/
530 tcg_out_mov(s
, r0
, addr_reg
);
/* TLB miss: call the per-size load helper.  */
532 /* XXX: move that code at the end of the TB */
533 tcg_out32(s
, CALL
| ((((tcg_target_ulong
)qemu_ld_helpers
[s_bits
]
534 - (tcg_target_ulong
)s
->code_ptr
) >> 2)
536 /* mov (delay slot)*/
537 tcg_out_movi(s
, TCG_TYPE_I32
, r1
, mem_index
);
/* Sign-extend the helper's return value in %i0 according to opc.  */
541 /* sll i0, 24/56, i0 */
542 tcg_out_arithi(s
, TCG_REG_I0
, TCG_REG_I0
,
543 sizeof(tcg_target_long
) * 8 - 8, SHIFT_SLL
);
544 /* sra i0, 24/56, data_reg */
545 tcg_out_arithi(s
, data_reg
, TCG_REG_I0
,
546 sizeof(tcg_target_long
) * 8 - 8, SHIFT_SRA
);
549 /* sll i0, 16/48, i0 */
550 tcg_out_arithi(s
, TCG_REG_I0
, TCG_REG_I0
,
551 sizeof(tcg_target_long
) * 8 - 16, SHIFT_SLL
);
552 /* sra i0, 16/48, data_reg */
553 tcg_out_arithi(s
, data_reg
, TCG_REG_I0
,
554 sizeof(tcg_target_long
) * 8 - 16, SHIFT_SRA
);
558 tcg_out_arithi(s
, TCG_REG_I0
, TCG_REG_I0
, 32, SHIFT_SLL
);
559 /* sra i0, 32, data_reg */
560 tcg_out_arithi(s
, data_reg
, TCG_REG_I0
, 32, SHIFT_SRA
);
/* Unsigned or full-width result: plain move from %i0.  */
568 tcg_out_mov(s
, data_reg
, TCG_REG_I0
);
/* Branch over the fast path; patched at the end.  */
574 label2_ptr
= s
->code_ptr
;
/* Patch label1 to branch here (TLB hit, fast path).  */
578 *label1_ptr
= (INSN_OP(0) | INSN_COND(COND_A
, 0) | INSN_OP2(0x2) |
579 INSN_OFF22((unsigned long)label1_ptr
-
580 (unsigned long)s
->code_ptr
));
/* Fast path: add the host-address addend from the TLB entry.  */
582 /* ld [r1 + x], r1 */
583 tcg_out_ldst(s
, r1
, r1
, offsetof(CPUTLBEntry
, addend
) -
584 offsetof(CPUTLBEntry
, addr_read
), ld_op
);
586 tcg_out_arith(s
, r0
, r1
, r0
, ARITH_ADD
);
/* Per-size access; little-endian guests on this big-endian host use
   the alternate-space (ASI_PRIMARY_LITTLE) byte-swapping loads.  */
593 /* ldub [r0], data_reg */
594 tcg_out_ldst(s
, data_reg
, r0
, 0, LDUB
);
597 /* ldsb [r0], data_reg */
598 tcg_out_ldst(s
, data_reg
, r0
, 0, LDSB
);
601 #ifdef TARGET_WORDS_BIGENDIAN
602 /* lduh [r0], data_reg */
603 tcg_out_ldst(s
, data_reg
, r0
, 0, LDUH
);
605 /* lduha [r0] ASI_PRIMARY_LITTLE, data_reg */
606 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, LDUHA
, ASI_PRIMARY_LITTLE
);
610 #ifdef TARGET_WORDS_BIGENDIAN
611 /* ldsh [r0], data_reg */
612 tcg_out_ldst(s
, data_reg
, r0
, 0, LDSH
);
614 /* ldsha [r0] ASI_PRIMARY_LITTLE, data_reg */
615 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, LDSHA
, ASI_PRIMARY_LITTLE
);
619 #ifdef TARGET_WORDS_BIGENDIAN
620 /* lduw [r0], data_reg */
621 tcg_out_ldst(s
, data_reg
, r0
, 0, LDUW
);
623 /* lduwa [r0] ASI_PRIMARY_LITTLE, data_reg */
624 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, LDUWA
, ASI_PRIMARY_LITTLE
);
628 #ifdef TARGET_WORDS_BIGENDIAN
629 /* ldsw [r0], data_reg */
630 tcg_out_ldst(s
, data_reg
, r0
, 0, LDSW
);
632 /* ldswa [r0] ASI_PRIMARY_LITTLE, data_reg */
633 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, LDSWA
, ASI_PRIMARY_LITTLE
);
637 #ifdef TARGET_WORDS_BIGENDIAN
638 /* ldx [r0], data_reg */
639 tcg_out_ldst(s
, data_reg
, r0
, 0, LDX
);
641 /* ldxa [r0] ASI_PRIMARY_LITTLE, data_reg */
642 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, LDXA
, ASI_PRIMARY_LITTLE
);
649 #if defined(CONFIG_SOFTMMU)
/* Patch label2: slow path skips over the fast path to here.  */
651 *label2_ptr
= (INSN_OP(0) | INSN_COND(COND_A
, 0) | INSN_OP2(0x2) |
652 INSN_OFF22((unsigned long)label2_ptr
-
653 (unsigned long)s
->code_ptr
));
/* Emit a guest-memory store.  Mirrors tcg_out_qemu_ld: inline TLB
   lookup against addr_write, slow path via qemu_st_helpers[s_bits],
   fast path does the store directly (byte-swapped via
   ASI_PRIMARY_LITTLE on little-endian guests).
   NOTE(review): many structural lines are missing from this extract;
   the sign-extension sequence here appears to prepare the data
   argument for the helper call — confirm against upstream.  */
657 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
660 int addr_reg
, data_reg
, r0
, r1
, mem_index
, s_bits
, ld_op
;
661 #if defined(CONFIG_SOFTMMU)
662 uint8_t *label1_ptr
, *label2_ptr
;
674 #if TARGET_LONG_BITS == 32
680 #if defined(CONFIG_SOFTMMU)
/* TLB lookup, as in tcg_out_qemu_ld but against addr_write.  */
681 /* srl addr_reg, x, r1 */
682 tcg_out_arithi(s
, r1
, addr_reg
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
,
684 /* and addr_reg, x, r0 */
685 tcg_out_arithi(s
, r0
, addr_reg
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1),
689 tcg_out_arithi(s
, r1
, r1
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
,
693 tcg_out_arithi(s
, r1
, r1
,
694 offsetof(CPUState
, tlb_table
[mem_index
][0].addr_write
),
697 /* ld [env + r1], r1 */
698 tcg_out_ldst(s
, r1
, TCG_AREG0
, r1
, ld_op
);
700 /* subcc r0, r1, %g0 */
701 tcg_out_arith(s
, TCG_REG_G0
, r0
, r1
, ARITH_SUBCC
);
/* TLB-hit branch target; patched below.  */
705 label1_ptr
= s
->code_ptr
;
707 /* mov (delay slot)*/
708 tcg_out_mov(s
, r0
, addr_reg
);
/* Size-dependent extension of the data value.  */
712 /* sll i0, 24/56, i0 */
713 tcg_out_arithi(s
, TCG_REG_I0
, TCG_REG_I0
,
714 sizeof(tcg_target_long
) * 8 - 8, SHIFT_SLL
);
715 /* sra i0, 24/56, data_reg */
716 tcg_out_arithi(s
, data_reg
, TCG_REG_I0
,
717 sizeof(tcg_target_long
) * 8 - 8, SHIFT_SRA
);
720 /* sll i0, 16/48, i0 */
721 tcg_out_arithi(s
, TCG_REG_I0
, TCG_REG_I0
,
722 sizeof(tcg_target_long
) * 8 - 16, SHIFT_SLL
);
723 /* sra i0, 16/48, data_reg */
724 tcg_out_arithi(s
, data_reg
, TCG_REG_I0
,
725 sizeof(tcg_target_long
) * 8 - 16, SHIFT_SRA
);
729 tcg_out_arithi(s
, TCG_REG_I0
, TCG_REG_I0
, 32, SHIFT_SLL
);
730 /* sra i0, 32, data_reg */
731 tcg_out_arithi(s
, data_reg
, TCG_REG_I0
, 32, SHIFT_SRA
);
739 tcg_out_mov(s
, data_reg
, TCG_REG_I0
);
/* TLB miss: call the per-size store helper.  */
743 tcg_out32(s
, CALL
| ((((tcg_target_ulong
)qemu_st_helpers
[s_bits
]
744 - (tcg_target_ulong
)s
->code_ptr
) >> 2)
746 /* mov (delay slot)*/
747 tcg_out_movi(s
, TCG_TYPE_I32
, r1
, mem_index
);
/* Branch over the fast path; patched at the end.  */
751 label2_ptr
= s
->code_ptr
;
/* Patch label1 to branch here (fast path).  */
755 *label1_ptr
= (INSN_OP(0) | INSN_COND(COND_A
, 0) | INSN_OP2(0x2) |
756 INSN_OFF22((unsigned long)label1_ptr
-
757 (unsigned long)s
->code_ptr
));
/* Add the host addend from the TLB entry.  */
759 /* ld [r1 + x], r1 */
760 tcg_out_ldst(s
, r1
, r1
, offsetof(CPUTLBEntry
, addend
) -
761 offsetof(CPUTLBEntry
, addr_write
), ld_op
);
763 tcg_out_arith(s
, r0
, r1
, r0
, ARITH_ADD
);
/* Per-size store; ASI variants byte-swap for little-endian guests.  */
770 /* stb data_reg, [r0] */
771 tcg_out_ldst(s
, data_reg
, r0
, 0, STB
);
774 #ifdef TARGET_WORDS_BIGENDIAN
775 /* sth data_reg, [r0] */
776 tcg_out_ldst(s
, data_reg
, r0
, 0, STH
);
778 /* stha data_reg, [r0] ASI_PRIMARY_LITTLE */
779 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, STHA
, ASI_PRIMARY_LITTLE
);
783 #ifdef TARGET_WORDS_BIGENDIAN
784 /* stw data_reg, [r0] */
785 tcg_out_ldst(s
, data_reg
, r0
, 0, STW
);
787 /* stwa data_reg, [r0] ASI_PRIMARY_LITTLE */
788 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, STWA
, ASI_PRIMARY_LITTLE
);
792 #ifdef TARGET_WORDS_BIGENDIAN
793 /* stx data_reg, [r0] */
794 tcg_out_ldst(s
, data_reg
, r0
, 0, STX
);
796 /* stxa data_reg, [r0] ASI_PRIMARY_LITTLE */
797 tcg_out_ldst_asi(s
, data_reg
, r0
, 0, STXA
, ASI_PRIMARY_LITTLE
);
804 #if defined(CONFIG_SOFTMMU)
/* Patch label2: slow path joins here after the fast path.  */
806 *label2_ptr
= (INSN_OP(0) | INSN_COND(COND_A
, 0) | INSN_OP2(0x2) |
807 INSN_OFF22((unsigned long)label2_ptr
-
808 (unsigned long)s
->code_ptr
));
/* Main opcode dispatcher: emit SPARC code for one TCG opcode.
   NOTE(review): the switch skeleton, many case labels, break
   statements, and some operands are missing from this extract.  */
812 static inline void tcg_out_op(TCGContext
*s
, int opc
, const TCGArg
*args
,
813 const int *const_args
)
/* exit_tb: return value in %i0, then "jmpl %i7+8" + restore to
   unwind the register window and return to the prologue caller.  */
818 case INDEX_op_exit_tb
:
819 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_I0
, args
[0]);
820 tcg_out32(s
, JMPL
| INSN_RD(TCG_REG_G0
) | INSN_RS1(TCG_REG_I7
) |
822 tcg_out32(s
, RESTORE
| INSN_RD(TCG_REG_G0
) | INSN_RS1(TCG_REG_G0
) |
823 INSN_RS2(TCG_REG_G0
));
825 case INDEX_op_goto_tb
:
826 if (s
->tb_jmp_offset
) {
827 /* direct jump method */
/* sethi+jmpl pair whose target can be patched in place later.  */
828 tcg_out_sethi(s
, TCG_REG_I5
, args
[0] & 0xffffe000);
829 tcg_out32(s
, JMPL
| INSN_RD(TCG_REG_G0
) | INSN_RS1(TCG_REG_I5
) |
830 INSN_IMM13((args
[0] & 0x1fff)));
831 s
->tb_jmp_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
833 /* indirect jump method */
834 tcg_out_ld_ptr(s
, TCG_REG_I5
, (tcg_target_long
)(s
->tb_next
+ args
[0]));
835 tcg_out32(s
, JMPL
| INSN_RD(TCG_REG_G0
) | INSN_RS1(TCG_REG_I5
) |
836 INSN_RS2(TCG_REG_G0
));
839 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
/* call: direct pc-relative CALL for constants, jmpl via I5
   otherwise; AREG0 is spilled around the call (see comment).  */
843 unsigned int st_op
, ld_op
;
853 tcg_out32(s
, CALL
| ((((tcg_target_ulong
)args
[0]
854 - (tcg_target_ulong
)s
->code_ptr
) >> 2)
857 tcg_out_ld_ptr(s
, TCG_REG_I5
,
858 (tcg_target_long
)(s
->tb_next
+ args
[0]));
859 tcg_out32(s
, JMPL
| INSN_RD(TCG_REG_O7
) | INSN_RS1(TCG_REG_I5
) |
860 INSN_RS2(TCG_REG_G0
));
862 /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
864 tcg_out_ldst(s
, TCG_AREG0
, TCG_REG_CALL_STACK
,
865 TCG_TARGET_CALL_STACK_OFFSET
- sizeof(long),
866 st_op
); // delay slot
867 tcg_out_ldst(s
, TCG_AREG0
, TCG_REG_CALL_STACK
,
868 TCG_TARGET_CALL_STACK_OFFSET
- sizeof(long),
/* br: unconditional branch to a TCG label.  */
874 tcg_out_branch(s
, COND_A
, args
[0]);
877 case INDEX_op_movi_i32
:
878 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], (uint32_t)args
[1]);
/* On 64-bit sparc v9 each load/store case also handles the
   corresponding _i64 opcode.  */
881 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
882 #define OP_32_64(x) \
883 glue(glue(case INDEX_op_, x), _i32:) \
884 glue(glue(case INDEX_op_, x), _i64:)
886 #define OP_32_64(x) \
887 glue(glue(case INDEX_op_, x), _i32:)
890 tcg_out_ldst(s
, args
[0], args
[1], args
[2], LDUB
);
893 tcg_out_ldst(s
, args
[0], args
[1], args
[2], LDSB
);
896 tcg_out_ldst(s
, args
[0], args
[1], args
[2], LDUH
);
899 tcg_out_ldst(s
, args
[0], args
[1], args
[2], LDSH
);
901 case INDEX_op_ld_i32
:
902 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
903 case INDEX_op_ld32u_i64
:
905 tcg_out_ldst(s
, args
[0], args
[1], args
[2], LDUW
);
908 tcg_out_ldst(s
, args
[0], args
[1], args
[2], STB
);
911 tcg_out_ldst(s
, args
[0], args
[1], args
[2], STH
);
913 case INDEX_op_st_i32
:
914 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
915 case INDEX_op_st32_i64
:
917 tcg_out_ldst(s
, args
[0], args
[1], args
[2], STW
);
/* Arithmetic/shift cases fall through to the shared gen_arith tail
   below (missing from this extract, see args[0..2]/c usage).  */
934 case INDEX_op_shl_i32
:
937 case INDEX_op_shr_i32
:
940 case INDEX_op_sar_i32
:
943 case INDEX_op_mul_i32
:
946 case INDEX_op_div2_i32
:
947 #if defined(__sparc_v9__) || defined(__sparc_v8plus__)
955 case INDEX_op_divu2_i32
:
956 #if defined(__sparc_v9__) || defined(__sparc_v8plus__)
965 case INDEX_op_brcond_i32
:
966 tcg_out_brcond(s
, args
[2], args
[0], args
[1], const_args
[1],
/* qemu_ld/st: the third argument encodes size | (4 for signed).  */
970 case INDEX_op_qemu_ld8u
:
971 tcg_out_qemu_ld(s
, args
, 0);
973 case INDEX_op_qemu_ld8s
:
974 tcg_out_qemu_ld(s
, args
, 0 | 4);
976 case INDEX_op_qemu_ld16u
:
977 tcg_out_qemu_ld(s
, args
, 1);
979 case INDEX_op_qemu_ld16s
:
980 tcg_out_qemu_ld(s
, args
, 1 | 4);
982 case INDEX_op_qemu_ld32u
:
983 tcg_out_qemu_ld(s
, args
, 2);
985 case INDEX_op_qemu_ld32s
:
986 tcg_out_qemu_ld(s
, args
, 2 | 4);
988 case INDEX_op_qemu_st8
:
989 tcg_out_qemu_st(s
, args
, 0);
991 case INDEX_op_qemu_st16
:
992 tcg_out_qemu_st(s
, args
, 1);
994 case INDEX_op_qemu_st32
:
995 tcg_out_qemu_st(s
, args
, 2);
/* 64-bit opcodes, only on true sparc v9.  */
998 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
999 case INDEX_op_movi_i64
:
1000 tcg_out_movi(s
, TCG_TYPE_I64
, args
[0], args
[1]);
1002 case INDEX_op_ld32s_i64
:
1003 tcg_out_ldst(s
, args
[0], args
[1], args
[2], LDSW
);
1005 case INDEX_op_ld_i64
:
1006 tcg_out_ldst(s
, args
[0], args
[1], args
[2], LDX
);
1008 case INDEX_op_st_i64
:
1009 tcg_out_ldst(s
, args
[0], args
[1], args
[2], STX
);
1011 case INDEX_op_shl_i64
:
1014 case INDEX_op_shr_i64
:
1017 case INDEX_op_sar_i64
:
1020 case INDEX_op_mul_i64
:
1023 case INDEX_op_div2_i64
:
1026 case INDEX_op_divu2_i64
:
1030 case INDEX_op_brcond_i64
:
1031 tcg_out_brcond(s
, args
[2], args
[0], args
[1], const_args
[1],
1034 case INDEX_op_qemu_ld64
:
1035 tcg_out_qemu_ld(s
, args
, 3);
1037 case INDEX_op_qemu_st64
:
1038 tcg_out_qemu_st(s
, args
, 3);
/* Shared tail for two-operand arithmetic: immediate form when the
   constant fits, register form otherwise (c holds the opcode).  */
1043 if (const_args
[2]) {
1044 tcg_out_arithi(s
, args
[0], args
[1], args
[2], c
);
1046 tcg_out_arith(s
, args
[0], args
[1], args
[2], c
);
/* Unknown opcode: report and (presumably) abort.  */
1051 fprintf(stderr
, "unknown opcode 0x%x\n", opc
);
/* Constraint table: for each supported TCG opcode, the register/
   constant constraints of its operands ("r" any reg, "ri" reg or
   immediate, "rJ" reg or simm13, "L" qemu_ld/st-safe reg, "0"/"1"
   alias of output 0/1).  */
1056 static const TCGTargetOpDef sparc_op_defs
[] = {
1057 { INDEX_op_exit_tb
, { } },
1058 { INDEX_op_goto_tb
, { } },
1059 { INDEX_op_call
, { "ri" } },
1060 { INDEX_op_jmp
, { "ri" } },
1061 { INDEX_op_br
, { } },
1063 { INDEX_op_mov_i32
, { "r", "r" } },
1064 { INDEX_op_movi_i32
, { "r" } },
1065 { INDEX_op_ld8u_i32
, { "r", "r" } },
1066 { INDEX_op_ld8s_i32
, { "r", "r" } },
1067 { INDEX_op_ld16u_i32
, { "r", "r" } },
1068 { INDEX_op_ld16s_i32
, { "r", "r" } },
1069 { INDEX_op_ld_i32
, { "r", "r" } },
1070 { INDEX_op_st8_i32
, { "r", "r" } },
1071 { INDEX_op_st16_i32
, { "r", "r" } },
1072 { INDEX_op_st_i32
, { "r", "r" } },
1074 { INDEX_op_add_i32
, { "r", "r", "rJ" } },
1075 { INDEX_op_mul_i32
, { "r", "r", "rJ" } },
1076 { INDEX_op_div2_i32
, { "r", "r", "0", "1", "r" } },
1077 { INDEX_op_divu2_i32
, { "r", "r", "0", "1", "r" } },
1078 { INDEX_op_sub_i32
, { "r", "r", "rJ" } },
1079 { INDEX_op_and_i32
, { "r", "r", "rJ" } },
1080 { INDEX_op_or_i32
, { "r", "r", "rJ" } },
1081 { INDEX_op_xor_i32
, { "r", "r", "rJ" } },
1083 { INDEX_op_shl_i32
, { "r", "r", "rJ" } },
1084 { INDEX_op_shr_i32
, { "r", "r", "rJ" } },
1085 { INDEX_op_sar_i32
, { "r", "r", "rJ" } },
1087 { INDEX_op_brcond_i32
, { "r", "ri" } },
1089 { INDEX_op_qemu_ld8u
, { "r", "L" } },
1090 { INDEX_op_qemu_ld8s
, { "r", "L" } },
1091 { INDEX_op_qemu_ld16u
, { "r", "L" } },
1092 { INDEX_op_qemu_ld16s
, { "r", "L" } },
1093 { INDEX_op_qemu_ld32u
, { "r", "L" } },
1094 { INDEX_op_qemu_ld32s
, { "r", "L" } },
1096 { INDEX_op_qemu_st8
, { "L", "L" } },
1097 { INDEX_op_qemu_st16
, { "L", "L" } },
1098 { INDEX_op_qemu_st32
, { "L", "L" } },
/* 64-bit opcodes on true sparc v9 only.  */
1100 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
1101 { INDEX_op_mov_i64
, { "r", "r" } },
1102 { INDEX_op_movi_i64
, { "r" } },
1103 { INDEX_op_ld8u_i64
, { "r", "r" } },
1104 { INDEX_op_ld8s_i64
, { "r", "r" } },
1105 { INDEX_op_ld16u_i64
, { "r", "r" } },
1106 { INDEX_op_ld16s_i64
, { "r", "r" } },
1107 { INDEX_op_ld32u_i64
, { "r", "r" } },
1108 { INDEX_op_ld32s_i64
, { "r", "r" } },
1109 { INDEX_op_ld_i64
, { "r", "r" } },
1110 { INDEX_op_st8_i64
, { "r", "r" } },
1111 { INDEX_op_st16_i64
, { "r", "r" } },
1112 { INDEX_op_st32_i64
, { "r", "r" } },
1113 { INDEX_op_st_i64
, { "r", "r" } },
1115 { INDEX_op_add_i64
, { "r", "r", "rJ" } },
1116 { INDEX_op_mul_i64
, { "r", "r", "rJ" } },
1117 { INDEX_op_div2_i64
, { "r", "r", "0", "1", "r" } },
1118 { INDEX_op_divu2_i64
, { "r", "r", "0", "1", "r" } },
1119 { INDEX_op_sub_i64
, { "r", "r", "rJ" } },
1120 { INDEX_op_and_i64
, { "r", "r", "rJ" } },
1121 { INDEX_op_or_i64
, { "r", "r", "rJ" } },
1122 { INDEX_op_xor_i64
, { "r", "r", "rJ" } },
1124 { INDEX_op_shl_i64
, { "r", "r", "rJ" } },
1125 { INDEX_op_shr_i64
, { "r", "r", "rJ" } },
1126 { INDEX_op_sar_i64
, { "r", "r", "rJ" } },
1128 { INDEX_op_brcond_i64
, { "r", "ri" } },
/* Backend initialization: declare available/call-clobbered register
   sets, reserve registers the backend uses internally, and register
   the opcode constraint table.
   NOTE(review): the call-clobber mask value and the #endif lines are
   missing from this extract.  */
1133 void tcg_target_init(TCGContext
*s
)
1135 tcg_regset_set32(tcg_target_available_regs
[TCG_TYPE_I32
], 0, 0xffffffff);
1136 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
1137 tcg_regset_set32(tcg_target_available_regs
[TCG_TYPE_I64
], 0, 0xffffffff);
1139 tcg_regset_set32(tcg_target_call_clobber_regs
, 0,
1155 tcg_regset_clear(s
->reserved_regs
);
/* %g0 is hardwired to zero.  */
1156 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_G0
);
1157 #if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
1158 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_I4
); // for internal use
1160 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_I5
); // for internal use
/* Frame/return and stack pointers must stay untouched.  */
1161 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_I6
);
1162 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_I7
);
1163 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_O6
);
1164 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_O7
);
1165 tcg_add_target_add_op_defs(sparc_op_defs
);