/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2018 SiFive, Inc
 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Based on i386/tcg-target.c and mips/tcg-target.c
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "tcg-pool.inc.c"
#ifdef CONFIG_DEBUG_TCG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "zero", "ra", "sp",  "gp",  "tp", "t0", "t1", "t2",
    "s0",   "s1", "a0",  "a1",  "a2", "a3", "a4", "a5",
    "a6",   "a7", "s2",  "s3",  "s4", "s5", "s6", "s7",
    "s8",   "s9", "s10", "s11", "t3", "t4", "t5", "t6"
};
#endif
static const int tcg_target_reg_alloc_order[] = {
    /* Call saved registers */
    /* TCG_REG_S0 reserved for TCG_AREG0 */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_S8,
    TCG_REG_S9,
    TCG_REG_S10,
    TCG_REG_S11,

    /* Call clobbered registers */
    TCG_REG_T0,
    TCG_REG_T1,
    TCG_REG_T2,
    TCG_REG_T3,
    TCG_REG_T4,
    TCG_REG_T5,
    TCG_REG_T6,

    /* Argument registers */
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_A4,
    TCG_REG_A5,
    TCG_REG_A6,
    TCG_REG_A7,
};

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_A4,
    TCG_REG_A5,
    TCG_REG_A6,
    TCG_REG_A7,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_A0,
    TCG_REG_A1,
};
#define TCG_CT_CONST_ZERO  0x100
#define TCG_CT_CONST_S12   0x200
#define TCG_CT_CONST_N12   0x400
#define TCG_CT_CONST_M12   0x800
static inline tcg_target_long sextreg(tcg_target_long val, int pos, int len)
{
    if (TCG_TARGET_REG_BITS == 32) {
        return sextract32(val, pos, len);
    } else {
        return sextract64(val, pos, len);
    }
}
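/*
 * For illustration (not part of the original file): sextreg() extracts a
 * bitfield and sign-extends it.  E.g. sextreg(0xfff, 0, 12) yields -1,
 * while sextreg(0x7ff, 0, 12) yields 2047.  The immediate-fitting tests
 * below all have the shape "val == sextreg(val, 0, N)", which is true
 * exactly when val is representable as an N-bit two's-complement value.
 */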
/* parse target specific constraints */
static const char *target_parse_constraint(TCGArgConstraint *ct,
                                           const char *ct_str, TCGType type)
{
    switch (*ct_str++) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        ct->u.regs = 0xffffffff;
        break;
    case 'L':
        /* qemu_ld/qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        ct->u.regs = 0xffffffff;
        /* qemu_ld/qemu_st uses TCG_REG_TMP0 */
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[0]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[1]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[2]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[3]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[4]);
#endif
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S12;
        break;
    case 'N':
        ct->ct |= TCG_CT_CONST_N12;
        break;
    case 'M':
        ct->ct |= TCG_CT_CONST_M12;
        break;
    case 'Z':
        /* we can use a zero immediate as a zero register argument. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return NULL;
    }
    return ct_str;
}
/* test if a constant matches the constraint */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;
    if (ct & TCG_CT_CONST) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_ZERO) && val == 0) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_S12) && val == sextreg(val, 0, 12)) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_N12) && -val == sextreg(-val, 0, 12)) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_M12) && val >= -0xfff && val <= 0xfff) {
        return 1;
    }
    return 0;
}
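/*
 * For illustration (not part of the original file): the three 12-bit
 * constraints differ at the edges of the range.  S12 ("rI") accepts
 * [-2048, 2047], values that fit directly in an I-type immediate;
 * N12 ("rN") accepts values whose negation fits, i.e. [-2047, 2048],
 * so a subtraction can be rewritten as an addition of -val; M12 ("rM")
 * accepts the symmetric range [-4095, 4095] used by the add2/sub2
 * carry setup below.  For example, val == 2048 fails S12 but passes N12.
 */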
/*
 * RISC-V Base ISA opcodes (IM)
 */

typedef enum {
    OPC_DIVU = 0x2005033,
    OPC_MULH = 0x2001033,
    OPC_MULHSU = 0x2002033,
    OPC_MULHU = 0x2003033,
    OPC_REMU = 0x2007033,
    OPC_SRA = 0x40005033,
    OPC_SRAI = 0x40005013,
    OPC_SUB = 0x40000033,

#if TCG_TARGET_REG_BITS == 64
    OPC_DIVUW = 0x200503b,
    OPC_DIVW = 0x200403b,
    OPC_MULW = 0x200003b,
    OPC_REMUW = 0x200703b,
    OPC_REMW = 0x200603b,
    OPC_SRAIW = 0x4000501b,
    OPC_SRAW = 0x4000503b,
    OPC_SUBW = 0x4000003b,
#else
    /* Simplify code throughout by defining aliases for RV32. */
    OPC_ADDIW = OPC_ADDI,
    OPC_DIVUW = OPC_DIVU,
    OPC_REMUW = OPC_REMU,
    OPC_SLLIW = OPC_SLLI,
    OPC_SRAIW = OPC_SRAI,
    OPC_SRLIW = OPC_SRLI,
#endif

    OPC_FENCE = 0x0000000f,
} RISCVInsn;
/*
 * RISC-V immediate and instruction encoders (excludes 16-bit RVC)
 */

/* Type-R */

static int32_t encode_r(RISCVInsn opc, TCGReg rd, TCGReg rs1, TCGReg rs2)
{
    return opc | (rd & 0x1f) << 7 | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20;
}

/* Type-I */

static int32_t encode_imm12(uint32_t imm)
{
    return (imm & 0xfff) << 20;
}

static int32_t encode_i(RISCVInsn opc, TCGReg rd, TCGReg rs1, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | (rs1 & 0x1f) << 15 | encode_imm12(imm);
}
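/*
 * For illustration (not part of the original file): in the I-type layout
 * the 12-bit immediate occupies insn[31:20], rs1 insn[19:15], funct3
 * insn[14:12], rd insn[11:7] and the opcode insn[6:0].  So, assuming the
 * standard OPC_ADDI encoding (0x13), encode_i(OPC_ADDI, rd, rs1, -1)
 * sets insn[31:20] to 0xfff, which a disassembler shows as
 * "addi rd, rs1, -1".
 */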
/* Type-S */

static int32_t encode_simm12(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0xFE0) << 20;
    ret |= (imm & 0x1F) << 7;

    return ret;
}

static int32_t encode_s(RISCVInsn opc, TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    return opc | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20 | encode_simm12(imm);
}
/* Type-SB */

static int32_t encode_sbimm12(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0x1000) << 19;
    ret |= (imm & 0x7e0) << 20;
    ret |= (imm & 0x1e) << 7;
    ret |= (imm & 0x800) >> 4;

    return ret;
}

static int32_t encode_sb(RISCVInsn opc, TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    return opc | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20 | encode_sbimm12(imm);
}
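/*
 * For illustration (not part of the original file): the B-type immediate
 * is deliberately scattered so the register fields keep fixed positions:
 *   imm[12]   -> insn[31]     ((imm & 0x1000) << 19)
 *   imm[10:5] -> insn[30:25]  ((imm & 0x7e0)  << 20)
 *   imm[4:1]  -> insn[11:8]   ((imm & 0x1e)   << 7)
 *   imm[11]   -> insn[7]      ((imm & 0x800)  >> 4)
 * Bit 0 of the offset is never encoded; branch targets are 2-byte
 * aligned.  E.g. a branch offset of +8 (as used by the patched branch in
 * patch_reloc() below) sets only insn bit 10.
 */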
/* Type-U */

static int32_t encode_uimm20(uint32_t imm)
{
    return imm & 0xfffff000;
}

static int32_t encode_u(RISCVInsn opc, TCGReg rd, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | encode_uimm20(imm);
}

/* Type-UJ */

static int32_t encode_ujimm20(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0x0007fe) << (21 - 1);
    ret |= (imm & 0x000800) << (20 - 11);
    ret |= (imm & 0x0ff000) << (12 - 12);
    ret |= (imm & 0x100000) << (31 - 20);

    return ret;
}

static int32_t encode_uj(RISCVInsn opc, TCGReg rd, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | encode_ujimm20(imm);
}
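/*
 * For illustration (not part of the original file): the J-type shifts
 * are written as (target_bit - source_bit) so the layout can be read off
 * directly:
 *   imm[10:1]  -> insn[30:21]
 *   imm[11]    -> insn[20]
 *   imm[19:12] -> insn[19:12]
 *   imm[20]    -> insn[31]
 * This gives JAL a +/-1MB range, the "short jump" case in
 * tcg_out_call_int() below.
 */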
/*
 * RISC-V instruction emitters
 */

static void tcg_out_opc_reg(TCGContext *s, RISCVInsn opc,
                            TCGReg rd, TCGReg rs1, TCGReg rs2)
{
    tcg_out32(s, encode_r(opc, rd, rs1, rs2));
}

static void tcg_out_opc_imm(TCGContext *s, RISCVInsn opc,
                            TCGReg rd, TCGReg rs1, TCGArg imm)
{
    tcg_out32(s, encode_i(opc, rd, rs1, imm));
}

static void tcg_out_opc_store(TCGContext *s, RISCVInsn opc,
                              TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    tcg_out32(s, encode_s(opc, rs1, rs2, imm));
}

static void tcg_out_opc_branch(TCGContext *s, RISCVInsn opc,
                               TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    tcg_out32(s, encode_sb(opc, rs1, rs2, imm));
}

static void tcg_out_opc_upper(TCGContext *s, RISCVInsn opc,
                              TCGReg rd, uint32_t imm)
{
    tcg_out32(s, encode_u(opc, rd, imm));
}

static void tcg_out_opc_jump(TCGContext *s, RISCVInsn opc,
                             TCGReg rd, uint32_t imm)
{
    tcg_out32(s, encode_uj(opc, rd, imm));
}
static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    int i;
    for (i = 0; i < count; ++i) {
        p[i] = encode_i(OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);
    }
}
static bool reloc_sbimm12(tcg_insn_unit *code_ptr, tcg_insn_unit *target)
{
    intptr_t offset = (intptr_t)target - (intptr_t)code_ptr;

    if (offset == sextreg(offset, 1, 12) << 1) {
        code_ptr[0] |= encode_sbimm12(offset);
        return true;
    }

    return false;
}

static bool reloc_jimm20(tcg_insn_unit *code_ptr, tcg_insn_unit *target)
{
    intptr_t offset = (intptr_t)target - (intptr_t)code_ptr;

    if (offset == sextreg(offset, 1, 20) << 1) {
        code_ptr[0] |= encode_ujimm20(offset);
        return true;
    }

    return false;
}
static bool reloc_call(tcg_insn_unit *code_ptr, tcg_insn_unit *target)
{
    intptr_t offset = (intptr_t)target - (intptr_t)code_ptr;
    int32_t lo = sextreg(offset, 0, 12);
    int32_t hi = offset - lo;

    if (offset == hi + lo) {
        code_ptr[0] |= encode_uimm20(hi);
        code_ptr[1] |= encode_imm12(lo);
        return true;
    }

    return false;
}
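/*
 * For illustration (not part of the original file): the AUIPC+ADDI pair
 * needs the low part as a *signed* 12-bit value, so the high part absorbs
 * the rounding.  For offset = 0x12345: lo = sextreg(0x345, 0, 12) = 0x345
 * and hi = 0x12000.  For offset = 0x12945: lo = sextreg(0x945, 0, 12) =
 * -0x6bb, hi = 0x13000, and AUIPC's +0x13000 followed by ADDI's -0x6bb
 * recreates the offset.  Since hi and lo are int32_t, the
 * "offset == hi + lo" guard fails exactly when the offset does not fit
 * the 32-bit AUIPC+ADDI range.
 */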
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    uint32_t insn = *code_ptr;
    intptr_t diff;
    bool short_jmp;

    tcg_debug_assert(addend == 0);

    switch (type) {
    case R_RISCV_BRANCH:
        diff = value - (uintptr_t)code_ptr;
        short_jmp = diff == sextreg(diff, 0, 12);
        if (short_jmp) {
            return reloc_sbimm12(code_ptr, (tcg_insn_unit *)value);
        } else {
            /* Invert the condition */
            insn = insn ^ (1 << 12);
            /* Clear the offset */
            insn &= 0x01fff07f;
            /* Set the offset to the PC + 8 */
            insn |= encode_sbimm12(8);
            code_ptr[0] = insn;

            /* Overwrite the NOP with jal x0,value */
            diff = value - (uintptr_t)(code_ptr + 1);
            insn = encode_uj(OPC_JAL, TCG_REG_ZERO, diff);
            code_ptr[1] = insn;
        }
        return true;
    case R_RISCV_JAL:
        return reloc_jimm20(code_ptr, (tcg_insn_unit *)value);
    case R_RISCV_CALL:
        return reloc_call(code_ptr, (tcg_insn_unit *)value);
    default:
        tcg_abort();
    }
}
static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    if (ret == arg) {
        return;
    }
    switch (type) {
    case TCG_TYPE_I32:
    case TCG_TYPE_I64:
        tcg_out_opc_imm(s, OPC_ADDI, ret, arg, 0);
        break;
    default:
        g_assert_not_reached();
    }
}
static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg rd,
                         tcg_target_long val)
{
    tcg_target_long lo, hi, tmp;
    int shift, ret;

    if (TCG_TARGET_REG_BITS == 64 && type == TCG_TYPE_I32) {
        val = (int32_t)val;
    }

    lo = sextreg(val, 0, 12);
    if (val == lo) {
        tcg_out_opc_imm(s, OPC_ADDI, rd, TCG_REG_ZERO, lo);
        return;
    }

    hi = val - lo;
    if (TCG_TARGET_REG_BITS == 32 || val == (int32_t)val) {
        tcg_out_opc_upper(s, OPC_LUI, rd, hi);
        if (lo != 0) {
            tcg_out_opc_imm(s, OPC_ADDIW, rd, rd, lo);
        }
        return;
    }

    /* We can only be here if TCG_TARGET_REG_BITS != 32 */
    tmp = tcg_pcrel_diff(s, (void *)val);
    if (tmp == (int32_t)tmp) {
        tcg_out_opc_upper(s, OPC_AUIPC, rd, 0);
        tcg_out_opc_imm(s, OPC_ADDI, rd, rd, 0);
        ret = reloc_call(s->code_ptr - 2, (tcg_insn_unit *)val);
        tcg_debug_assert(ret == true);
        return;
    }

    /* Look for a single 20-bit section. */
    shift = ctz64(val);
    tmp = val >> shift;
    if (tmp == sextreg(tmp, 0, 20)) {
        tcg_out_opc_upper(s, OPC_LUI, rd, tmp << 12);
        if (shift > 12) {
            tcg_out_opc_imm(s, OPC_SLLI, rd, rd, shift - 12);
        } else {
            tcg_out_opc_imm(s, OPC_SRAI, rd, rd, 12 - shift);
        }
        return;
    }

    /* Look for a few high zero bits, with lots of bits set in the middle. */
    shift = clz64(val);
    tmp = val << shift;
    if (tmp == sextreg(tmp, 12, 20) << 12) {
        tcg_out_opc_upper(s, OPC_LUI, rd, tmp);
        tcg_out_opc_imm(s, OPC_SRLI, rd, rd, shift);
        return;
    } else if (tmp == sextreg(tmp, 0, 12)) {
        tcg_out_opc_imm(s, OPC_ADDI, rd, TCG_REG_ZERO, tmp);
        tcg_out_opc_imm(s, OPC_SRLI, rd, rd, shift);
        return;
    }

    /* Drop into the constant pool. */
    new_pool_label(s, val, R_RISCV_CALL, s->code_ptr, 0);
    tcg_out_opc_upper(s, OPC_AUIPC, rd, 0);
    tcg_out_opc_imm(s, OPC_LD, rd, rd, 0);
}
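/*
 * For illustration (not part of the original file): for a 64-bit constant
 * such as val = 0x7fff000000000000, the "single 20-bit section" case
 * applies: ctz64(val) = 48 and tmp = val >> 48 = 0x7fff fits in 20 signed
 * bits, so the sequence is LUI rd, 0x7fff000 followed by SLLI rd, rd, 36.
 * Constants with no such structure fall through to the constant pool and
 * are loaded with an AUIPC+LD pair patched via reloc_call().
 */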
static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_ANDI, ret, arg, 0xff);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 16);
    tcg_out_opc_imm(s, OPC_SRLIW, ret, ret, 16);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLI, ret, arg, 32);
    tcg_out_opc_imm(s, OPC_SRLI, ret, ret, 32);
}

static void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 24);
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 24);
}

static void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 16);
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_ADDIW, ret, arg, 0);
}
static void tcg_out_ldst(TCGContext *s, RISCVInsn opc, TCGReg data,
                         TCGReg addr, intptr_t offset)
{
    intptr_t imm12 = sextreg(offset, 0, 12);

    if (offset != imm12) {
        intptr_t diff = offset - (uintptr_t)s->code_ptr;

        if (addr == TCG_REG_ZERO && diff == (int32_t)diff) {
            imm12 = sextreg(diff, 0, 12);
            tcg_out_opc_upper(s, OPC_AUIPC, TCG_REG_TMP2, diff - imm12);
        } else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP2, offset - imm12);
            if (addr != TCG_REG_ZERO) {
                tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP2, TCG_REG_TMP2, addr);
            }
        }
        addr = TCG_REG_TMP2;
    }

    switch (opc) {
    case OPC_SB:
    case OPC_SH:
    case OPC_SW:
    case OPC_SD:
        tcg_out_opc_store(s, opc, addr, data, imm12);
        break;
    case OPC_LB:
    case OPC_LBU:
    case OPC_LH:
    case OPC_LHU:
    case OPC_LW:
    case OPC_LWU:
    case OPC_LD:
        tcg_out_opc_imm(s, opc, data, addr, imm12);
        break;
    default:
        g_assert_not_reached();
    }
}
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                       TCGReg arg1, intptr_t arg2)
{
    bool is32bit = (TCG_TARGET_REG_BITS == 32 || type == TCG_TYPE_I32);
    tcg_out_ldst(s, is32bit ? OPC_LW : OPC_LD, arg, arg1, arg2);
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                       TCGReg arg1, intptr_t arg2)
{
    bool is32bit = (TCG_TARGET_REG_BITS == 32 || type == TCG_TYPE_I32);
    tcg_out_ldst(s, is32bit ? OPC_SW : OPC_SD, arg, arg1, arg2);
}

static bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                        TCGReg base, intptr_t ofs)
{
    if (val == 0) {
        tcg_out_st(s, type, TCG_REG_ZERO, base, ofs);
        return true;
    }
    return false;
}
static void tcg_out_addsub2(TCGContext *s,
                            TCGReg rl, TCGReg rh,
                            TCGReg al, TCGReg ah,
                            TCGArg bl, TCGArg bh,
                            bool cbl, bool cbh, bool is_sub, bool is32bit)
{
    const RISCVInsn opc_add = is32bit ? OPC_ADDW : OPC_ADD;
    const RISCVInsn opc_addi = is32bit ? OPC_ADDIW : OPC_ADDI;
    const RISCVInsn opc_sub = is32bit ? OPC_SUBW : OPC_SUB;
    TCGReg th = TCG_REG_TMP1;

    /* If we have a negative constant such that negating it would
       make the high part zero, we can (usually) eliminate one insn. */
    if (cbl && cbh && bh == -1 && bl != 0) {
        bl = -bl;
        bh = 0;
        is_sub = !is_sub;
    }

    /* By operating on the high part first, we get to use the final
       carry operation to move back from the temporary. */
    if (!cbh) {
        tcg_out_opc_reg(s, (is_sub ? opc_sub : opc_add), th, ah, bh);
    } else if (bh != 0 || ah == rl) {
        tcg_out_opc_imm(s, opc_addi, th, ah, (is_sub ? -bh : bh));
    } else {
        th = ah;
    }

    /* Note that tcg optimization should eliminate the bl == 0 case. */
    if (is_sub) {
        if (cbl) {
            tcg_out_opc_imm(s, OPC_SLTIU, TCG_REG_TMP0, al, bl);
            tcg_out_opc_imm(s, opc_addi, rl, al, -bl);
        } else {
            tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_TMP0, al, bl);
            tcg_out_opc_reg(s, opc_sub, rl, al, bl);
        }
        tcg_out_opc_reg(s, opc_sub, rh, th, TCG_REG_TMP0);
    } else {
        if (cbl) {
            tcg_out_opc_imm(s, opc_addi, rl, al, bl);
            tcg_out_opc_imm(s, OPC_SLTIU, TCG_REG_TMP0, rl, bl);
        } else if (rl == al && rl == bl) {
            tcg_out_opc_imm(s, OPC_SLTI, TCG_REG_TMP0, al, 0);
            tcg_out_opc_reg(s, opc_add, rl, al, bl);
        } else {
            tcg_out_opc_reg(s, opc_add, rl, al, bl);
            tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_TMP0,
                            rl, (rl == bl ? al : bl));
        }
        tcg_out_opc_reg(s, opc_add, rh, th, TCG_REG_TMP0);
    }
}
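/*
 * For illustration (not part of the original file): RISC-V has no carry
 * flag, so the carry out of the low-part addition is recomputed with a
 * comparison.  For rl = al + bl (registers), SLTU tmp0, rl, bl sets tmp0
 * to 1 exactly when the addition wrapped (unsigned rl < bl), and tmp0 is
 * then added into the high part.  The rl == al == bl case instead tests
 * the sign bit of al with SLTI, since al + al wraps iff the top bit of
 * al is set.
 */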
static const struct {
    RISCVInsn op;
    bool swap;
} tcg_brcond_to_riscv[] = {
    [TCG_COND_EQ]  = { OPC_BEQ,  false },
    [TCG_COND_NE]  = { OPC_BNE,  false },
    [TCG_COND_LT]  = { OPC_BLT,  false },
    [TCG_COND_GE]  = { OPC_BGE,  false },
    [TCG_COND_LE]  = { OPC_BGE,  true  },
    [TCG_COND_GT]  = { OPC_BLT,  true  },
    [TCG_COND_LTU] = { OPC_BLTU, false },
    [TCG_COND_GEU] = { OPC_BGEU, false },
    [TCG_COND_LEU] = { OPC_BGEU, true  },
    [TCG_COND_GTU] = { OPC_BLTU, true  }
};
static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
                           TCGReg arg2, TCGLabel *l)
{
    RISCVInsn op = tcg_brcond_to_riscv[cond].op;

    tcg_debug_assert(op != 0);

    if (tcg_brcond_to_riscv[cond].swap) {
        TCGReg t = arg1;
        arg1 = arg2;
        arg2 = t;
    }

    if (l->has_value) {
        intptr_t diff = tcg_pcrel_diff(s, l->u.value_ptr);
        if (diff == sextreg(diff, 0, 12)) {
            tcg_out_opc_branch(s, op, arg1, arg2, diff);
        } else {
            /* Invert the conditional branch. */
            tcg_out_opc_branch(s, op ^ (1 << 12), arg1, arg2, 8);
            tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, diff - 4);
        }
    } else {
        tcg_out_reloc(s, s->code_ptr, R_RISCV_BRANCH, l, 0);
        tcg_out_opc_branch(s, op, arg1, arg2, 0);
        /* NOP to allow patching later */
        tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);
    }
}
static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGReg arg1, TCGReg arg2)
{
    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_opc_reg(s, OPC_SUB, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        break;
    case TCG_COND_NE:
        tcg_out_opc_reg(s, OPC_SUB, ret, arg1, arg2);
        tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        break;
    default:
        g_assert_not_reached();
    }
}
static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGReg al, TCGReg ah,
                            TCGReg bl, TCGReg bh, TCGLabel *l)
{
    g_assert_not_reached();
}

static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                             TCGReg al, TCGReg ah, TCGReg bl, TCGReg bh)
{
    g_assert_not_reached();
}
static inline void tcg_out_goto(TCGContext *s, tcg_insn_unit *target)
{
    ptrdiff_t offset = tcg_pcrel_diff(s, target);
    tcg_debug_assert(offset == sextreg(offset, 1, 20) << 1);
    tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, offset);
}
static void tcg_out_call_int(TCGContext *s, tcg_insn_unit *arg, bool tail)
{
    TCGReg link = tail ? TCG_REG_ZERO : TCG_REG_RA;
    ptrdiff_t offset = tcg_pcrel_diff(s, arg);
    int ret;

    if (offset == sextreg(offset, 1, 20) << 1) {
        /* short jump: -2097150 to 2097152 */
        tcg_out_opc_jump(s, OPC_JAL, link, offset);
    } else if (TCG_TARGET_REG_BITS == 32 ||
               offset == sextreg(offset, 1, 31) << 1) {
        /* long jump: -2147483646 to 2147483648 */
        tcg_out_opc_upper(s, OPC_AUIPC, TCG_REG_TMP0, 0);
        tcg_out_opc_imm(s, OPC_JALR, link, TCG_REG_TMP0, 0);
        ret = reloc_call(s->code_ptr - 2, arg);
        tcg_debug_assert(ret == true);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* far jump: 64-bit */
        tcg_target_long imm = sextreg((tcg_target_long)arg, 0, 12);
        tcg_target_long base = (tcg_target_long)arg - imm;
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP0, base);
        tcg_out_opc_imm(s, OPC_JALR, link, TCG_REG_TMP0, imm);
    } else {
        g_assert_not_reached();
    }
}

static void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
{
    tcg_out_call_int(s, arg, false);
}
static void tcg_out_mb(TCGContext *s, TCGArg a0)
{
    tcg_insn_unit insn = OPC_FENCE;

    if (a0 & TCG_MO_LD_LD) {
        insn |= 0x02200000;
    }
    if (a0 & TCG_MO_ST_LD) {
        insn |= 0x01200000;
    }
    if (a0 & TCG_MO_LD_ST) {
        insn |= 0x02100000;
    }
    if (a0 & TCG_MO_ST_ST) {
        insn |= 0x01100000;
    }
    tcg_out32(s, insn);
}
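/*
 * For illustration (not part of the original file; the OR-ed constants
 * above are reconstructed from the standard FENCE encoding): the
 * predecessor set lives in insn[27:24] (I/O/R/W) and the successor set
 * in insn[23:20].  E.g. TCG_MO_LD_LD needs "fence r, r": PR is bit 25
 * and SR is bit 21, hence 0x02200000; TCG_MO_ST_ST needs "fence w, w":
 * PW (bit 24) | SW (bit 20) = 0x01100000.
 */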
#if defined(CONFIG_SOFTMMU)
#include "tcg-ldst.inc.c"

/* helper signature: helper_ret_ld_mmu(CPUState *env, target_ulong addr,
 *                                     TCGMemOpIdx oi, uintptr_t ra)
 */
static void * const qemu_ld_helpers[16] = {
    [MO_UB]   = helper_ret_ldub_mmu,
    [MO_SB]   = helper_ret_ldsb_mmu,
    [MO_LEUW] = helper_le_lduw_mmu,
    [MO_LESW] = helper_le_ldsw_mmu,
    [MO_LEUL] = helper_le_ldul_mmu,
#if TCG_TARGET_REG_BITS == 64
    [MO_LESL] = helper_le_ldsl_mmu,
#endif
    [MO_LEQ]  = helper_le_ldq_mmu,
    [MO_BEUW] = helper_be_lduw_mmu,
    [MO_BESW] = helper_be_ldsw_mmu,
    [MO_BEUL] = helper_be_ldul_mmu,
#if TCG_TARGET_REG_BITS == 64
    [MO_BESL] = helper_be_ldsl_mmu,
#endif
    [MO_BEQ]  = helper_be_ldq_mmu,
};

/* helper signature: helper_ret_st_mmu(CPUState *env, target_ulong addr,
 *                                     uintxx_t val, TCGMemOpIdx oi,
 *                                     uintptr_t ra)
 */
static void * const qemu_st_helpers[16] = {
    [MO_UB]   = helper_ret_stb_mmu,
    [MO_LEUW] = helper_le_stw_mmu,
    [MO_LEUL] = helper_le_stl_mmu,
    [MO_LEQ]  = helper_le_stq_mmu,
    [MO_BEUW] = helper_be_stw_mmu,
    [MO_BEUL] = helper_be_stl_mmu,
    [MO_BEQ]  = helper_be_stq_mmu,
};
/* We don't support oversize guests */
QEMU_BUILD_BUG_ON(TCG_TARGET_REG_BITS < TARGET_LONG_BITS);

/* We expect tlb_mask to be before tlb_table. */
QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table) <
                  offsetof(CPUArchState, tlb_mask));

/* We expect tlb_mask to be "near" tlb_table. */
QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table) -
                  offsetof(CPUArchState, tlb_mask) >= 0x800);
static void tcg_out_tlb_load(TCGContext *s, TCGReg addrl,
                             TCGReg addrh, TCGMemOpIdx oi,
                             tcg_insn_unit **label_ptr, bool is_load)
{
    TCGMemOp opc = get_memop(oi);
    unsigned s_bits = opc & MO_SIZE;
    unsigned a_bits = get_alignment_bits(opc);
    tcg_target_long compare_mask;
    int mem_index = get_mmuidx(oi);
    int mask_off, table_off;
    TCGReg mask_base = TCG_AREG0, table_base = TCG_AREG0;

    mask_off = offsetof(CPUArchState, tlb_mask[mem_index]);
    table_off = offsetof(CPUArchState, tlb_table[mem_index]);
    if (table_off > 0x7ff) {
        int mask_hi = mask_off - sextreg(mask_off, 0, 12);
        int table_hi = table_off - sextreg(table_off, 0, 12);

        if (likely(mask_hi == table_hi)) {
            mask_base = table_base = TCG_REG_TMP1;
            tcg_out_opc_upper(s, OPC_LUI, mask_base, mask_hi);
            tcg_out_opc_reg(s, OPC_ADD, mask_base, mask_base, TCG_AREG0);
            mask_off -= mask_hi;
            table_off -= mask_hi;
        } else {
            mask_base = TCG_REG_TMP0;
            table_base = TCG_REG_TMP1;
            tcg_out_opc_upper(s, OPC_LUI, mask_base, mask_hi);
            tcg_out_opc_reg(s, OPC_ADD, mask_base, mask_base, TCG_AREG0);
            table_off -= mask_off;
            mask_off -= mask_hi;
            tcg_out_opc_imm(s, OPC_ADDI, table_base, mask_base, mask_off);
        }
    }

    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP0, mask_base, mask_off);
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP1, table_base, table_off);

    tcg_out_opc_imm(s, OPC_SRLI, TCG_REG_TMP2, addrl,
                    TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_TMP2, TCG_REG_TMP2, TCG_REG_TMP0);
    tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP2, TCG_REG_TMP2, TCG_REG_TMP1);

    /* Load the tlb comparator and the addend. */
    tcg_out_ld(s, TCG_TYPE_TL, TCG_REG_TMP0, TCG_REG_TMP2,
               is_load ? offsetof(CPUTLBEntry, addr_read)
               : offsetof(CPUTLBEntry, addr_write));
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP2, TCG_REG_TMP2,
               offsetof(CPUTLBEntry, addend));

    /* We don't support unaligned accesses. */
    if (a_bits < s_bits) {
        a_bits = s_bits;
    }
    /* Clear the non-page, non-alignment bits from the address. */
    compare_mask = (tcg_target_long)TARGET_PAGE_MASK | ((1 << a_bits) - 1);
    if (compare_mask == sextreg(compare_mask, 0, 12)) {
        tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_TMP1, addrl, compare_mask);
    } else {
        tcg_out_movi(s, TCG_TYPE_TL, TCG_REG_TMP1, compare_mask);
        tcg_out_opc_reg(s, OPC_AND, TCG_REG_TMP1, TCG_REG_TMP1, addrl);
    }

    /* Compare masked address with the TLB entry. */
    label_ptr[0] = s->code_ptr;
    tcg_out_opc_branch(s, OPC_BNE, TCG_REG_TMP0, TCG_REG_TMP1, 0);
    /* NOP to allow patching later */
    tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);

    /* TLB Hit - translate address using addend. */
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, TCG_REG_TMP0, addrl);
        addrl = TCG_REG_TMP0;
    }
    tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP0, TCG_REG_TMP2, addrl);
}
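/*
 * For illustration (not part of the original file): the fast-path
 * sequence emitted above is, in pseudo-assembly:
 *
 *   srli tmp2, addrl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS
 *   and  tmp2, tmp2, tmp0        # tmp0 = tlb_mask[mem_index]
 *   add  tmp2, tmp2, tmp1        # tmp1 = tlb_table[mem_index]
 *   ld   tmp0, addr_read/addr_write(tmp2)
 *   ld   tmp2, addend(tmp2)
 *   andi/and tmp1, addrl, compare_mask
 *   bne  tmp0, tmp1, slow_path   # patched via R_RISCV_BRANCH
 *   nop                          # room for the patched-in jal
 *   add  tmp0, tmp2, addrl       # host address = guest addr + addend
 */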
static void add_qemu_ldst_label(TCGContext *s, int is_ld, TCGMemOpIdx oi,
                                TCGType ext,
                                TCGReg datalo, TCGReg datahi,
                                TCGReg addrlo, TCGReg addrhi,
                                void *raddr, tcg_insn_unit **label_ptr)
{
    TCGLabelQemuLdst *label = new_ldst_label(s);

    label->is_ld = is_ld;
    label->oi = oi;
    label->type = ext;
    label->datalo_reg = datalo;
    label->datahi_reg = datahi;
    label->addrlo_reg = addrlo;
    label->addrhi_reg = addrhi;
    label->raddr = raddr;
    label->label_ptr[0] = label_ptr[0];
}
*s
, TCGLabelQemuLdst
*l
)
1070 TCGMemOpIdx oi
= l
->oi
;
1071 TCGMemOp opc
= get_memop(oi
);
1072 TCGReg a0
= tcg_target_call_iarg_regs
[0];
1073 TCGReg a1
= tcg_target_call_iarg_regs
[1];
1074 TCGReg a2
= tcg_target_call_iarg_regs
[2];
1075 TCGReg a3
= tcg_target_call_iarg_regs
[3];
1077 /* We don't support oversize guests */
1078 if (TCG_TARGET_REG_BITS
< TARGET_LONG_BITS
) {
1079 g_assert_not_reached();
1082 /* resolve label address */
1083 patch_reloc(l
->label_ptr
[0], R_RISCV_BRANCH
, (intptr_t) s
->code_ptr
, 0);
1085 /* call load helper */
1086 tcg_out_mov(s
, TCG_TYPE_PTR
, a0
, TCG_AREG0
);
1087 tcg_out_mov(s
, TCG_TYPE_PTR
, a1
, l
->addrlo_reg
);
1088 tcg_out_movi(s
, TCG_TYPE_PTR
, a2
, oi
);
1089 tcg_out_movi(s
, TCG_TYPE_PTR
, a3
, (tcg_target_long
)l
->raddr
);
1091 tcg_out_call(s
, qemu_ld_helpers
[opc
& (MO_BSWAP
| MO_SSIZE
)]);
1092 tcg_out_mov(s
, (opc
& MO_SIZE
) == MO_64
, l
->datalo_reg
, a0
);
1094 tcg_out_goto(s
, l
->raddr
);
static void tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    TCGMemOpIdx oi = l->oi;
    TCGMemOp opc = get_memop(oi);
    TCGMemOp s_bits = opc & MO_SIZE;
    TCGReg a0 = tcg_target_call_iarg_regs[0];
    TCGReg a1 = tcg_target_call_iarg_regs[1];
    TCGReg a2 = tcg_target_call_iarg_regs[2];
    TCGReg a3 = tcg_target_call_iarg_regs[3];
    TCGReg a4 = tcg_target_call_iarg_regs[4];

    /* We don't support oversize guests */
    if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) {
        g_assert_not_reached();
    }

    /* resolve label address */
    patch_reloc(l->label_ptr[0], R_RISCV_BRANCH, (intptr_t) s->code_ptr, 0);

    /* call store helper */
    tcg_out_mov(s, TCG_TYPE_PTR, a0, TCG_AREG0);
    tcg_out_mov(s, TCG_TYPE_PTR, a1, l->addrlo_reg);
    tcg_out_mov(s, TCG_TYPE_PTR, a2, l->datalo_reg);
    switch (s_bits) {
    case MO_8:
        tcg_out_ext8u(s, a2, a2);
        break;
    case MO_16:
        tcg_out_ext16u(s, a2, a2);
        break;
    default:
        break;
    }
    tcg_out_movi(s, TCG_TYPE_PTR, a3, oi);
    tcg_out_movi(s, TCG_TYPE_PTR, a4, (tcg_target_long)l->raddr);

    tcg_out_call(s, qemu_st_helpers[opc & (MO_BSWAP | MO_SSIZE)]);

    tcg_out_goto(s, l->raddr);
}
#endif /* CONFIG_SOFTMMU */
static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg lo, TCGReg hi,
                                   TCGReg base, TCGMemOp opc, bool is_64)
{
    const TCGMemOp bswap = opc & MO_BSWAP;

    /* We don't yet handle byteswapping, assert */
    g_assert(!bswap);

    switch (opc & (MO_SSIZE)) {
    case MO_UB:
        tcg_out_opc_imm(s, OPC_LBU, lo, base, 0);
        break;
    case MO_SB:
        tcg_out_opc_imm(s, OPC_LB, lo, base, 0);
        break;
    case MO_UW:
        tcg_out_opc_imm(s, OPC_LHU, lo, base, 0);
        break;
    case MO_SW:
        tcg_out_opc_imm(s, OPC_LH, lo, base, 0);
        break;
    case MO_UL:
        if (TCG_TARGET_REG_BITS == 64 && is_64) {
            tcg_out_opc_imm(s, OPC_LWU, lo, base, 0);
            break;
        }
        /* FALLTHRU */
    case MO_SL:
        tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
        break;
    case MO_Q:
        /* Prefer to load from offset 0 first, but allow for overlap. */
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_opc_imm(s, OPC_LD, lo, base, 0);
        } else if (lo != base) {
            tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
            tcg_out_opc_imm(s, OPC_LW, hi, base, 4);
        } else {
            tcg_out_opc_imm(s, OPC_LW, hi, base, 4);
            tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
{
    TCGReg addr_regl, addr_regh __attribute__((unused));
    TCGReg data_regl, data_regh;
    TCGMemOpIdx oi;
    TCGMemOp opc;
#if defined(CONFIG_SOFTMMU)
    tcg_insn_unit *label_ptr[1];
#endif
    TCGReg base = TCG_REG_TMP0;

    data_regl = *args++;
    data_regh = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
    addr_regl = *args++;
    addr_regh = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
    oi = *args++;
    opc = get_memop(oi);

#if defined(CONFIG_SOFTMMU)
    tcg_out_tlb_load(s, addr_regl, addr_regh, oi, label_ptr, 1);
    tcg_out_qemu_ld_direct(s, data_regl, data_regh, base, opc, is_64);
    add_qemu_ldst_label(s, 1, oi,
                        (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
                        data_regl, data_regh, addr_regl, addr_regh,
                        s->code_ptr, label_ptr);
#else
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, base, addr_regl);
        addr_regl = base;
    }

    if (guest_base == 0) {
        tcg_out_opc_reg(s, OPC_ADD, base, addr_regl, TCG_REG_ZERO);
    } else {
        tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
    }
    tcg_out_qemu_ld_direct(s, data_regl, data_regh, base, opc, is_64);
#endif
}
static void tcg_out_qemu_st_direct(TCGContext *s, TCGReg lo, TCGReg hi,
                                   TCGReg base, TCGMemOp opc)
{
    const TCGMemOp bswap = opc & MO_BSWAP;

    /* We don't yet handle byteswapping, assert */
    g_assert(!bswap);

    switch (opc & (MO_SSIZE)) {
    case MO_8:
        tcg_out_opc_store(s, OPC_SB, base, lo, 0);
        break;
    case MO_16:
        tcg_out_opc_store(s, OPC_SH, base, lo, 0);
        break;
    case MO_32:
        tcg_out_opc_store(s, OPC_SW, base, lo, 0);
        break;
    case MO_64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_opc_store(s, OPC_SD, base, lo, 0);
        } else {
            tcg_out_opc_store(s, OPC_SW, base, lo, 0);
            tcg_out_opc_store(s, OPC_SW, base, hi, 4);
        }
        break;
    default:
        g_assert_not_reached();
    }
}
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
{
    TCGReg addr_regl, addr_regh __attribute__((unused));
    TCGReg data_regl, data_regh;
    TCGMemOpIdx oi;
    TCGMemOp opc;
#if defined(CONFIG_SOFTMMU)
    tcg_insn_unit *label_ptr[1];
#endif
    TCGReg base = TCG_REG_TMP0;

    data_regl = *args++;
    data_regh = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
    addr_regl = *args++;
    addr_regh = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
    oi = *args++;
    opc = get_memop(oi);

#if defined(CONFIG_SOFTMMU)
    tcg_out_tlb_load(s, addr_regl, addr_regh, oi, label_ptr, 0);
    tcg_out_qemu_st_direct(s, data_regl, data_regh, base, opc);
    add_qemu_ldst_label(s, 0, oi,
                        (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
                        data_regl, data_regh, addr_regl, addr_regh,
                        s->code_ptr, label_ptr);
#else
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, base, addr_regl);
        addr_regl = base;
    }

    if (guest_base == 0) {
        tcg_out_opc_reg(s, OPC_ADD, base, addr_regl, TCG_REG_ZERO);
    } else {
        tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
    }
    tcg_out_qemu_st_direct(s, data_regl, data_regh, base, opc);
#endif
}
static tcg_insn_unit *tb_ret_addr;
static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg *args, const int *const_args)
{
    TCGArg a0 = args[0];
    TCGArg a1 = args[1];
    TCGArg a2 = args[2];
    int c2 = const_args[2];

    switch (opc) {
    case INDEX_op_exit_tb:
        /* Reuse the zeroing that exists for goto_ptr. */
        if (a0 == 0) {
            tcg_out_call_int(s, s->code_gen_epilogue, true);
        } else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, a0);
            tcg_out_call_int(s, tb_ret_addr, true);
        }
        break;

    case INDEX_op_goto_tb:
        assert(s->tb_jmp_insn_offset == 0);
        /* indirect jump method */
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP0, TCG_REG_ZERO,
                   (uintptr_t)(s->tb_jmp_target_addr + a0));
        tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, TCG_REG_TMP0, 0);
        set_jmp_reset_offset(s, a0);
        break;

    case INDEX_op_goto_ptr:
        tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, a0, 0);
        break;

    case INDEX_op_br:
        tcg_out_reloc(s, s->code_ptr, R_RISCV_JAL, arg_label(a0), 0);
        tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, 0);
        break;

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        tcg_out_ldst(s, OPC_LBU, a0, a1, a2);
        break;
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
        tcg_out_ldst(s, OPC_LB, a0, a1, a2);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        tcg_out_ldst(s, OPC_LHU, a0, a1, a2);
        break;
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
        tcg_out_ldst(s, OPC_LH, a0, a1, a2);
        break;
    case INDEX_op_ld32u_i64:
        tcg_out_ldst(s, OPC_LWU, a0, a1, a2);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, OPC_LW, a0, a1, a2);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, OPC_LD, a0, a1, a2);
        break;

    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        tcg_out_ldst(s, OPC_SB, a0, a1, a2);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        tcg_out_ldst(s, OPC_SH, a0, a1, a2);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        tcg_out_ldst(s, OPC_SW, a0, a1, a2);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, OPC_SD, a0, a1, a2);
        break;

    case INDEX_op_add_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_ADDW, a0, a1, a2);
        }
        break;
    case INDEX_op_add_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_ADD, a0, a1, a2);
        }
        break;

    case INDEX_op_sub_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDIW, a0, a1, -a2);
        } else {
            tcg_out_opc_reg(s, OPC_SUBW, a0, a1, a2);
        }
        break;
    case INDEX_op_sub_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDI, a0, a1, -a2);
        } else {
            tcg_out_opc_reg(s, OPC_SUB, a0, a1, a2);
        }
        break;

    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ANDI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_AND, a0, a1, a2);
        }
        break;

    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ORI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_OR, a0, a1, a2);
        }
        break;

    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_XORI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, a0, a1, a2);
        }
        break;

    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
        tcg_out_opc_imm(s, OPC_XORI, a0, a1, -1);
        break;

    case INDEX_op_neg_i32:
        tcg_out_opc_reg(s, OPC_SUBW, a0, TCG_REG_ZERO, a1);
        break;
    case INDEX_op_neg_i64:
        tcg_out_opc_reg(s, OPC_SUB, a0, TCG_REG_ZERO, a1);
        break;

    case INDEX_op_mul_i32:
        tcg_out_opc_reg(s, OPC_MULW, a0, a1, a2);
        break;
    case INDEX_op_mul_i64:
        tcg_out_opc_reg(s, OPC_MUL, a0, a1, a2);
        break;

    case INDEX_op_div_i32:
        tcg_out_opc_reg(s, OPC_DIVW, a0, a1, a2);
        break;
    case INDEX_op_div_i64:
        tcg_out_opc_reg(s, OPC_DIV, a0, a1, a2);
        break;

    case INDEX_op_divu_i32:
        tcg_out_opc_reg(s, OPC_DIVUW, a0, a1, a2);
        break;
    case INDEX_op_divu_i64:
        tcg_out_opc_reg(s, OPC_DIVU, a0, a1, a2);
        break;

    case INDEX_op_rem_i32:
        tcg_out_opc_reg(s, OPC_REMW, a0, a1, a2);
        break;
    case INDEX_op_rem_i64:
        tcg_out_opc_reg(s, OPC_REM, a0, a1, a2);
        break;

    case INDEX_op_remu_i32:
        tcg_out_opc_reg(s, OPC_REMUW, a0, a1, a2);
        break;
    case INDEX_op_remu_i64:
        tcg_out_opc_reg(s, OPC_REMU, a0, a1, a2);
        break;

    case INDEX_op_shl_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SLLIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SLLW, a0, a1, a2);
        }
        break;
    case INDEX_op_shl_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SLLI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SLL, a0, a1, a2);
        }
        break;

    case INDEX_op_shr_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRLIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRLW, a0, a1, a2);
        }
        break;
    case INDEX_op_shr_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRLI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRL, a0, a1, a2);
        }
        break;

    case INDEX_op_sar_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRAIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRAW, a0, a1, a2);
        }
        break;
    case INDEX_op_sar_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRAI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRA, a0, a1, a2);
        }
        break;

    case INDEX_op_add2_i32:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], false, true);
        break;
    case INDEX_op_add2_i64:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], false, false);
        break;
    case INDEX_op_sub2_i32:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], true, true);
        break;
    case INDEX_op_sub2_i64:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], true, false);
        break;

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], a0, a1, a2, args[3], arg_label(args[5]));
        break;

    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        tcg_out_setcond(s, args[3], a0, a1, a2);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], a0, a1, a2, args[3], args[4]);
        break;

    case INDEX_op_qemu_ld_i32:
        tcg_out_qemu_ld(s, args, false);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_qemu_ld(s, args, true);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_qemu_st(s, args, false);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_qemu_st(s, args, true);
        break;

    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        tcg_out_ext8u(s, a0, a1);
        break;

    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
        tcg_out_ext16u(s, a0, a1);
        break;

    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, a0, a1);
        break;

    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
        tcg_out_ext8s(s, a0, a1);
        break;

    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
        tcg_out_ext16s(s, a0, a1);
        break;

    case INDEX_op_ext32s_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext_i32_i64:
        tcg_out_ext32s(s, a0, a1);
        break;

    case INDEX_op_extrh_i64_i32:
        tcg_out_opc_imm(s, OPC_SRAI, a0, a1, 32);
        break;

    case INDEX_op_mulsh_i32:
    case INDEX_op_mulsh_i64:
        tcg_out_opc_reg(s, OPC_MULH, a0, a1, a2);
        break;

    case INDEX_op_muluh_i32:
    case INDEX_op_muluh_i64:
        tcg_out_opc_reg(s, OPC_MULHU, a0, a1, a2);
        break;

    case INDEX_op_mb:
        tcg_out_mb(s, a0);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        g_assert_not_reached();
    }
}
static const TCGTargetOpDef *tcg_target_op_def(TCGOpcode op)
{
    static const TCGTargetOpDef r
        = { .args_ct_str = { "r" } };
    static const TCGTargetOpDef r_r
        = { .args_ct_str = { "r", "r" } };
    static const TCGTargetOpDef rZ_r
        = { .args_ct_str = { "rZ", "r" } };
    static const TCGTargetOpDef rZ_rZ
        = { .args_ct_str = { "rZ", "rZ" } };
    static const TCGTargetOpDef rZ_rZ_rZ_rZ
        = { .args_ct_str = { "rZ", "rZ", "rZ", "rZ" } };
    static const TCGTargetOpDef r_r_ri
        = { .args_ct_str = { "r", "r", "ri" } };
    static const TCGTargetOpDef r_r_rI
        = { .args_ct_str = { "r", "r", "rI" } };
    static const TCGTargetOpDef r_rZ_rN
        = { .args_ct_str = { "r", "rZ", "rN" } };
    static const TCGTargetOpDef r_rZ_rZ
        = { .args_ct_str = { "r", "rZ", "rZ" } };
    static const TCGTargetOpDef r_rZ_rZ_rZ_rZ
        = { .args_ct_str = { "r", "rZ", "rZ", "rZ", "rZ" } };
    static const TCGTargetOpDef r_L
        = { .args_ct_str = { "r", "L" } };
    static const TCGTargetOpDef r_r_L
        = { .args_ct_str = { "r", "r", "L" } };
    static const TCGTargetOpDef r_L_L
        = { .args_ct_str = { "r", "L", "L" } };
    static const TCGTargetOpDef r_r_L_L
        = { .args_ct_str = { "r", "r", "L", "L" } };
    static const TCGTargetOpDef LZ_L
        = { .args_ct_str = { "LZ", "L" } };
    static const TCGTargetOpDef LZ_L_L
        = { .args_ct_str = { "LZ", "L", "L" } };
    static const TCGTargetOpDef LZ_LZ_L
        = { .args_ct_str = { "LZ", "LZ", "L" } };
    static const TCGTargetOpDef LZ_LZ_L_L
        = { .args_ct_str = { "LZ", "LZ", "L", "L" } };
    static const TCGTargetOpDef r_r_rZ_rZ_rM_rM
        = { .args_ct_str = { "r", "r", "rZ", "rZ", "rM", "rM" } };

    switch (op) {
    case INDEX_op_goto_ptr:
        return &r;

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_not_i32:
    case INDEX_op_neg_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_extrh_i64_i32:
    case INDEX_op_ext_i32_i64:
        return &r_r;

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return &rZ_r;

    case INDEX_op_add_i32:
    case INDEX_op_and_i32:
    case INDEX_op_or_i32:
    case INDEX_op_xor_i32:
    case INDEX_op_add_i64:
    case INDEX_op_and_i64:
    case INDEX_op_or_i64:
    case INDEX_op_xor_i64:
        return &r_r_rI;

    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
        return &r_rZ_rN;

    case INDEX_op_mul_i32:
    case INDEX_op_mulsh_i32:
    case INDEX_op_muluh_i32:
    case INDEX_op_div_i32:
    case INDEX_op_divu_i32:
    case INDEX_op_rem_i32:
    case INDEX_op_remu_i32:
    case INDEX_op_setcond_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_mulsh_i64:
    case INDEX_op_muluh_i64:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i64:
    case INDEX_op_setcond_i64:
        return &r_rZ_rZ;

    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
        return &r_r_ri;

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return &rZ_rZ;

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return &r_r_rZ_rZ_rM_rM;

    case INDEX_op_brcond2_i32:
        return &rZ_rZ_rZ_rZ;

    case INDEX_op_setcond2_i32:
        return &r_rZ_rZ_rZ_rZ;

    case INDEX_op_qemu_ld_i32:
        return TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &r_L : &r_L_L;
    case INDEX_op_qemu_st_i32:
        return TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &LZ_L : &LZ_L_L;
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? &r_L
               : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &r_r_L
               : &r_r_L_L;
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? &LZ_L
               : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &LZ_LZ_L
               : &LZ_LZ_L_L;

    default:
        return NULL;
    }
}
static const int tcg_target_callee_save_regs[] = {
    TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_S8,
    TCG_REG_S9,
    TCG_REG_S10,
    TCG_REG_S11,
    TCG_REG_RA,       /* should be last for ABI compliance */
};

/* Stack frame parameters. */
#define REG_SIZE   (TCG_TARGET_REG_BITS / 8)
#define SAVE_SIZE  ((int)ARRAY_SIZE(tcg_target_callee_save_regs) * REG_SIZE)
#define TEMP_SIZE  (CPU_TEMP_BUF_NLONGS * (int)sizeof(long))
#define FRAME_SIZE ((TCG_STATIC_CALL_ARGS_SIZE + TEMP_SIZE + SAVE_SIZE \
                     + TCG_TARGET_STACK_ALIGN - 1) \
                    & -TCG_TARGET_STACK_ALIGN)
#define SAVE_OFS   (TCG_STATIC_CALL_ARGS_SIZE + TEMP_SIZE)

/* We're expecting to be able to use an immediate for frame allocation. */
QEMU_BUILD_BUG_ON(FRAME_SIZE > 0x7ff);
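/*
 * For illustration (not part of the original file): on RV64 the
 * callee-save area is 13 registers * 8 bytes = 104 bytes; FRAME_SIZE
 * additionally reserves TCG_STATIC_CALL_ARGS_SIZE for outgoing call
 * arguments and TEMP_SIZE for spilled temporaries, then rounds the total
 * up to TCG_TARGET_STACK_ALIGN.  The build assert above guarantees the
 * rounded size still fits the 12-bit signed immediate of the single
 * "addi sp, sp, -FRAME_SIZE" emitted in the prologue below.
 */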
/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i;

    tcg_set_frame(s, TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE, TEMP_SIZE);

    /* TB prologue */
    tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_SP, TCG_REG_SP, -FRAME_SIZE);
    for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_st(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, SAVE_OFS + i * REG_SIZE);
    }

#if !defined(CONFIG_SOFTMMU)
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, guest_base);
    tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
#endif

    /* Call generated code */
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
    tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, tcg_target_call_iarg_regs[1], 0);

    /* Return path for goto_ptr. Set return value to 0 */
    s->code_gen_epilogue = s->code_ptr;
    tcg_out_mov(s, TCG_TYPE_REG, TCG_REG_A0, TCG_REG_ZERO);

    /* TB epilogue */
    tb_ret_addr = s->code_ptr;
    for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_ld(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, SAVE_OFS + i * REG_SIZE);
    }

    tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_SP, TCG_REG_SP, FRAME_SIZE);
    tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, TCG_REG_RA, 0);
}
static void tcg_target_init(TCGContext *s)
{
    tcg_target_available_regs[TCG_TYPE_I32] = 0xffffffff;
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_target_available_regs[TCG_TYPE_I64] = 0xffffffff;
    }

    tcg_target_call_clobber_regs = -1u;
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S0);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S1);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S2);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S3);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S4);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S5);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S6);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S7);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S8);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S9);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S10);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S11);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP0);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP1);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP2);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TP);
}
typedef struct {
    DebugFrameHeader h;
    uint8_t fde_def_cfa[4];
    uint8_t fde_reg_ofs[ARRAY_SIZE(tcg_target_callee_save_regs) * 2];
} DebugFrame;

#define ELF_HOST_MACHINE EM_RISCV

static const DebugFrame debug_frame = {
    .h.cie.len = sizeof(DebugFrameCIE) - 4, /* length after .len member */
    .h.cie.id = -1,
    .h.cie.version = 1,
    .h.cie.code_align = 1,
    .h.cie.data_align = -(TCG_TARGET_REG_BITS / 8) & 0x7f, /* sleb128 */
    .h.cie.return_column = TCG_REG_RA,

    /* Total FDE size does not include the "len" member. */
    .h.fde.len = sizeof(DebugFrame) - offsetof(DebugFrame, h.fde.cie_offset),

    .fde_def_cfa = {
        12, TCG_REG_SP,                 /* DW_CFA_def_cfa sp, ... */
        (FRAME_SIZE & 0x7f) | 0x80,     /* ... uleb128 FRAME_SIZE */
        (FRAME_SIZE >> 7)
    },
    .fde_reg_ofs = {
        0x80 + 9,  12,                  /* DW_CFA_offset, s1,  -96 */
        0x80 + 18, 11,                  /* DW_CFA_offset, s2,  -88 */
        0x80 + 19, 10,                  /* DW_CFA_offset, s3,  -80 */
        0x80 + 20, 9,                   /* DW_CFA_offset, s4,  -72 */
        0x80 + 21, 8,                   /* DW_CFA_offset, s5,  -64 */
        0x80 + 22, 7,                   /* DW_CFA_offset, s6,  -56 */
        0x80 + 23, 6,                   /* DW_CFA_offset, s7,  -48 */
        0x80 + 24, 5,                   /* DW_CFA_offset, s8,  -40 */
        0x80 + 25, 4,                   /* DW_CFA_offset, s9,  -32 */
        0x80 + 26, 3,                   /* DW_CFA_offset, s10, -24 */
        0x80 + 27, 2,                   /* DW_CFA_offset, s11, -16 */
        0x80 + 1 , 1,                   /* DW_CFA_offset, ra,  -8 */
    }
};
void tcg_register_jit(void *buf, size_t buf_size)
{
    tcg_register_jit_int(buf, buf_size, &debug_frame, sizeof(debug_frame));
}