/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
/*
 * Register definitions
 */

static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
    "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
    "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
    "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
    "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
    "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
    "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
    "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
};
#ifdef CONFIG_USE_GUEST_BASE
#define TCG_GUEST_BASE_REG TCG_REG_R55
#else
#define TCG_GUEST_BASE_REG TCG_REG_R0
#endif
/* Branch registers */
/* Floating point registers */
/* Predicate registers */
/* Application registers */
static const int tcg_target_reg_alloc_order[] = {
    /* ... */
};

static const int tcg_target_call_iarg_regs[8] = {
    TCG_REG_R56, TCG_REG_R57, TCG_REG_R58, TCG_REG_R59,
    TCG_REG_R60, TCG_REG_R61, TCG_REG_R62, TCG_REG_R63,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R8
};
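/* Note: outgoing call arguments live in the stacked registers r56-r63
   (the prologue's alloc below reserves eight output registers) and the
   return value comes back in r8; the softmmu slow paths rely on this
   when they marshal env/address/data into r56-r59. */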
/* bundle templates: stops (double bar in the IA64 manual) are marked with
   an uppercase letter. */

enum {
    OPC_ADD_A1                = 0x10000000000ull,
    OPC_AND_A1                = 0x10060000000ull,
    OPC_AND_A3                = 0x10160000000ull,
    OPC_ANDCM_A1              = 0x10068000000ull,
    OPC_ANDCM_A3              = 0x10168000000ull,
    OPC_ADDS_A4               = 0x10800000000ull,
    OPC_ADDL_A5               = 0x12000000000ull,
    OPC_ALLOC_M34             = 0x02c00000000ull,
    OPC_BR_DPTK_FEW_B1        = 0x08400000000ull,
    OPC_BR_SPTK_MANY_B1       = 0x08000001000ull,
    OPC_BR_CALL_SPNT_FEW_B3   = 0x0a200000000ull,
    OPC_BR_SPTK_MANY_B4       = 0x00100001000ull,
    OPC_BR_CALL_SPTK_MANY_B5  = 0x02100001000ull,
    OPC_BR_RET_SPTK_MANY_B4   = 0x00108001100ull,
    OPC_BRL_SPTK_MANY_X3      = 0x18000001000ull,
    OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
    OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
    OPC_CMP_LT_A6             = 0x18000000000ull,
    OPC_CMP_LTU_A6            = 0x1a000000000ull,
    OPC_CMP_EQ_A6             = 0x1c000000000ull,
    OPC_CMP4_LT_A6            = 0x18400000000ull,
    OPC_CMP4_LTU_A6           = 0x1a400000000ull,
    OPC_CMP4_EQ_A6            = 0x1c400000000ull,
    OPC_DEP_I14               = 0x0ae00000000ull,
    OPC_DEP_I15               = 0x08000000000ull,
    OPC_DEP_Z_I12             = 0x0a600000000ull,
    OPC_EXTR_I11              = 0x0a400002000ull,
    OPC_EXTR_U_I11            = 0x0a400000000ull,
    OPC_FCVT_FX_TRUNC_S1_F10  = 0x004d0000000ull,
    OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
    OPC_FCVT_XF_F11           = 0x000e0000000ull,
    OPC_FMA_S1_F1             = 0x10400000000ull,
    OPC_FNMA_S1_F1            = 0x18400000000ull,
    OPC_FRCPA_S1_F6           = 0x00600000000ull,
    OPC_GETF_SIG_M19          = 0x08708000000ull,
    OPC_LD1_M1                = 0x08000000000ull,
    OPC_LD1_M3                = 0x0a000000000ull,
    OPC_LD2_M1                = 0x08040000000ull,
    OPC_LD2_M3                = 0x0a040000000ull,
    OPC_LD4_M1                = 0x08080000000ull,
    OPC_LD4_M3                = 0x0a080000000ull,
    OPC_LD8_M1                = 0x080c0000000ull,
    OPC_LD8_M3                = 0x0a0c0000000ull,
    OPC_MUX1_I3               = 0x0eca0000000ull,
    OPC_NOP_B9                = 0x04008000000ull,
    OPC_NOP_F16               = 0x00008000000ull,
    OPC_NOP_I18               = 0x00008000000ull,
    OPC_NOP_M48               = 0x00008000000ull,
    OPC_MOV_I21               = 0x00e00100000ull,
    OPC_MOV_RET_I21           = 0x00e00500000ull,
    OPC_MOV_I22               = 0x00188000000ull,
    OPC_MOV_I_I26             = 0x00150000000ull,
    OPC_MOVL_X2               = 0x0c000000000ull,
    OPC_OR_A1                 = 0x10070000000ull,
    OPC_OR_A3                 = 0x10170000000ull,
    OPC_SETF_EXP_M18          = 0x0c748000000ull,
    OPC_SETF_SIG_M18          = 0x0c708000000ull,
    OPC_SHL_I7                = 0x0f240000000ull,
    OPC_SHR_I5                = 0x0f220000000ull,
    OPC_SHR_U_I5              = 0x0f200000000ull,
    OPC_SHRP_I10              = 0x0ac00000000ull,
    OPC_SXT1_I29              = 0x000a0000000ull,
    OPC_SXT2_I29              = 0x000a8000000ull,
    OPC_SXT4_I29              = 0x000b0000000ull,
    OPC_ST1_M4                = 0x08c00000000ull,
    OPC_ST2_M4                = 0x08c40000000ull,
    OPC_ST4_M4                = 0x08c80000000ull,
    OPC_ST8_M4                = 0x08cc0000000ull,
    OPC_SUB_A1                = 0x10028000000ull,
    OPC_SUB_A3                = 0x10128000000ull,
    OPC_UNPACK4_L_I2          = 0x0f860000000ull,
    OPC_XMA_L_F2              = 0x1d000000000ull,
    OPC_XOR_A1                = 0x10078000000ull,
    OPC_XOR_A3                = 0x10178000000ull,
    OPC_ZXT1_I29              = 0x00080000000ull,
    OPC_ZXT2_I29              = 0x00088000000ull,
    OPC_ZXT4_I29              = 0x00090000000ull,

    INSN_NOP_M                = OPC_NOP_M48,  /* nop.m 0 */
    INSN_NOP_I                = OPC_NOP_I18,  /* nop.i 0 */
};
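/* Each OPC_* value above is the 41-bit encoding of one instruction with all
   operand fields zero; the suffix (A1, M1, I29, X2, ...) names the
   instruction format from the Itanium ISA manual.  The tcg_opc_* helpers
   below OR the operand fields (qualifying predicate, registers, immediates)
   into the remaining bits. */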
294 static inline uint64_t tcg_opc_a1(int qp
, uint64_t opc
, int r1
,
298 | ((r3
& 0x7f) << 20)
299 | ((r2
& 0x7f) << 13)
304 static inline uint64_t tcg_opc_a3(int qp
, uint64_t opc
, int r1
,
305 uint64_t imm
, int r3
)
308 | ((imm
& 0x80) << 29) /* s */
309 | ((imm
& 0x7f) << 13) /* imm7b */
310 | ((r3
& 0x7f) << 20)
315 static inline uint64_t tcg_opc_a4(int qp
, uint64_t opc
, int r1
,
316 uint64_t imm
, int r3
)
319 | ((imm
& 0x2000) << 23) /* s */
320 | ((imm
& 0x1f80) << 20) /* imm6d */
321 | ((imm
& 0x007f) << 13) /* imm7b */
322 | ((r3
& 0x7f) << 20)
327 static inline uint64_t tcg_opc_a5(int qp
, uint64_t opc
, int r1
,
328 uint64_t imm
, int r3
)
331 | ((imm
& 0x200000) << 15) /* s */
332 | ((imm
& 0x1f0000) << 6) /* imm5c */
333 | ((imm
& 0x00ff80) << 20) /* imm9d */
334 | ((imm
& 0x00007f) << 13) /* imm7b */
335 | ((r3
& 0x03) << 20)
340 static inline uint64_t tcg_opc_a6(int qp
, uint64_t opc
, int p1
,
341 int p2
, int r2
, int r3
)
344 | ((p2
& 0x3f) << 27)
345 | ((r3
& 0x7f) << 20)
346 | ((r2
& 0x7f) << 13)
351 static inline uint64_t tcg_opc_b1(int qp
, uint64_t opc
, uint64_t imm
)
354 | ((imm
& 0x100000) << 16) /* s */
355 | ((imm
& 0x0fffff) << 13) /* imm20b */
359 static inline uint64_t tcg_opc_b3(int qp
, uint64_t opc
, int b1
, uint64_t imm
)
362 | ((imm
& 0x100000) << 16) /* s */
363 | ((imm
& 0x0fffff) << 13) /* imm20b */
368 static inline uint64_t tcg_opc_b4(int qp
, uint64_t opc
, int b2
)
375 static inline uint64_t tcg_opc_b5(int qp
, uint64_t opc
, int b1
, int b2
)
384 static inline uint64_t tcg_opc_b9(int qp
, uint64_t opc
, uint64_t imm
)
387 | ((imm
& 0x100000) << 16) /* i */
388 | ((imm
& 0x0fffff) << 6) /* imm20a */
392 static inline uint64_t tcg_opc_f1(int qp
, uint64_t opc
, int f1
,
393 int f3
, int f4
, int f2
)
396 | ((f4
& 0x7f) << 27)
397 | ((f3
& 0x7f) << 20)
398 | ((f2
& 0x7f) << 13)
403 static inline uint64_t tcg_opc_f2(int qp
, uint64_t opc
, int f1
,
404 int f3
, int f4
, int f2
)
407 | ((f4
& 0x7f) << 27)
408 | ((f3
& 0x7f) << 20)
409 | ((f2
& 0x7f) << 13)
414 static inline uint64_t tcg_opc_f6(int qp
, uint64_t opc
, int f1
,
415 int p2
, int f2
, int f3
)
418 | ((p2
& 0x3f) << 27)
419 | ((f3
& 0x7f) << 20)
420 | ((f2
& 0x7f) << 13)
425 static inline uint64_t tcg_opc_f10(int qp
, uint64_t opc
, int f1
, int f2
)
428 | ((f2
& 0x7f) << 13)
433 static inline uint64_t tcg_opc_f11(int qp
, uint64_t opc
, int f1
, int f2
)
436 | ((f2
& 0x7f) << 13)
441 static inline uint64_t tcg_opc_f16(int qp
, uint64_t opc
, uint64_t imm
)
444 | ((imm
& 0x100000) << 16) /* i */
445 | ((imm
& 0x0fffff) << 6) /* imm20a */
449 static inline uint64_t tcg_opc_i2(int qp
, uint64_t opc
, int r1
,
453 | ((r3
& 0x7f) << 20)
454 | ((r2
& 0x7f) << 13)
459 static inline uint64_t tcg_opc_i3(int qp
, uint64_t opc
, int r1
,
463 | ((mbtype
& 0x0f) << 20)
464 | ((r2
& 0x7f) << 13)
469 static inline uint64_t tcg_opc_i5(int qp
, uint64_t opc
, int r1
,
473 | ((r3
& 0x7f) << 20)
474 | ((r2
& 0x7f) << 13)
479 static inline uint64_t tcg_opc_i7(int qp
, uint64_t opc
, int r1
,
483 | ((r3
& 0x7f) << 20)
484 | ((r2
& 0x7f) << 13)
489 static inline uint64_t tcg_opc_i10(int qp
, uint64_t opc
, int r1
,
490 int r2
, int r3
, uint64_t count
)
493 | ((count
& 0x3f) << 27)
494 | ((r3
& 0x7f) << 20)
495 | ((r2
& 0x7f) << 13)
500 static inline uint64_t tcg_opc_i11(int qp
, uint64_t opc
, int r1
,
501 int r3
, uint64_t pos
, uint64_t len
)
504 | ((len
& 0x3f) << 27)
505 | ((r3
& 0x7f) << 20)
506 | ((pos
& 0x3f) << 14)
511 static inline uint64_t tcg_opc_i12(int qp
, uint64_t opc
, int r1
,
512 int r2
, uint64_t pos
, uint64_t len
)
515 | ((len
& 0x3f) << 27)
516 | ((pos
& 0x3f) << 20)
517 | ((r2
& 0x7f) << 13)
522 static inline uint64_t tcg_opc_i14(int qp
, uint64_t opc
, int r1
, uint64_t imm
,
523 int r3
, uint64_t pos
, uint64_t len
)
526 | ((imm
& 0x01) << 36)
527 | ((len
& 0x3f) << 27)
528 | ((r3
& 0x7f) << 20)
529 | ((pos
& 0x3f) << 14)
534 static inline uint64_t tcg_opc_i15(int qp
, uint64_t opc
, int r1
, int r2
,
535 int r3
, uint64_t pos
, uint64_t len
)
538 | ((pos
& 0x3f) << 31)
539 | ((len
& 0x0f) << 27)
540 | ((r3
& 0x7f) << 20)
541 | ((r2
& 0x7f) << 13)
546 static inline uint64_t tcg_opc_i18(int qp
, uint64_t opc
, uint64_t imm
)
549 | ((imm
& 0x100000) << 16) /* i */
550 | ((imm
& 0x0fffff) << 6) /* imm20a */
554 static inline uint64_t tcg_opc_i21(int qp
, uint64_t opc
, int b1
,
555 int r2
, uint64_t imm
)
558 | ((imm
& 0x1ff) << 24)
559 | ((r2
& 0x7f) << 13)
564 static inline uint64_t tcg_opc_i22(int qp
, uint64_t opc
, int r1
, int b2
)
572 static inline uint64_t tcg_opc_i26(int qp
, uint64_t opc
, int ar3
, int r2
)
575 | ((ar3
& 0x7f) << 20)
576 | ((r2
& 0x7f) << 13)
580 static inline uint64_t tcg_opc_i29(int qp
, uint64_t opc
, int r1
, int r3
)
583 | ((r3
& 0x7f) << 20)
588 static inline uint64_t tcg_opc_l2(uint64_t imm
)
590 return (imm
& 0x7fffffffffc00000ull
) >> 22;
593 static inline uint64_t tcg_opc_l3(uint64_t imm
)
595 return (imm
& 0x07fffffffff00000ull
) >> 18;
598 #define tcg_opc_l4 tcg_opc_l3
600 static inline uint64_t tcg_opc_m1(int qp
, uint64_t opc
, int r1
, int r3
)
603 | ((r3
& 0x7f) << 20)
608 static inline uint64_t tcg_opc_m3(int qp
, uint64_t opc
, int r1
,
609 int r3
, uint64_t imm
)
612 | ((imm
& 0x100) << 28) /* s */
613 | ((imm
& 0x080) << 20) /* i */
614 | ((imm
& 0x07f) << 13) /* imm7b */
615 | ((r3
& 0x7f) << 20)
620 static inline uint64_t tcg_opc_m4(int qp
, uint64_t opc
, int r2
, int r3
)
623 | ((r3
& 0x7f) << 20)
624 | ((r2
& 0x7f) << 13)
628 static inline uint64_t tcg_opc_m18(int qp
, uint64_t opc
, int f1
, int r2
)
631 | ((r2
& 0x7f) << 13)
636 static inline uint64_t tcg_opc_m19(int qp
, uint64_t opc
, int r1
, int f2
)
639 | ((f2
& 0x7f) << 13)
644 static inline uint64_t tcg_opc_m34(int qp
, uint64_t opc
, int r1
,
645 int sof
, int sol
, int sor
)
648 | ((sor
& 0x0f) << 27)
649 | ((sol
& 0x7f) << 20)
650 | ((sof
& 0x7f) << 13)
655 static inline uint64_t tcg_opc_m48(int qp
, uint64_t opc
, uint64_t imm
)
658 | ((imm
& 0x100000) << 16) /* i */
659 | ((imm
& 0x0fffff) << 6) /* imm20a */
663 static inline uint64_t tcg_opc_x2(int qp
, uint64_t opc
,
664 int r1
, uint64_t imm
)
667 | ((imm
& 0x8000000000000000ull
) >> 27) /* i */
668 | (imm
& 0x0000000000200000ull
) /* ic */
669 | ((imm
& 0x00000000001f0000ull
) << 6) /* imm5c */
670 | ((imm
& 0x000000000000ff80ull
) << 20) /* imm9d */
671 | ((imm
& 0x000000000000007full
) << 13) /* imm7b */
676 static inline uint64_t tcg_opc_x3(int qp
, uint64_t opc
, uint64_t imm
)
679 | ((imm
& 0x0800000000000000ull
) >> 23) /* i */
680 | ((imm
& 0x00000000000fffffull
) << 13) /* imm20b */
684 static inline uint64_t tcg_opc_x4(int qp
, uint64_t opc
, int b1
, uint64_t imm
)
687 | ((imm
& 0x0800000000000000ull
) >> 23) /* i */
688 | ((imm
& 0x00000000000fffffull
) << 13) /* imm20b */
/*
 * Relocations - Note that we never encode branches elsewhere than slot 2.
 */

static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
{
    uint64_t imm = target - pc;

    pc->hi = (pc->hi & 0xf700000fffffffffull)
             | ((imm & 0x100000) << 39)  /* s */
             | ((imm & 0x0fffff) << 36); /* imm20b */
}

static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
{
    int64_t high = pc->hi;

    return ((high >> 39) & 0x100000) + /* s */
           ((high >> 36) & 0x0fffff);  /* imm20b */
}

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    assert(type == R_IA64_PCREL21B);
    reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
}
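/* The 21-bit branch displacement counts 16-byte bundles and is stored as a
   sign bit plus a 20-bit field.  Because branches are always placed in
   slot 2, both pieces live entirely in the high word of the bundle, which
   is what the two helpers above read back and rewrite. */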
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S22;
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
#endif
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           r0 is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
                                         const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;

    if (ct & TCG_CT_CONST) {
        return 1;
    } else if ((ct & TCG_CT_CONST_ZERO) && val == 0) {
        return 1;
    } else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10) {
        return 1;
    }
    return 0;
}
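/* Constraint letters as used by ia64_op_defs below: 'r' any register,
   'I' a signed 22-bit immediate (addl), 'Z' a constant zero (mapped onto
   r0), and 'S' a register excluding the ones clobbered while setting up a
   softmmu helper call. */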
static tcg_insn_unit *tb_ret_addr;

static inline void tcg_out_bundle(TCGContext *s, int template,
                                  uint64_t slot0, uint64_t slot1,
                                  uint64_t slot2)
{
    template &= 0x1f;          /* 5 bits */
    slot0 &= 0x1ffffffffffull; /* 41 bits */
    slot1 &= 0x1ffffffffffull; /* 41 bits */
    slot2 &= 0x1ffffffffffull; /* 41 bits */

    *s->code_ptr++ = (tcg_insn_unit){
        (slot1 << 46) | (slot0 << 5) | template,
        (slot2 << 23) | (slot1 >> 18)
    };
}
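/* An IA-64 bundle is 128 bits: a 5-bit template followed by three 41-bit
   instruction slots.  tcg_insn_unit holds the bundle as two 64-bit words,
   so slot 1 ends up split across them: bits 46-63 of the low word and
   bits 0-22 of the high word, as packed above. */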
static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
{
    return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    tcg_out_bundle(s, mmI,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_mov_a(TCG_REG_P0, ret, arg));
}

static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
{
    assert(src == sextract64(src, 0, 22));
    return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg reg, tcg_target_long arg)
{
    tcg_out_bundle(s, mLX,
                   INSN_NOP_M,
                   tcg_opc_l2 (arg),
                   tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
}
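/* tcg_out_movi always materializes the constant with a single movl, which
   needs the X unit plus the adjacent L slot for the 64-bit immediate
   (hence the mLX template); small constants elsewhere go through
   tcg_opc_movi_a, i.e. an addl with a 22-bit immediate, instead. */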
static void tcg_out_br(TCGContext *s, TCGLabel *l)
{
    uint64_t imm;

    /* We take care here not to modify the branch target by reading
       the existing value and using it again.  This ensures that caches and
       memory are kept coherent during retranslation.  */
    if (l->has_value) {
        imm = l->u.value_ptr - s->code_ptr;
    } else {
        imm = get_reloc_pcrel21b_slot2(s->code_ptr);
        tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, l, 0);
    }

    tcg_out_bundle(s, mmB,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
}
static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
{
    uintptr_t func = desc->lo, gp = desc->hi, disp;

    /* Look through the function descriptor.  */
    tcg_out_bundle(s, mlx,
                   INSN_NOP_M,
                   tcg_opc_l2 (gp),
                   tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
    disp = (tcg_insn_unit *)func - s->code_ptr;
    tcg_out_bundle(s, mLX,
                   INSN_NOP_M,
                   tcg_opc_l4 (disp),
                   tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
                               TCG_REG_B0, disp));
}
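/* On IA-64 a C function pointer addresses a two-word function descriptor:
   the first word is the code entry point and the second the callee's gp.
   The sequence above loads the callee's gp into r1 and then performs a
   long relative call (brl.call) to the entry point, leaving the return
   address in b0. */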
static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
{
    uint64_t imm, opc1;

    /* At least arg == 0 is a common operation. */
    if (arg == sextract64(arg, 0, 22)) {
        opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
        opc1 = INSN_NOP_M;
    }

    imm = tb_ret_addr - s->code_ptr;

    tcg_out_bundle(s, mLX,
                   opc1,
                   tcg_opc_l3 (imm),
                   tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
}
static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
{
    if (s->tb_jmp_offset) {
        /* direct jump method */
        tcg_abort();
    } else {
        /* indirect jump method */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
                     (tcg_target_long)(s->tb_next + arg));
        tcg_out_bundle(s, MmI,
                       tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
                                   TCG_REG_R2, TCG_REG_R2),
                       INSN_NOP_M,
                       tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
                                   TCG_REG_R2, 0));
        tcg_out_bundle(s, mmB,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
                                   TCG_REG_B6));
    }
    s->tb_next_offset[arg] = tcg_current_code_size(s);
}
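/* The indirect jump path loads the address of the next TB from
   s->tb_next[arg] into r2, moves it into b6 and branches there;
   tb_next_offset records where this sequence ends in the generated
   code. */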
static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
{
    tcg_out_bundle(s, mmI,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
    tcg_out_bundle(s, mmB,
                   INSN_NOP_M,
                   INSN_NOP_M,
                   tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
}
static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
                                  TCGArg arg1, tcg_target_long arg2)
{
    if (arg2 == ((int16_t)arg2 >> 2) << 2) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
                                  TCG_REG_R2, arg2, arg1),
                       tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                                   TCG_REG_R2, TCG_REG_R2, arg1),
                       tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    }
}

static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
                                  TCGArg arg1, tcg_target_long arg2)
{
    if (arg2 == ((int16_t)arg2 >> 2) << 2) {
        tcg_out_bundle(s, MmI,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
                                  TCG_REG_R2, arg2, arg1),
                       tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
        tcg_out_bundle(s, MmI,
                       tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
                                   TCG_REG_R2, TCG_REG_R2, arg1),
                       tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
                       INSN_NOP_I);
    }
}
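/* IA-64 loads and stores have no register+offset addressing mode, so both
   helpers first form the effective address in r2: with a short adds when
   the displacement fits its signed 14-bit immediate, otherwise with a full
   movi plus add. */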
static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, intptr_t arg2)
{
    if (type == TCG_TYPE_I32) {
        tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
    } else {
        tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
    }
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, intptr_t arg2)
{
    if (type == TCG_TYPE_I32) {
        tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
    } else {
        tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
    }
}
static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
                               TCGReg ret, TCGArg arg1, int const_arg1,
                               TCGArg arg2, int const_arg2)
{
    uint64_t opc1 = 0, opc2 = 0, opc3 = 0;

    if (const_arg2 && arg2 != 0) {
        opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
        arg2 = TCG_REG_R3;
    }
    if (const_arg1 && arg1 != 0) {
        if (opc_a3 && arg1 == (int8_t)arg1) {
            opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
        } else {
            opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
            arg1 = TCG_REG_R2;
        }
    }
    if (opc3 == 0) {
        opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
    }

    tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
                   opc1 ? opc1 : INSN_NOP_M,
                   opc2 ? opc2 : INSN_NOP_I,
                   opc3);
}
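/* tcg_out_alu packs up to three instructions into one bundle: optional
   constant loads into r2/r3 (opc1/opc2) and the operation itself (opc3).
   The A1 form is register-register; the A3 form takes an 8-bit immediate
   as its *first* operand, which is why arg1 is the operand tested against
   (int8_t) above and why tcg_out_op swaps the operands for the commutative
   and/or/xor cases.  When a constant load is present, the mII template is
   used, which per the naming convention above adds a stop before the final
   slot so the operation sees the updated r2/r3. */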
static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
                               TCGArg arg2, int const_arg2)
{
    if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
    } else {
        tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
    }
}

static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
                               int const_arg1, TCGArg arg2, int const_arg2)
{
    if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
        tcg_out_bundle(s, mmI,
                       INSN_NOP_M,
                       INSN_NOP_M,
                       tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
    } else {
        tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
                    arg1, const_arg1, arg2, const_arg2);
    }
}
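/* add/sub prefer a single adds when the constant operand fits the signed
   14-bit immediate of the A4 form (negating it for subtraction); anything
   else falls back to the generic tcg_out_alu path above. */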
1036 static inline void tcg_out_eqv(TCGContext
*s
, TCGArg ret
,
1037 TCGArg arg1
, int const_arg1
,
1038 TCGArg arg2
, int const_arg2
)
1040 tcg_out_bundle(s
, mII
,
1042 tcg_opc_a1 (TCG_REG_P0
, OPC_XOR_A1
, ret
, arg1
, arg2
),
1043 tcg_opc_a3 (TCG_REG_P0
, OPC_ANDCM_A3
, ret
, -1, ret
));
1046 static inline void tcg_out_nand(TCGContext
*s
, TCGArg ret
,
1047 TCGArg arg1
, int const_arg1
,
1048 TCGArg arg2
, int const_arg2
)
1050 tcg_out_bundle(s
, mII
,
1052 tcg_opc_a1 (TCG_REG_P0
, OPC_AND_A1
, ret
, arg1
, arg2
),
1053 tcg_opc_a3 (TCG_REG_P0
, OPC_ANDCM_A3
, ret
, -1, ret
));
1056 static inline void tcg_out_nor(TCGContext
*s
, TCGArg ret
,
1057 TCGArg arg1
, int const_arg1
,
1058 TCGArg arg2
, int const_arg2
)
1060 tcg_out_bundle(s
, mII
,
1062 tcg_opc_a1 (TCG_REG_P0
, OPC_OR_A1
, ret
, arg1
, arg2
),
1063 tcg_opc_a3 (TCG_REG_P0
, OPC_ANDCM_A3
, ret
, -1, ret
));
1066 static inline void tcg_out_orc(TCGContext
*s
, TCGArg ret
,
1067 TCGArg arg1
, int const_arg1
,
1068 TCGArg arg2
, int const_arg2
)
1070 tcg_out_bundle(s
, mII
,
1072 tcg_opc_a3 (TCG_REG_P0
, OPC_ANDCM_A3
, TCG_REG_R2
, -1, arg2
),
1073 tcg_opc_a1 (TCG_REG_P0
, OPC_OR_A1
, ret
, arg1
, TCG_REG_R2
));
1076 static inline void tcg_out_mul(TCGContext
*s
, TCGArg ret
,
1077 TCGArg arg1
, TCGArg arg2
)
1079 tcg_out_bundle(s
, mmI
,
1080 tcg_opc_m18(TCG_REG_P0
, OPC_SETF_SIG_M18
, TCG_REG_F6
, arg1
),
1081 tcg_opc_m18(TCG_REG_P0
, OPC_SETF_SIG_M18
, TCG_REG_F7
, arg2
),
1083 tcg_out_bundle(s
, mmF
,
1086 tcg_opc_f2 (TCG_REG_P0
, OPC_XMA_L_F2
, TCG_REG_F6
, TCG_REG_F6
,
1087 TCG_REG_F7
, TCG_REG_F0
));
1088 tcg_out_bundle(s
, miI
,
1089 tcg_opc_m19(TCG_REG_P0
, OPC_GETF_SIG_M19
, ret
, TCG_REG_F6
),
1094 static inline void tcg_out_sar_i32(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1095 TCGArg arg2
, int const_arg2
)
1098 tcg_out_bundle(s
, miI
,
1101 tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_I11
,
1102 ret
, arg1
, arg2
, 31 - arg2
));
1104 tcg_out_bundle(s
, mII
,
1105 tcg_opc_a3 (TCG_REG_P0
, OPC_AND_A3
,
1106 TCG_REG_R3
, 0x1f, arg2
),
1107 tcg_opc_i29(TCG_REG_P0
, OPC_SXT4_I29
, TCG_REG_R2
, arg1
),
1108 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_I5
, ret
,
1109 TCG_REG_R2
, TCG_REG_R3
));
1113 static inline void tcg_out_sar_i64(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1114 TCGArg arg2
, int const_arg2
)
1117 tcg_out_bundle(s
, miI
,
1120 tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_I11
,
1121 ret
, arg1
, arg2
, 63 - arg2
));
1123 tcg_out_bundle(s
, miI
,
1126 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_I5
, ret
, arg1
, arg2
));
1130 static inline void tcg_out_shl_i32(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1131 TCGArg arg2
, int const_arg2
)
1134 tcg_out_bundle(s
, miI
,
1137 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
, ret
,
1138 arg1
, 63 - arg2
, 31 - arg2
));
1140 tcg_out_bundle(s
, mII
,
1142 tcg_opc_a3 (TCG_REG_P0
, OPC_AND_A3
, TCG_REG_R2
,
1144 tcg_opc_i7 (TCG_REG_P0
, OPC_SHL_I7
, ret
,
1149 static inline void tcg_out_shl_i64(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1150 TCGArg arg2
, int const_arg2
)
1153 tcg_out_bundle(s
, miI
,
1156 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
, ret
,
1157 arg1
, 63 - arg2
, 63 - arg2
));
1159 tcg_out_bundle(s
, miI
,
1162 tcg_opc_i7 (TCG_REG_P0
, OPC_SHL_I7
, ret
,
1167 static inline void tcg_out_shr_i32(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1168 TCGArg arg2
, int const_arg2
)
1171 tcg_out_bundle(s
, miI
,
1174 tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_U_I11
, ret
,
1175 arg1
, arg2
, 31 - arg2
));
1177 tcg_out_bundle(s
, mII
,
1178 tcg_opc_a3 (TCG_REG_P0
, OPC_AND_A3
, TCG_REG_R3
,
1180 tcg_opc_i29(TCG_REG_P0
, OPC_ZXT4_I29
, TCG_REG_R2
, arg1
),
1181 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_U_I5
, ret
,
1182 TCG_REG_R2
, TCG_REG_R3
));
1186 static inline void tcg_out_shr_i64(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1187 TCGArg arg2
, int const_arg2
)
1190 tcg_out_bundle(s
, miI
,
1193 tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_U_I11
, ret
,
1194 arg1
, arg2
, 63 - arg2
));
1196 tcg_out_bundle(s
, miI
,
1199 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_U_I5
, ret
,
1204 static inline void tcg_out_rotl_i32(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1205 TCGArg arg2
, int const_arg2
)
1208 tcg_out_bundle(s
, mII
,
1210 tcg_opc_i2 (TCG_REG_P0
, OPC_UNPACK4_L_I2
,
1211 TCG_REG_R2
, arg1
, arg1
),
1212 tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_U_I11
, ret
,
1213 TCG_REG_R2
, 32 - arg2
, 31));
1215 tcg_out_bundle(s
, miI
,
1217 tcg_opc_i2 (TCG_REG_P0
, OPC_UNPACK4_L_I2
,
1218 TCG_REG_R2
, arg1
, arg1
),
1219 tcg_opc_a3 (TCG_REG_P0
, OPC_AND_A3
, TCG_REG_R3
,
1221 tcg_out_bundle(s
, mII
,
1223 tcg_opc_a3 (TCG_REG_P0
, OPC_SUB_A3
, TCG_REG_R3
,
1225 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_U_I5
, ret
,
1226 TCG_REG_R2
, TCG_REG_R3
));
1230 static inline void tcg_out_rotl_i64(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1231 TCGArg arg2
, int const_arg2
)
1234 tcg_out_bundle(s
, miI
,
1237 tcg_opc_i10(TCG_REG_P0
, OPC_SHRP_I10
, ret
, arg1
,
1238 arg1
, 0x40 - arg2
));
1240 tcg_out_bundle(s
, mII
,
1241 tcg_opc_a3 (TCG_REG_P0
, OPC_SUB_A3
, TCG_REG_R2
,
1243 tcg_opc_i7 (TCG_REG_P0
, OPC_SHL_I7
, TCG_REG_R3
,
1245 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_U_I5
, TCG_REG_R2
,
1247 tcg_out_bundle(s
, miI
,
1250 tcg_opc_a1 (TCG_REG_P0
, OPC_OR_A1
, ret
,
1251 TCG_REG_R2
, TCG_REG_R3
));
1255 static inline void tcg_out_rotr_i32(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1256 TCGArg arg2
, int const_arg2
)
1259 tcg_out_bundle(s
, mII
,
1261 tcg_opc_i2 (TCG_REG_P0
, OPC_UNPACK4_L_I2
,
1262 TCG_REG_R2
, arg1
, arg1
),
1263 tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_U_I11
, ret
,
1264 TCG_REG_R2
, arg2
, 31));
1266 tcg_out_bundle(s
, mII
,
1267 tcg_opc_a3 (TCG_REG_P0
, OPC_AND_A3
, TCG_REG_R3
,
1269 tcg_opc_i2 (TCG_REG_P0
, OPC_UNPACK4_L_I2
,
1270 TCG_REG_R2
, arg1
, arg1
),
1271 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_U_I5
, ret
,
1272 TCG_REG_R2
, TCG_REG_R3
));
1276 static inline void tcg_out_rotr_i64(TCGContext
*s
, TCGArg ret
, TCGArg arg1
,
1277 TCGArg arg2
, int const_arg2
)
1280 tcg_out_bundle(s
, miI
,
1283 tcg_opc_i10(TCG_REG_P0
, OPC_SHRP_I10
, ret
, arg1
,
1286 tcg_out_bundle(s
, mII
,
1287 tcg_opc_a3 (TCG_REG_P0
, OPC_SUB_A3
, TCG_REG_R2
,
1289 tcg_opc_i5 (TCG_REG_P0
, OPC_SHR_U_I5
, TCG_REG_R3
,
1291 tcg_opc_i7 (TCG_REG_P0
, OPC_SHL_I7
, TCG_REG_R2
,
1293 tcg_out_bundle(s
, miI
,
1296 tcg_opc_a1 (TCG_REG_P0
, OPC_OR_A1
, ret
,
1297 TCG_REG_R2
, TCG_REG_R3
));
1301 static const uint64_t opc_ext_i29
[8] = {
1302 OPC_ZXT1_I29
, OPC_ZXT2_I29
, OPC_ZXT4_I29
, 0,
1303 OPC_SXT1_I29
, OPC_SXT2_I29
, OPC_SXT4_I29
, 0
1306 static inline uint64_t tcg_opc_ext_i(int qp
, TCGMemOp opc
, TCGReg d
, TCGReg s
)
1308 if ((opc
& MO_SIZE
) == MO_64
) {
1309 return tcg_opc_mov_a(qp
, d
, s
);
1311 return tcg_opc_i29(qp
, opc_ext_i29
[opc
& MO_SSIZE
], d
, s
);
1315 static inline void tcg_out_ext(TCGContext
*s
, uint64_t opc_i29
,
1316 TCGArg ret
, TCGArg arg
)
1318 tcg_out_bundle(s
, miI
,
1321 tcg_opc_i29(TCG_REG_P0
, opc_i29
, ret
, arg
));
1324 static inline uint64_t tcg_opc_bswap64_i(int qp
, TCGReg d
, TCGReg s
)
1326 return tcg_opc_i3(qp
, OPC_MUX1_I3
, d
, s
, 0xb);
1329 static inline void tcg_out_bswap16(TCGContext
*s
, TCGArg ret
, TCGArg arg
)
1331 tcg_out_bundle(s
, mII
,
1333 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
, ret
, arg
, 15, 15),
1334 tcg_opc_bswap64_i(TCG_REG_P0
, ret
, ret
));
1337 static inline void tcg_out_bswap32(TCGContext
*s
, TCGArg ret
, TCGArg arg
)
1339 tcg_out_bundle(s
, mII
,
1341 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
, ret
, arg
, 31, 31),
1342 tcg_opc_bswap64_i(TCG_REG_P0
, ret
, ret
));
1345 static inline void tcg_out_bswap64(TCGContext
*s
, TCGArg ret
, TCGArg arg
)
1347 tcg_out_bundle(s
, miI
,
1350 tcg_opc_bswap64_i(TCG_REG_P0
, ret
, arg
));
static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
                                   TCGArg a2, int const_a2, int pos, int len)
{
    uint64_t i1 = 0, i2 = 0;
    int cpos = 63 - pos, lm1 = len - 1;

    if (const_a2) {
        /* Truncate the value of a constant a2 to the width of the field.  */
        int mask = (1u << len) - 1;
        a2 &= mask;

        if (a2 == 0 || a2 == mask) {
            /* 1-bit signed constant inserted into register.  */
            i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
        } else {
            /* Otherwise, load any constant into a temporary.  Do this into
               the first I slot to help out with cross-unit delays.  */
            i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
            a2 = TCG_REG_R2;
        }
    }
    if (i2 == 0) {
        i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
    }
    tcg_out_bundle(s, (i1 ? mII : miI),
                   INSN_NOP_M,
                   i1 ? i1 : INSN_NOP_I,
                   i2);
}
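/* The dep position operand is passed as 63 - pos and the length as len - 1
   (cpos/lm1 above).  The immediate form (DEP_I14) can only insert the
   constants 0 and -1, so any other constant value is first loaded into r2
   and inserted with the register form (DEP_I15). */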
1383 static inline uint64_t tcg_opc_cmp_a(int qp
, TCGCond cond
, TCGArg arg1
,
1384 TCGArg arg2
, int cmp4
)
1386 uint64_t opc_eq_a6
, opc_lt_a6
, opc_ltu_a6
;
1389 opc_eq_a6
= OPC_CMP4_EQ_A6
;
1390 opc_lt_a6
= OPC_CMP4_LT_A6
;
1391 opc_ltu_a6
= OPC_CMP4_LTU_A6
;
1393 opc_eq_a6
= OPC_CMP_EQ_A6
;
1394 opc_lt_a6
= OPC_CMP_LT_A6
;
1395 opc_ltu_a6
= OPC_CMP_LTU_A6
;
1400 return tcg_opc_a6 (qp
, opc_eq_a6
, TCG_REG_P6
, TCG_REG_P7
, arg1
, arg2
);
1402 return tcg_opc_a6 (qp
, opc_eq_a6
, TCG_REG_P7
, TCG_REG_P6
, arg1
, arg2
);
1404 return tcg_opc_a6 (qp
, opc_lt_a6
, TCG_REG_P6
, TCG_REG_P7
, arg1
, arg2
);
1406 return tcg_opc_a6 (qp
, opc_ltu_a6
, TCG_REG_P6
, TCG_REG_P7
, arg1
, arg2
);
1408 return tcg_opc_a6 (qp
, opc_lt_a6
, TCG_REG_P7
, TCG_REG_P6
, arg1
, arg2
);
1410 return tcg_opc_a6 (qp
, opc_ltu_a6
, TCG_REG_P7
, TCG_REG_P6
, arg1
, arg2
);
1412 return tcg_opc_a6 (qp
, opc_lt_a6
, TCG_REG_P7
, TCG_REG_P6
, arg2
, arg1
);
1414 return tcg_opc_a6 (qp
, opc_ltu_a6
, TCG_REG_P7
, TCG_REG_P6
, arg2
, arg1
);
1416 return tcg_opc_a6 (qp
, opc_lt_a6
, TCG_REG_P6
, TCG_REG_P7
, arg2
, arg1
);
1418 return tcg_opc_a6 (qp
, opc_ltu_a6
, TCG_REG_P6
, TCG_REG_P7
, arg2
, arg1
);
1425 static inline void tcg_out_brcond(TCGContext
*s
, TCGCond cond
, TCGReg arg1
,
1426 TCGReg arg2
, TCGLabel
*l
, int cmp4
)
    /* We take care here not to modify the branch target by reading
       the existing value and using it again.  This ensures that caches and
       memory are kept coherent during retranslation.  */
1434 imm
= l
->u
.value_ptr
- s
->code_ptr
;
1436 imm
= get_reloc_pcrel21b_slot2(s
->code_ptr
);
1437 tcg_out_reloc(s
, s
->code_ptr
, R_IA64_PCREL21B
, l
, 0);
1440 tcg_out_bundle(s
, miB
,
1442 tcg_opc_cmp_a(TCG_REG_P0
, cond
, arg1
, arg2
, cmp4
),
1443 tcg_opc_b1(TCG_REG_P6
, OPC_BR_DPTK_FEW_B1
, imm
));
1446 static inline void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, TCGArg ret
,
1447 TCGArg arg1
, TCGArg arg2
, int cmp4
)
1449 tcg_out_bundle(s
, MmI
,
1450 tcg_opc_cmp_a(TCG_REG_P0
, cond
, arg1
, arg2
, cmp4
),
1451 tcg_opc_movi_a(TCG_REG_P6
, ret
, 1),
1452 tcg_opc_movi_a(TCG_REG_P7
, ret
, 0));
1455 static inline void tcg_out_movcond(TCGContext
*s
, TCGCond cond
, TCGArg ret
,
1456 TCGArg c1
, TCGArg c2
,
1457 TCGArg v1
, int const_v1
,
1458 TCGArg v2
, int const_v2
, int cmp4
)
1460 uint64_t opc1
, opc2
;
1463 opc1
= tcg_opc_movi_a(TCG_REG_P6
, ret
, v1
);
1464 } else if (ret
== v1
) {
1467 opc1
= tcg_opc_mov_a(TCG_REG_P6
, ret
, v1
);
1470 opc2
= tcg_opc_movi_a(TCG_REG_P7
, ret
, v2
);
1471 } else if (ret
== v2
) {
1474 opc2
= tcg_opc_mov_a(TCG_REG_P7
, ret
, v2
);
1477 tcg_out_bundle(s
, MmI
,
1478 tcg_opc_cmp_a(TCG_REG_P0
, cond
, c1
, c2
, cmp4
),
#if defined(CONFIG_SOFTMMU)
/* We're expecting to use a signed 22-bit immediate add.  */
QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
                  > 0x1fffff);
/* Load and compare a TLB entry, and return the result in (p6, p7).
   R2 is loaded with the addend TLB entry.
   R57 is loaded with the address, zero extended on 32-bit targets.
   R1, R3 are clobbered, leaving R56 free for...
   BSWAP_1, BSWAP_2 and I-slot insns for swapping data for store.  */
static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
                                    TCGMemOp s_bits, int off_rw, int off_add,
                                    uint64_t bswap1, uint64_t bswap2)
{
    /*
       extr.u r3 = addr_reg, ...         # extract tlb page
       zxt4 r57 = addr_reg               # or mov for 64-bit guest
       ...
       shl r3 = r3, cteb                 # via dep.z
       dep r1 = 0, r57, ...              # zero page ofs, keep align
       ...
       ld4 r3 = [r2], off_add-off_rw     # or ld8 for 64-bit guest
       ...
       cmp.eq p6, p7 = r3, r58
       ...
     */
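    /* The comparison result is left in the predicate pair (p6, p7): p6
       means the TLB entry matched (fast path) and p7 that it did not, so
       the qemu_ld/st code below predicates its memory access on p6 and its
       br.call to the helper on p7. */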
1520 tcg_out_bundle(s
, miI
,
1521 tcg_opc_movi_a(TCG_REG_P0
, TCG_REG_R2
, off_rw
),
1522 tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_U_I11
, TCG_REG_R3
,
1523 addr_reg
, TARGET_PAGE_BITS
, CPU_TLB_BITS
- 1),
1524 tcg_opc_ext_i(TCG_REG_P0
,
1525 TARGET_LONG_BITS
== 32 ? MO_UL
: MO_Q
,
1526 TCG_REG_R57
, addr_reg
));
1527 tcg_out_bundle(s
, miI
,
1528 tcg_opc_a1 (TCG_REG_P0
, OPC_ADD_A1
, TCG_REG_R2
,
1529 TCG_REG_R2
, TCG_AREG0
),
1530 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
, TCG_REG_R3
,
1531 TCG_REG_R3
, 63 - CPU_TLB_ENTRY_BITS
,
1532 63 - CPU_TLB_ENTRY_BITS
),
1533 tcg_opc_i14(TCG_REG_P0
, OPC_DEP_I14
, TCG_REG_R1
, 0,
1534 TCG_REG_R57
, 63 - s_bits
,
1535 TARGET_PAGE_BITS
- s_bits
- 1));
1536 tcg_out_bundle(s
, MmI
,
1537 tcg_opc_a1 (TCG_REG_P0
, OPC_ADD_A1
,
1538 TCG_REG_R2
, TCG_REG_R2
, TCG_REG_R3
),
1539 tcg_opc_m3 (TCG_REG_P0
,
1540 (TARGET_LONG_BITS
== 32
1541 ? OPC_LD4_M3
: OPC_LD8_M3
), TCG_REG_R3
,
1542 TCG_REG_R2
, off_add
- off_rw
),
1544 tcg_out_bundle(s
, mmI
,
1545 tcg_opc_m1 (TCG_REG_P0
, OPC_LD8_M1
, TCG_REG_R2
, TCG_REG_R2
),
1546 tcg_opc_a6 (TCG_REG_P0
, OPC_CMP_EQ_A6
, TCG_REG_P6
,
1547 TCG_REG_P7
, TCG_REG_R1
, TCG_REG_R3
),
typedef struct TCGLabelQemuLdst {
    bool is_ld;
    TCGMemOp size;
    tcg_insn_unit *label_ptr;     /* label pointers to be updated */
    struct TCGLabelQemuLdst *next;
} TCGLabelQemuLdst;

typedef struct TCGBackendData {
    TCGLabelQemuLdst *labels;
} TCGBackendData;

static inline void tcg_out_tb_init(TCGContext *s)
{
    s->be->labels = NULL;
}

static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
                                tcg_insn_unit *label_ptr)
{
    TCGBackendData *be = s->be;
    TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));

    l->is_ld = is_ld;
    l->size = opc & MO_SIZE;
    l->label_ptr = label_ptr;
    l->next = be->labels;
    be->labels = l;
}
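/* Each record added here remembers the bundle whose slot-2 branch must be
   retargeted.  tcg_out_tb_finalize() below emits at most one shared
   out-of-line thunk per (is_ld, size) pair and patches every recorded
   branch to point at it via reloc_pcrel21b_slot2(). */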
1580 static void tcg_out_tb_finalize(TCGContext
*s
)
1582 static const void * const helpers
[8] = {
1587 helper_ret_ldub_mmu
,
1592 tcg_insn_unit
*thunks
[8] = { };
1593 TCGLabelQemuLdst
*l
;
1595 for (l
= s
->be
->labels
; l
!= NULL
; l
= l
->next
) {
1596 long x
= l
->is_ld
* 4 + l
->size
;
1597 tcg_insn_unit
*dest
= thunks
[x
];
        /* The out-of-line thunks are all the same; load the return address
           from B0, load the GP, and branch to the code.  Note that we are
           always post-call, so the register window has rolled, so we're
           using incoming parameter register numbers, not outgoing.  */
1604 uintptr_t *desc
= (uintptr_t *)helpers
[x
];
1605 uintptr_t func
= desc
[0], gp
= desc
[1], disp
;
1607 thunks
[x
] = dest
= s
->code_ptr
;
1609 tcg_out_bundle(s
, mlx
,
1612 tcg_opc_x2 (TCG_REG_P0
, OPC_MOVL_X2
,
1614 tcg_out_bundle(s
, mii
,
1617 tcg_opc_i22(TCG_REG_P0
, OPC_MOV_I22
,
1618 l
->is_ld
? TCG_REG_R35
: TCG_REG_R36
,
1620 disp
= (tcg_insn_unit
*)func
- s
->code_ptr
;
1621 tcg_out_bundle(s
, mLX
,
1624 tcg_opc_x3 (TCG_REG_P0
, OPC_BRL_SPTK_MANY_X3
, disp
));
1627 reloc_pcrel21b_slot2(l
->label_ptr
, dest
);
1631 static inline void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
)
1633 static const uint64_t opc_ld_m1
[4] = {
1634 OPC_LD1_M1
, OPC_LD2_M1
, OPC_LD4_M1
, OPC_LD8_M1
1636 int addr_reg
, data_reg
, mem_index
;
1637 TCGMemOp opc
, s_bits
;
1638 uint64_t fin1
, fin2
;
1639 tcg_insn_unit
*label_ptr
;
1644 mem_index
= args
[3];
1645 s_bits
= opc
& MO_SIZE
;
1647 /* Read the TLB entry */
1648 tcg_out_qemu_tlb(s
, addr_reg
, s_bits
,
1649 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
),
1650 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
),
1651 INSN_NOP_I
, INSN_NOP_I
);
1653 /* P6 is the fast path, and P7 the slow path */
1656 if (opc
& MO_BSWAP
) {
1657 fin1
= tcg_opc_bswap64_i(TCG_REG_P0
, data_reg
, TCG_REG_R8
);
1658 if (s_bits
< MO_64
) {
1659 int shift
= 64 - (8 << s_bits
);
1660 fin2
= (opc
& MO_SIGN
? OPC_EXTR_I11
: OPC_EXTR_U_I11
);
1661 fin2
= tcg_opc_i11(TCG_REG_P0
, fin2
,
1662 data_reg
, data_reg
, shift
, 63 - shift
);
1665 fin1
= tcg_opc_ext_i(TCG_REG_P0
, opc
, data_reg
, TCG_REG_R8
);
1668 tcg_out_bundle(s
, mmI
,
1669 tcg_opc_mov_a(TCG_REG_P7
, TCG_REG_R56
, TCG_AREG0
),
1670 tcg_opc_a1 (TCG_REG_P6
, OPC_ADD_A1
, TCG_REG_R2
,
1671 TCG_REG_R2
, TCG_REG_R57
),
1672 tcg_opc_movi_a(TCG_REG_P7
, TCG_REG_R58
, mem_index
));
1673 label_ptr
= s
->code_ptr
;
1674 tcg_out_bundle(s
, miB
,
1675 tcg_opc_m1 (TCG_REG_P6
, opc_ld_m1
[s_bits
],
1676 TCG_REG_R8
, TCG_REG_R2
),
1678 tcg_opc_b3 (TCG_REG_P7
, OPC_BR_CALL_SPNT_FEW_B3
, TCG_REG_B0
,
1679 get_reloc_pcrel21b_slot2(label_ptr
)));
1681 add_qemu_ldst_label(s
, 1, opc
, label_ptr
);
1683 /* Note that we always use LE helper functions, so the bswap insns
1684 here for the fast path also apply to the slow path. */
1685 tcg_out_bundle(s
, (fin2
? mII
: miI
),
1688 fin2
? fin2
: INSN_NOP_I
);
1691 static inline void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
)
1693 static const uint64_t opc_st_m4
[4] = {
1694 OPC_ST1_M4
, OPC_ST2_M4
, OPC_ST4_M4
, OPC_ST8_M4
1696 TCGReg addr_reg
, data_reg
;
1698 uint64_t pre1
, pre2
;
1699 TCGMemOp opc
, s_bits
;
1700 tcg_insn_unit
*label_ptr
;
1705 mem_index
= args
[3];
1706 s_bits
= opc
& MO_SIZE
;
1708 /* Note that we always use LE helper functions, so the bswap insns
1709 that are here for the fast path also apply to the slow path,
1710 and move the data into the argument register. */
1712 if (opc
& MO_BSWAP
) {
1713 pre1
= tcg_opc_bswap64_i(TCG_REG_P0
, TCG_REG_R58
, data_reg
);
1714 if (s_bits
< MO_64
) {
1715 int shift
= 64 - (8 << s_bits
);
1716 pre2
= tcg_opc_i11(TCG_REG_P0
, OPC_EXTR_U_I11
,
1717 TCG_REG_R58
, TCG_REG_R58
, shift
, 63 - shift
);
1720 /* Just move the data into place for the slow path. */
1721 pre1
= tcg_opc_ext_i(TCG_REG_P0
, opc
, TCG_REG_R58
, data_reg
);
1724 tcg_out_qemu_tlb(s
, addr_reg
, s_bits
,
1725 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
),
1726 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
),
1729 /* P6 is the fast path, and P7 the slow path */
1730 tcg_out_bundle(s
, mmI
,
1731 tcg_opc_mov_a(TCG_REG_P7
, TCG_REG_R56
, TCG_AREG0
),
1732 tcg_opc_a1 (TCG_REG_P6
, OPC_ADD_A1
, TCG_REG_R2
,
1733 TCG_REG_R2
, TCG_REG_R57
),
1734 tcg_opc_movi_a(TCG_REG_P7
, TCG_REG_R59
, mem_index
));
1735 label_ptr
= s
->code_ptr
;
1736 tcg_out_bundle(s
, miB
,
1737 tcg_opc_m4 (TCG_REG_P6
, opc_st_m4
[s_bits
],
1738 TCG_REG_R58
, TCG_REG_R2
),
1740 tcg_opc_b3 (TCG_REG_P7
, OPC_BR_CALL_SPNT_FEW_B3
, TCG_REG_B0
,
1741 get_reloc_pcrel21b_slot2(label_ptr
)));
1743 add_qemu_ldst_label(s
, 0, opc
, label_ptr
);
1746 #else /* !CONFIG_SOFTMMU */
1747 # include "tcg-be-null.h"
1749 static inline void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
)
1751 static uint64_t const opc_ld_m1
[4] = {
1752 OPC_LD1_M1
, OPC_LD2_M1
, OPC_LD4_M1
, OPC_LD8_M1
1754 int addr_reg
, data_reg
;
1755 TCGMemOp opc
, s_bits
, bswap
;
1760 s_bits
= opc
& MO_SIZE
;
1761 bswap
= opc
& MO_BSWAP
;
1763 #if TARGET_LONG_BITS == 32
1764 if (GUEST_BASE
!= 0) {
1765 tcg_out_bundle(s
, mII
,
1767 tcg_opc_i29(TCG_REG_P0
, OPC_ZXT4_I29
,
1768 TCG_REG_R3
, addr_reg
),
1769 tcg_opc_a1 (TCG_REG_P0
, OPC_ADD_A1
, TCG_REG_R2
,
1770 TCG_GUEST_BASE_REG
, TCG_REG_R3
));
1772 tcg_out_bundle(s
, miI
,
1774 tcg_opc_i29(TCG_REG_P0
, OPC_ZXT4_I29
,
1775 TCG_REG_R2
, addr_reg
),
1780 if (!(opc
& MO_SIGN
)) {
1781 tcg_out_bundle(s
, miI
,
1782 tcg_opc_m1 (TCG_REG_P0
, opc_ld_m1
[s_bits
],
1783 data_reg
, TCG_REG_R2
),
1787 tcg_out_bundle(s
, mII
,
1788 tcg_opc_m1 (TCG_REG_P0
, opc_ld_m1
[s_bits
],
1789 data_reg
, TCG_REG_R2
),
1791 tcg_opc_ext_i(TCG_REG_P0
, opc
, data_reg
, data_reg
));
1793 } else if (s_bits
== MO_64
) {
1794 tcg_out_bundle(s
, mII
,
1795 tcg_opc_m1 (TCG_REG_P0
, opc_ld_m1
[s_bits
],
1796 data_reg
, TCG_REG_R2
),
1798 tcg_opc_bswap64_i(TCG_REG_P0
, data_reg
, data_reg
));
1800 if (s_bits
== MO_16
) {
1801 tcg_out_bundle(s
, mII
,
1802 tcg_opc_m1 (TCG_REG_P0
, opc_ld_m1
[s_bits
],
1803 data_reg
, TCG_REG_R2
),
1805 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1806 data_reg
, data_reg
, 15, 15));
1808 tcg_out_bundle(s
, mII
,
1809 tcg_opc_m1 (TCG_REG_P0
, opc_ld_m1
[s_bits
],
1810 data_reg
, TCG_REG_R2
),
1812 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1813 data_reg
, data_reg
, 31, 31));
1815 if (!(opc
& MO_SIGN
)) {
1816 tcg_out_bundle(s
, miI
,
1819 tcg_opc_bswap64_i(TCG_REG_P0
, data_reg
, data_reg
));
1821 tcg_out_bundle(s
, mII
,
1823 tcg_opc_bswap64_i(TCG_REG_P0
, data_reg
, data_reg
),
1824 tcg_opc_ext_i(TCG_REG_P0
, opc
, data_reg
, data_reg
));
1828 if (GUEST_BASE
!= 0) {
1829 tcg_out_bundle(s
, MmI
,
1830 tcg_opc_a1 (TCG_REG_P0
, OPC_ADD_A1
, TCG_REG_R2
,
1831 TCG_GUEST_BASE_REG
, addr_reg
),
1832 tcg_opc_m1 (TCG_REG_P0
, opc_ld_m1
[s_bits
],
1833 data_reg
, TCG_REG_R2
),
1836 tcg_out_bundle(s
, mmI
,
1838 tcg_opc_m1 (TCG_REG_P0
, opc_ld_m1
[s_bits
],
1839 data_reg
, addr_reg
),
1843 if (bswap
&& s_bits
== MO_16
) {
1844 tcg_out_bundle(s
, mII
,
1846 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1847 data_reg
, data_reg
, 15, 15),
1848 tcg_opc_bswap64_i(TCG_REG_P0
, data_reg
, data_reg
));
1849 } else if (bswap
&& s_bits
== MO_32
) {
1850 tcg_out_bundle(s
, mII
,
1852 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1853 data_reg
, data_reg
, 31, 31),
1854 tcg_opc_bswap64_i(TCG_REG_P0
, data_reg
, data_reg
));
1855 } else if (bswap
&& s_bits
== MO_64
) {
1856 tcg_out_bundle(s
, miI
,
1859 tcg_opc_bswap64_i(TCG_REG_P0
, data_reg
, data_reg
));
1861 if (opc
& MO_SIGN
) {
1862 tcg_out_bundle(s
, miI
,
1865 tcg_opc_ext_i(TCG_REG_P0
, opc
, data_reg
, data_reg
));
1870 static inline void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
)
1872 static uint64_t const opc_st_m4
[4] = {
1873 OPC_ST1_M4
, OPC_ST2_M4
, OPC_ST4_M4
, OPC_ST8_M4
1875 int addr_reg
, data_reg
;
1876 #if TARGET_LONG_BITS == 64
1877 uint64_t add_guest_base
;
1879 TCGMemOp opc
, s_bits
, bswap
;
1884 s_bits
= opc
& MO_SIZE
;
1885 bswap
= opc
& MO_BSWAP
;
1887 #if TARGET_LONG_BITS == 32
1888 if (GUEST_BASE
!= 0) {
1889 tcg_out_bundle(s
, mII
,
1891 tcg_opc_i29(TCG_REG_P0
, OPC_ZXT4_I29
,
1892 TCG_REG_R3
, addr_reg
),
1893 tcg_opc_a1 (TCG_REG_P0
, OPC_ADD_A1
, TCG_REG_R2
,
1894 TCG_GUEST_BASE_REG
, TCG_REG_R3
));
1896 tcg_out_bundle(s
, miI
,
1898 tcg_opc_i29(TCG_REG_P0
, OPC_ZXT4_I29
,
1899 TCG_REG_R2
, addr_reg
),
1904 if (s_bits
== MO_16
) {
1905 tcg_out_bundle(s
, mII
,
1907 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1908 TCG_REG_R3
, data_reg
, 15, 15),
1909 tcg_opc_bswap64_i(TCG_REG_P0
,
1910 TCG_REG_R3
, TCG_REG_R3
));
1911 data_reg
= TCG_REG_R3
;
1912 } else if (s_bits
== MO_32
) {
1913 tcg_out_bundle(s
, mII
,
1915 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1916 TCG_REG_R3
, data_reg
, 31, 31),
1917 tcg_opc_bswap64_i(TCG_REG_P0
,
1918 TCG_REG_R3
, TCG_REG_R3
));
1919 data_reg
= TCG_REG_R3
;
1920 } else if (s_bits
== MO_64
) {
1921 tcg_out_bundle(s
, miI
,
1924 tcg_opc_bswap64_i(TCG_REG_P0
, TCG_REG_R3
, data_reg
));
1925 data_reg
= TCG_REG_R3
;
1928 tcg_out_bundle(s
, mmI
,
1929 tcg_opc_m4 (TCG_REG_P0
, opc_st_m4
[s_bits
],
1930 data_reg
, TCG_REG_R2
),
1934 if (GUEST_BASE
!= 0) {
1935 add_guest_base
= tcg_opc_a1 (TCG_REG_P0
, OPC_ADD_A1
, TCG_REG_R2
,
1936 TCG_GUEST_BASE_REG
, addr_reg
);
1937 addr_reg
= TCG_REG_R2
;
1939 add_guest_base
= INSN_NOP_M
;
1943 tcg_out_bundle(s
, (GUEST_BASE
? MmI
: mmI
),
1945 tcg_opc_m4 (TCG_REG_P0
, opc_st_m4
[s_bits
],
1946 data_reg
, addr_reg
),
1949 if (s_bits
== MO_16
) {
1950 tcg_out_bundle(s
, mII
,
1952 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1953 TCG_REG_R3
, data_reg
, 15, 15),
1954 tcg_opc_bswap64_i(TCG_REG_P0
,
1955 TCG_REG_R3
, TCG_REG_R3
));
1956 data_reg
= TCG_REG_R3
;
1957 } else if (s_bits
== MO_32
) {
1958 tcg_out_bundle(s
, mII
,
1960 tcg_opc_i12(TCG_REG_P0
, OPC_DEP_Z_I12
,
1961 TCG_REG_R3
, data_reg
, 31, 31),
1962 tcg_opc_bswap64_i(TCG_REG_P0
,
1963 TCG_REG_R3
, TCG_REG_R3
));
1964 data_reg
= TCG_REG_R3
;
1965 } else if (s_bits
== MO_64
) {
1966 tcg_out_bundle(s
, miI
,
1969 tcg_opc_bswap64_i(TCG_REG_P0
, TCG_REG_R3
, data_reg
));
1970 data_reg
= TCG_REG_R3
;
1972 tcg_out_bundle(s
, miI
,
1973 tcg_opc_m4 (TCG_REG_P0
, opc_st_m4
[s_bits
],
1974 data_reg
, addr_reg
),
1983 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1984 const TCGArg
*args
, const int *const_args
)
1987 case INDEX_op_exit_tb
:
1988 tcg_out_exit_tb(s
, args
[0]);
1991 tcg_out_br(s
, arg_label(args
[0]));
1993 case INDEX_op_goto_tb
:
1994 tcg_out_goto_tb(s
, args
[0]);
1997 case INDEX_op_ld8u_i32
:
1998 case INDEX_op_ld8u_i64
:
1999 tcg_out_ld_rel(s
, OPC_LD1_M1
, args
[0], args
[1], args
[2]);
2001 case INDEX_op_ld8s_i32
:
2002 case INDEX_op_ld8s_i64
:
2003 tcg_out_ld_rel(s
, OPC_LD1_M1
, args
[0], args
[1], args
[2]);
2004 tcg_out_ext(s
, OPC_SXT1_I29
, args
[0], args
[0]);
2006 case INDEX_op_ld16u_i32
:
2007 case INDEX_op_ld16u_i64
:
2008 tcg_out_ld_rel(s
, OPC_LD2_M1
, args
[0], args
[1], args
[2]);
2010 case INDEX_op_ld16s_i32
:
2011 case INDEX_op_ld16s_i64
:
2012 tcg_out_ld_rel(s
, OPC_LD2_M1
, args
[0], args
[1], args
[2]);
2013 tcg_out_ext(s
, OPC_SXT2_I29
, args
[0], args
[0]);
2015 case INDEX_op_ld_i32
:
2016 case INDEX_op_ld32u_i64
:
2017 tcg_out_ld_rel(s
, OPC_LD4_M1
, args
[0], args
[1], args
[2]);
2019 case INDEX_op_ld32s_i64
:
2020 tcg_out_ld_rel(s
, OPC_LD4_M1
, args
[0], args
[1], args
[2]);
2021 tcg_out_ext(s
, OPC_SXT4_I29
, args
[0], args
[0]);
2023 case INDEX_op_ld_i64
:
2024 tcg_out_ld_rel(s
, OPC_LD8_M1
, args
[0], args
[1], args
[2]);
2026 case INDEX_op_st8_i32
:
2027 case INDEX_op_st8_i64
:
2028 tcg_out_st_rel(s
, OPC_ST1_M4
, args
[0], args
[1], args
[2]);
2030 case INDEX_op_st16_i32
:
2031 case INDEX_op_st16_i64
:
2032 tcg_out_st_rel(s
, OPC_ST2_M4
, args
[0], args
[1], args
[2]);
2034 case INDEX_op_st_i32
:
2035 case INDEX_op_st32_i64
:
2036 tcg_out_st_rel(s
, OPC_ST4_M4
, args
[0], args
[1], args
[2]);
2038 case INDEX_op_st_i64
:
2039 tcg_out_st_rel(s
, OPC_ST8_M4
, args
[0], args
[1], args
[2]);
2042 case INDEX_op_add_i32
:
2043 case INDEX_op_add_i64
:
2044 tcg_out_add(s
, args
[0], args
[1], args
[2], const_args
[2]);
2046 case INDEX_op_sub_i32
:
2047 case INDEX_op_sub_i64
:
2048 tcg_out_sub(s
, args
[0], args
[1], const_args
[1], args
[2], const_args
[2]);
2051 case INDEX_op_and_i32
:
2052 case INDEX_op_and_i64
:
2053 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2054 tcg_out_alu(s
, OPC_AND_A1
, OPC_AND_A3
, args
[0],
2055 args
[2], const_args
[2], args
[1], const_args
[1]);
2057 case INDEX_op_andc_i32
:
2058 case INDEX_op_andc_i64
:
2059 tcg_out_alu(s
, OPC_ANDCM_A1
, OPC_ANDCM_A3
, args
[0],
2060 args
[1], const_args
[1], args
[2], const_args
[2]);
2062 case INDEX_op_eqv_i32
:
2063 case INDEX_op_eqv_i64
:
2064 tcg_out_eqv(s
, args
[0], args
[1], const_args
[1],
2065 args
[2], const_args
[2]);
2067 case INDEX_op_nand_i32
:
2068 case INDEX_op_nand_i64
:
2069 tcg_out_nand(s
, args
[0], args
[1], const_args
[1],
2070 args
[2], const_args
[2]);
2072 case INDEX_op_nor_i32
:
2073 case INDEX_op_nor_i64
:
2074 tcg_out_nor(s
, args
[0], args
[1], const_args
[1],
2075 args
[2], const_args
[2]);
2077 case INDEX_op_or_i32
:
2078 case INDEX_op_or_i64
:
2079 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2080 tcg_out_alu(s
, OPC_OR_A1
, OPC_OR_A3
, args
[0],
2081 args
[2], const_args
[2], args
[1], const_args
[1]);
2083 case INDEX_op_orc_i32
:
2084 case INDEX_op_orc_i64
:
2085 tcg_out_orc(s
, args
[0], args
[1], const_args
[1],
2086 args
[2], const_args
[2]);
2088 case INDEX_op_xor_i32
:
2089 case INDEX_op_xor_i64
:
2090 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2091 tcg_out_alu(s
, OPC_XOR_A1
, OPC_XOR_A3
, args
[0],
2092 args
[2], const_args
[2], args
[1], const_args
[1]);
2095 case INDEX_op_mul_i32
:
2096 case INDEX_op_mul_i64
:
2097 tcg_out_mul(s
, args
[0], args
[1], args
[2]);
2100 case INDEX_op_sar_i32
:
2101 tcg_out_sar_i32(s
, args
[0], args
[1], args
[2], const_args
[2]);
2103 case INDEX_op_sar_i64
:
2104 tcg_out_sar_i64(s
, args
[0], args
[1], args
[2], const_args
[2]);
2106 case INDEX_op_shl_i32
:
2107 tcg_out_shl_i32(s
, args
[0], args
[1], args
[2], const_args
[2]);
2109 case INDEX_op_shl_i64
:
2110 tcg_out_shl_i64(s
, args
[0], args
[1], args
[2], const_args
[2]);
2112 case INDEX_op_shr_i32
:
2113 tcg_out_shr_i32(s
, args
[0], args
[1], args
[2], const_args
[2]);
2115 case INDEX_op_shr_i64
:
2116 tcg_out_shr_i64(s
, args
[0], args
[1], args
[2], const_args
[2]);
2118 case INDEX_op_rotl_i32
:
2119 tcg_out_rotl_i32(s
, args
[0], args
[1], args
[2], const_args
[2]);
2121 case INDEX_op_rotl_i64
:
2122 tcg_out_rotl_i64(s
, args
[0], args
[1], args
[2], const_args
[2]);
2124 case INDEX_op_rotr_i32
:
2125 tcg_out_rotr_i32(s
, args
[0], args
[1], args
[2], const_args
[2]);
2127 case INDEX_op_rotr_i64
:
2128 tcg_out_rotr_i64(s
, args
[0], args
[1], args
[2], const_args
[2]);
2131 case INDEX_op_ext8s_i32
:
2132 case INDEX_op_ext8s_i64
:
2133 tcg_out_ext(s
, OPC_SXT1_I29
, args
[0], args
[1]);
2135 case INDEX_op_ext8u_i32
:
2136 case INDEX_op_ext8u_i64
:
2137 tcg_out_ext(s
, OPC_ZXT1_I29
, args
[0], args
[1]);
2139 case INDEX_op_ext16s_i32
:
2140 case INDEX_op_ext16s_i64
:
2141 tcg_out_ext(s
, OPC_SXT2_I29
, args
[0], args
[1]);
2143 case INDEX_op_ext16u_i32
:
2144 case INDEX_op_ext16u_i64
:
2145 tcg_out_ext(s
, OPC_ZXT2_I29
, args
[0], args
[1]);
2147 case INDEX_op_ext32s_i64
:
2148 tcg_out_ext(s
, OPC_SXT4_I29
, args
[0], args
[1]);
2150 case INDEX_op_ext32u_i64
:
2151 tcg_out_ext(s
, OPC_ZXT4_I29
, args
[0], args
[1]);
2154 case INDEX_op_bswap16_i32
:
2155 case INDEX_op_bswap16_i64
:
2156 tcg_out_bswap16(s
, args
[0], args
[1]);
2158 case INDEX_op_bswap32_i32
:
2159 case INDEX_op_bswap32_i64
:
2160 tcg_out_bswap32(s
, args
[0], args
[1]);
2162 case INDEX_op_bswap64_i64
:
2163 tcg_out_bswap64(s
, args
[0], args
[1]);
2166 case INDEX_op_deposit_i32
:
2167 case INDEX_op_deposit_i64
:
2168 tcg_out_deposit(s
, args
[0], args
[1], args
[2], const_args
[2],
2172 case INDEX_op_brcond_i32
:
2173 tcg_out_brcond(s
, args
[2], args
[0], args
[1], arg_label(args
[3]), 1);
2175 case INDEX_op_brcond_i64
:
2176 tcg_out_brcond(s
, args
[2], args
[0], args
[1], arg_label(args
[3]), 0);
2178 case INDEX_op_setcond_i32
:
2179 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2], 1);
2181 case INDEX_op_setcond_i64
:
2182 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2], 0);
2184 case INDEX_op_movcond_i32
:
2185 tcg_out_movcond(s
, args
[5], args
[0], args
[1], args
[2],
2186 args
[3], const_args
[3], args
[4], const_args
[4], 1);
2188 case INDEX_op_movcond_i64
:
2189 tcg_out_movcond(s
, args
[5], args
[0], args
[1], args
[2],
2190 args
[3], const_args
[3], args
[4], const_args
[4], 0);
2193 case INDEX_op_qemu_ld_i32
:
2194 tcg_out_qemu_ld(s
, args
);
2196 case INDEX_op_qemu_ld_i64
:
2197 tcg_out_qemu_ld(s
, args
);
2199 case INDEX_op_qemu_st_i32
:
2200 tcg_out_qemu_st(s
, args
);
2202 case INDEX_op_qemu_st_i64
:
2203 tcg_out_qemu_st(s
, args
);
2206 case INDEX_op_mov_i32
: /* Always emitted via tcg_out_mov. */
2207 case INDEX_op_mov_i64
:
2208 case INDEX_op_movi_i32
: /* Always emitted via tcg_out_movi. */
2209 case INDEX_op_movi_i64
:
2210 case INDEX_op_call
: /* Always emitted via tcg_out_call. */
2216 static const TCGTargetOpDef ia64_op_defs
[] = {
2217 { INDEX_op_br
, { } },
2218 { INDEX_op_exit_tb
, { } },
2219 { INDEX_op_goto_tb
, { } },
2221 { INDEX_op_ld8u_i32
, { "r", "r" } },
2222 { INDEX_op_ld8s_i32
, { "r", "r" } },
2223 { INDEX_op_ld16u_i32
, { "r", "r" } },
2224 { INDEX_op_ld16s_i32
, { "r", "r" } },
2225 { INDEX_op_ld_i32
, { "r", "r" } },
2226 { INDEX_op_st8_i32
, { "rZ", "r" } },
2227 { INDEX_op_st16_i32
, { "rZ", "r" } },
2228 { INDEX_op_st_i32
, { "rZ", "r" } },
2230 { INDEX_op_add_i32
, { "r", "rZ", "rI" } },
2231 { INDEX_op_sub_i32
, { "r", "rI", "rI" } },
2233 { INDEX_op_and_i32
, { "r", "rI", "rI" } },
2234 { INDEX_op_andc_i32
, { "r", "rI", "rI" } },
2235 { INDEX_op_eqv_i32
, { "r", "rZ", "rZ" } },
2236 { INDEX_op_nand_i32
, { "r", "rZ", "rZ" } },
2237 { INDEX_op_nor_i32
, { "r", "rZ", "rZ" } },
2238 { INDEX_op_or_i32
, { "r", "rI", "rI" } },
2239 { INDEX_op_orc_i32
, { "r", "rZ", "rZ" } },
2240 { INDEX_op_xor_i32
, { "r", "rI", "rI" } },
2242 { INDEX_op_mul_i32
, { "r", "rZ", "rZ" } },
2244 { INDEX_op_sar_i32
, { "r", "rZ", "ri" } },
2245 { INDEX_op_shl_i32
, { "r", "rZ", "ri" } },
2246 { INDEX_op_shr_i32
, { "r", "rZ", "ri" } },
2247 { INDEX_op_rotl_i32
, { "r", "rZ", "ri" } },
2248 { INDEX_op_rotr_i32
, { "r", "rZ", "ri" } },
2250 { INDEX_op_ext8s_i32
, { "r", "rZ"} },
2251 { INDEX_op_ext8u_i32
, { "r", "rZ"} },
2252 { INDEX_op_ext16s_i32
, { "r", "rZ"} },
2253 { INDEX_op_ext16u_i32
, { "r", "rZ"} },
2255 { INDEX_op_bswap16_i32
, { "r", "rZ" } },
2256 { INDEX_op_bswap32_i32
, { "r", "rZ" } },
2258 { INDEX_op_brcond_i32
, { "rZ", "rZ" } },
2259 { INDEX_op_setcond_i32
, { "r", "rZ", "rZ" } },
2260 { INDEX_op_movcond_i32
, { "r", "rZ", "rZ", "rI", "rI" } },
2262 { INDEX_op_ld8u_i64
, { "r", "r" } },
2263 { INDEX_op_ld8s_i64
, { "r", "r" } },
2264 { INDEX_op_ld16u_i64
, { "r", "r" } },
2265 { INDEX_op_ld16s_i64
, { "r", "r" } },
2266 { INDEX_op_ld32u_i64
, { "r", "r" } },
2267 { INDEX_op_ld32s_i64
, { "r", "r" } },
2268 { INDEX_op_ld_i64
, { "r", "r" } },
2269 { INDEX_op_st8_i64
, { "rZ", "r" } },
2270 { INDEX_op_st16_i64
, { "rZ", "r" } },
2271 { INDEX_op_st32_i64
, { "rZ", "r" } },
2272 { INDEX_op_st_i64
, { "rZ", "r" } },
2274 { INDEX_op_add_i64
, { "r", "rZ", "rI" } },
2275 { INDEX_op_sub_i64
, { "r", "rI", "rI" } },
2277 { INDEX_op_and_i64
, { "r", "rI", "rI" } },
2278 { INDEX_op_andc_i64
, { "r", "rI", "rI" } },
2279 { INDEX_op_eqv_i64
, { "r", "rZ", "rZ" } },
2280 { INDEX_op_nand_i64
, { "r", "rZ", "rZ" } },
2281 { INDEX_op_nor_i64
, { "r", "rZ", "rZ" } },
2282 { INDEX_op_or_i64
, { "r", "rI", "rI" } },
2283 { INDEX_op_orc_i64
, { "r", "rZ", "rZ" } },
2284 { INDEX_op_xor_i64
, { "r", "rI", "rI" } },
2286 { INDEX_op_mul_i64
, { "r", "rZ", "rZ" } },
2288 { INDEX_op_sar_i64
, { "r", "rZ", "ri" } },
2289 { INDEX_op_shl_i64
, { "r", "rZ", "ri" } },
2290 { INDEX_op_shr_i64
, { "r", "rZ", "ri" } },
2291 { INDEX_op_rotl_i64
, { "r", "rZ", "ri" } },
2292 { INDEX_op_rotr_i64
, { "r", "rZ", "ri" } },
2294 { INDEX_op_ext8s_i64
, { "r", "rZ"} },
2295 { INDEX_op_ext8u_i64
, { "r", "rZ"} },
2296 { INDEX_op_ext16s_i64
, { "r", "rZ"} },
2297 { INDEX_op_ext16u_i64
, { "r", "rZ"} },
2298 { INDEX_op_ext32s_i64
, { "r", "rZ"} },
2299 { INDEX_op_ext32u_i64
, { "r", "rZ"} },
2301 { INDEX_op_bswap16_i64
, { "r", "rZ" } },
2302 { INDEX_op_bswap32_i64
, { "r", "rZ" } },
2303 { INDEX_op_bswap64_i64
, { "r", "rZ" } },
2305 { INDEX_op_brcond_i64
, { "rZ", "rZ" } },
2306 { INDEX_op_setcond_i64
, { "r", "rZ", "rZ" } },
2307 { INDEX_op_movcond_i64
, { "r", "rZ", "rZ", "rI", "rI" } },
2309 { INDEX_op_deposit_i32
, { "r", "rZ", "ri" } },
2310 { INDEX_op_deposit_i64
, { "r", "rZ", "ri" } },
2312 { INDEX_op_qemu_ld_i32
, { "r", "r" } },
2313 { INDEX_op_qemu_ld_i64
, { "r", "r" } },
2314 { INDEX_op_qemu_st_i32
, { "SZ", "r" } },
2315 { INDEX_op_qemu_st_i64
, { "SZ", "r" } },
2320 /* Generate global QEMU prologue and epilogue code */
2321 static void tcg_target_qemu_prologue(TCGContext
*s
)
2325 /* reserve some stack space */
2326 frame_size
= TCG_STATIC_CALL_ARGS_SIZE
+
2327 CPU_TEMP_BUF_NLONGS
* sizeof(long);
2328 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
2329 ~(TCG_TARGET_STACK_ALIGN
- 1);
2330 tcg_set_frame(s
, TCG_REG_CALL_STACK
, TCG_STATIC_CALL_ARGS_SIZE
,
2331 CPU_TEMP_BUF_NLONGS
* sizeof(long));
2333 /* First emit adhoc function descriptor */
2334 *s
->code_ptr
= (tcg_insn_unit
){
2335 (uint64_t)(s
->code_ptr
+ 1), /* entry point */
2341 tcg_out_bundle(s
, miI
,
2342 tcg_opc_m34(TCG_REG_P0
, OPC_ALLOC_M34
,
2343 TCG_REG_R34
, 32, 24, 0),
2345 tcg_opc_i21(TCG_REG_P0
, OPC_MOV_I21
,
2346 TCG_REG_B6
, TCG_REG_R33
, 0));
2348 /* ??? If GUEST_BASE < 0x200000, we could load the register via
2349 an ADDL in the M slot of the next bundle. */
2350 if (GUEST_BASE
!= 0) {
2351 tcg_out_bundle(s
, mlx
,
2353 tcg_opc_l2 (GUEST_BASE
),
2354 tcg_opc_x2 (TCG_REG_P0
, OPC_MOVL_X2
,
2355 TCG_GUEST_BASE_REG
, GUEST_BASE
));
2356 tcg_regset_set_reg(s
->reserved_regs
, TCG_GUEST_BASE_REG
);
2359 tcg_out_bundle(s
, miB
,
2360 tcg_opc_a4 (TCG_REG_P0
, OPC_ADDS_A4
,
2361 TCG_REG_R12
, -frame_size
, TCG_REG_R12
),
2362 tcg_opc_i22(TCG_REG_P0
, OPC_MOV_I22
,
2363 TCG_REG_R33
, TCG_REG_B0
),
2364 tcg_opc_b4 (TCG_REG_P0
, OPC_BR_SPTK_MANY_B4
, TCG_REG_B6
));
2367 tb_ret_addr
= s
->code_ptr
;
2368 tcg_out_bundle(s
, miI
,
2370 tcg_opc_i21(TCG_REG_P0
, OPC_MOV_I21
,
2371 TCG_REG_B0
, TCG_REG_R33
, 0),
2372 tcg_opc_a4 (TCG_REG_P0
, OPC_ADDS_A4
,
2373 TCG_REG_R12
, frame_size
, TCG_REG_R12
));
2374 tcg_out_bundle(s
, miB
,
2376 tcg_opc_i26(TCG_REG_P0
, OPC_MOV_I_I26
,
2377 TCG_REG_PFS
, TCG_REG_R34
),
2378 tcg_opc_b4 (TCG_REG_P0
, OPC_BR_RET_SPTK_MANY_B4
,
static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
                   0xffffffffffffffffull);
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
                   0xffffffffffffffffull);

    tcg_regset_clear(tcg_target_call_clobber_regs);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);   /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);   /* global pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12);  /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13);  /* thread pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33);  /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34);  /* PFS */

    /* The following 4 are not in use, are call-saved, but *not* saved
       by the prologue.  Therefore we cannot use them without modifying
       the prologue.  There doesn't seem to be any good reason to use
       these as opposed to the windowed registers.  */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);

    tcg_add_target_add_op_defs(ia64_op_defs);
}