/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];
#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    int n_t32;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
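/* Worked example (added commentary, illustrative): for a SPARC
   format-3 instruction word, the destination register rd occupies bits
   29..25, so GET_FIELD(insn, 2, 6) shifts right by 31 - 6 = 25 and
   masks with 0x1f, yielding rd; GET_FIELD_SP(insn, 25, 29) names the
   same field using the manual's bit-0-is-LSB convention. */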
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
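/* Example (added commentary, illustrative): on SPARC64 the 5-bit
   double-register encoding folds register-number bit 5 into bit 0, so
   DFPREG(3) = ((3 & 1) << 5) | (3 & 0x1e) = 0x22, i.e. %f34; on 32-bit
   SPARC, DFPREG(3) simply rounds down to the even register %f2. */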
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
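/* Worked example (added commentary, illustrative): sign_extend(0x40000, 19)
   shifts the 19-bit value up by 13 so its sign bit lands in bit 31, and
   the arithmetic right shift then smears it back down: the result is
   0xfffc0000, i.e. -262144. */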
#define IS_IMM (insn & (1<<13))
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}
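/* Note (added commentary): each 64-bit cpu_fpr[] element packs a pair
   of 32-bit float registers, the even-numbered one in the high half
   and the odd one in the low half, which is why an even src on a
   64-bit host needs the shift-and-truncate sequence above. */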
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}
static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
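/* Note (added commentary): with the PSTATE.AM address-mask bit in
   effect, a 64-bit SPARC V9 CPU truncates generated addresses to 32
   bits, which is what the andi above models. */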
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
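/* Note (added commentary): %g0 always reads as zero and ignores
   writes, the remaining globals live in cpu_gregs[], and the windowed
   registers %o0..%i7 are reached indirectly through cpu_regwptr, which
   tracks the current register window. */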
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
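/* Note (added commentary): tcg_gen_goto_tb/tcg_gen_exit_tb implement
   TB chaining. When both target PCs stay on the same guest page, the
   exit value encodes the TB pointer plus the jump-slot number so the
   execution loop can patch a direct jump to the next TB instead of
   returning to the dispatcher every time. */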
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
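/* Worked example (added commentary, illustrative): an unsigned 32-bit
   add dst = src1 + src2 wraps exactly when dst < src1, e.g.
   0xfffffffe + 3 = 1 and 1 < 0xfffffffe, so the LTU setcond above
   recovers the carry bit without touching the host flags. */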
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif

    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif

    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
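/* Note (added commentary): MULScc performs one step of a
   shift-and-add multiply. The Y register supplies the next multiplier
   bit and accumulates the shifted-out bits of the partial product,
   while N ^ V from the previous step is shifted into the partial sum
   so the running value stays correct for signed operands. */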
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    /* INT64_MIN / -1 overflows; the result is pinned to INT64_MIN. */
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif
// 1:
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0:
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
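/* Note (added commentary): fcc_offset selects which FCC field of the
   FSR is tested, so the same eval helpers below serve %fcc0..%fcc3 on
   SPARC64 as well as the single FCC field of 32-bit SPARC. */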
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
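/* Note (added commentary): while translating a conditional delayed
   branch, npc is left in the symbolic JUMP_PC state so the delay-slot
   instruction can be emitted first; flush_cond/save_npc materialize
   the pending choice between jump_pc[0] and jump_pc[1] into cpu_npc as
   soon as the real value is needed. */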
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc, cond);
}
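/* Note (added commentary): condition codes are evaluated lazily --
   cc_op records which operation last set them and cpu_cc_src/cpu_cc_dst
   hold its operands -- so the expensive PSR flag computation happens
   only here, when guest state must become architecturally visible
   (traps, helper calls, end of TB). */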
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            dc->npc = JUMP_PC;
        }
    }
}
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D();

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
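/* Note (added commentary): with the instruction's i bit set, the
   access uses the current %asi register, so the ASI value is only
   known at run time and must be passed to the helper as a TCG value;
   with the i bit clear, the 8-bit immediate ASI field is folded into
   a constant. */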
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
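/* Worked example (added commentary, illustrative): with width 8,
   imask = 7 and shift = 3, so an input whose low bits are 2 produces
   index = 16, and the lookup (TABL >> 16) & 0xff selects the third
   byte of the table constant -- the 8-bit edge mask for that
   alignment. */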
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        tcg_gen_neg_tl(tmp, tmp);
    }
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);

    tcg_temp_free(tmp);
}
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
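/* Note (added commentary): since shift is a multiple of 8 in 0..56,
   shift ^ 63 equals 63 - shift, so the pair of right shifts computes
   s2 >> (64 - shift) without ever issuing the undefined shift by 64. */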
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:                     /* CALL */
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_env, cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_env, cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr(cpu_env);
                    gen_helper_rdccr(cpu_dst, cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr(cpu_env);
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst, cpu_env);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
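                /* V9 rdpr: the per-trap-level registers (tpc, tnpc,
                 * tstate, tt) live in a trap_state structure;
                 * gen_load_trap_state_at_tl() computes a pointer to the
                 * entry for the current trap level so they can be read
                 * with plain tcg loads below.
                 */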
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw(cpu_env);
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
            } else if (xop == 0x34) {   /* FPU Operations */
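                /* The gen_fop_* suffix encodes operand sizes: F =
                 * 32-bit single, D = 64-bit double, Q = 128-bit quad,
                 * e.g. gen_fop_DFF produces a double from two singles
                 * (fsmuld).  The gen_ne_fop_* variants appear to be
                 * used for ops that cannot raise IEEE exceptions
                 * (moves, negate, abs, exact widening converts).
                 */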
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x1: /* fmovs */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x5: /* fnegs */
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
                    break;
                case 0x9: /* fabss */
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
                    break;
                case 0x41: /* fadds */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
                    break;
                case 0x42: /* faddd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
                    break;
                case 0x45: /* fsubs */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
                    break;
                case 0x46: /* fsubd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
                    break;
                case 0x4d: /* fdivs */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
                    break;
                case 0x4e: /* fdivd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
                    break;
                case 0xc4: /* fitos */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
                    break;
                case 0xc6: /* fdtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
                    break;
                case 0xc8: /* fitod */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
                    break;
                case 0xc9: /* fstod */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
                    break;
                case 0xd1: /* fstoi */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
                    break;
                case 0xd2: /* fdtoi */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_move_Q(rd, rs2);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
                    break;
                case 0xa: /* V9 fabsd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
                    break;
                case 0x81: /* V9 fstox */
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
                    break;
                case 0x82: /* V9 fdtox */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
                    break;
                case 0x84: /* V9 fxtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
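                /* xop 0x35 covers the V9 conditional FP moves.  They
                 * all compile to the same shape: evaluate the condition
                 * (on a register, a %fccN field or %icc/%xcc) and
                 * branch over a plain FP register copy when it is
                 * false -- see the FMOVSCC/FMOVDCC/FMOVQCC macros
                 * below.
                 */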
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    gen_move_Q(rd, rs2);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc) \
                    { \
                        TCGv r_cond; \
                        int l1; \
 \
                        l1 = gen_new_label(); \
                        r_cond = tcg_temp_new(); \
                        cond = GET_FIELD_SP(insn, 14, 17); \
                        gen_fcond(r_cond, fcc, cond); \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1); \
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
                        gen_store_fpr_F(dc, rd, cpu_src1_32); \
                        gen_set_label(l1); \
                        tcg_temp_free(r_cond); \
                    }
#define FMOVDCC(fcc) \
                    { \
                        TCGv r_cond; \
                        int l1; \
 \
                        l1 = gen_new_label(); \
                        r_cond = tcg_temp_new(); \
                        cond = GET_FIELD_SP(insn, 14, 17); \
                        gen_fcond(r_cond, fcc, cond); \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1); \
                        cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
                        gen_store_fpr_D(dc, rd, cpu_src1_64); \
                        gen_set_label(l1); \
                        tcg_temp_free(r_cond); \
                    }
#define FMOVQCC(fcc) \
                    { \
                        TCGv r_cond; \
                        int l1; \
 \
                        l1 = gen_new_label(); \
                        r_cond = tcg_temp_new(); \
                        cond = GET_FIELD_SP(insn, 14, 17); \
                        gen_fcond(r_cond, fcc, cond); \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1); \
                        gen_move_Q(rd, rs2); \
                        gen_set_label(l1); \
                        tcg_temp_free(r_cond); \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc) \
                    { \
                        TCGv r_cond; \
                        int l1; \
 \
                        l1 = gen_new_label(); \
                        r_cond = tcg_temp_new(); \
                        cond = GET_FIELD_SP(insn, 14, 17); \
                        gen_cond(r_cond, icc, cond, dc); \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1); \
                        cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
                        gen_store_fpr_F(dc, rd, cpu_src1_32); \
                        gen_set_label(l1); \
                        tcg_temp_free(r_cond); \
                    }
#define FMOVDCC(icc) \
                    { \
                        TCGv r_cond; \
                        int l1; \
 \
                        l1 = gen_new_label(); \
                        r_cond = tcg_temp_new(); \
                        cond = GET_FIELD_SP(insn, 14, 17); \
                        gen_cond(r_cond, icc, cond, dc); \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1); \
                        cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
                        gen_store_fpr_D(dc, rd, cpu_src1_64); \
                        gen_update_fprs_dirty(DFPREG(rd)); \
                        gen_set_label(l1); \
                        tcg_temp_free(r_cond); \
                    }
#define FMOVQCC(icc) \
                    { \
                        TCGv r_cond; \
                        int l1; \
 \
                        l1 = gen_new_label(); \
                        r_cond = tcg_temp_new(); \
                        cond = GET_FIELD_SP(insn, 14, 17); \
                        gen_cond(r_cond, icc, cond, dc); \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1); \
                        gen_move_Q(rd, rs2); \
                        gen_set_label(l1); \
                        tcg_temp_free(r_cond); \
                    }
                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                    }
            } else if (xop == 0x2) {
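                /* SPARC has no architectural mov: the assembler's
                 *     clr %rd        is  or %g0, %g0, %rd
                 *     mov simm, %rd  is  or %g0, simm, %rd
                 * so rs1 == 0 and rs2 == 0 are special-cased to plain
                 * constant/register moves below.
                 */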
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
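                /* Condition codes are evaluated lazily: ops that set
                 * them record their result in cpu_cc_dst (or their
                 * operands in cpu_cc_src/cpu_cc_src2) and tag
                 * cpu_cc_op with the producing operation;
                 * gen_helper_compute_psr() materializes the flags only
                 * when something actually reads them.  dc->cc_op
                 * mirrors cpu_cc_op at translation time.
                 */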
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        {
                            TCGv r_temp1, r_temp2;
                            r_temp1 = tcg_temp_local_new();
                            r_temp2 = tcg_temp_local_new();
                            tcg_gen_mov_tl(r_temp1, cpu_src1);
                            tcg_gen_mov_tl(r_temp2, cpu_src2);
                            gen_trap_ifdivzero_tl(r_temp2);
                            tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
                            tcg_temp_free(r_temp1);
                            tcg_temp_free(r_temp2);
                        }
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr(cpu_env);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
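                    /* The wr%asr family writes rs1 XOR operand2 (a
                     * SPARC architectural quirk, not a typo), hence the
                     * tcg_gen_xor_tl at the top of every case below.
                     */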
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8 manual,
                                                   nop on the microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    gen_helper_wrpstate(cpu_env, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 7: // tl
                                {
                                    TCGv r_tmp = tcg_temp_local_new();

                                    tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                    save_state(dc, cpu_cond);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                    tcg_temp_free(r_tmp);
                                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                                   offsetof(CPUSPARCState, tl));
                                    dc->npc = DYNAMIC_PC;
                                }
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
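                    /* Note in the wrpr cases above that writes to
                     * pstate and tl force dc->npc to DYNAMIC_PC:
                     * they change processor state that the rest of
                     * the translation block would otherwise assume
                     * fixed, so translation stops trusting the
                     * static next-PC.
                     */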
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                               cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
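                /* VIS instructions are decoded from the opf field and
                 * require a usable FPU (gen_trap_ifnofpu) even when
                 * they only touch integer registers.  For the edge
                 * ops, the gen_edge() arguments after the two sources
                 * appear to be (element width, set-condition-codes,
                 * little-endian).
                 */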
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_movl_reg_TN(rs1, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src1(insn, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
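                /* The VIS fcmp{le,ne,gt,eq}{16,32} ops below compare
                 * the 16- or 32-bit lanes of two double FP registers
                 * and deliver a per-lane result mask to an integer
                 * destination register, unlike the scalar fcmp* ops,
                 * which set %fcc fields.
                 */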
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_movi_i64(cpu_dst_64, -1);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_movi_i32(cpu_dst_32, -1);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore(cpu_env);
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
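                /* Common operand fetch for jmpl/rett/flush/save/
                 * restore below: cpu_dst = rs1 + (simm13 or rs2).
                 */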
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
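                /* jmpl writes the return address to rd, then loads npc
                 * from the computed target after a 4-byte alignment
                 * check; dc->npc becomes DYNAMIC_PC because the target
                 * is only known at run time.
                 */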
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    /* nop */
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore(cpu_env);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done(cpu_env);
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry(cpu_env);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
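    /* Memory instructions.  Lazy condition codes are flushed first:
     * a load or store can fault, and the exception path must see
     * architecturally correct PSR flags rather than the deferred
     * cc_src/cc_dst values.
     */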
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr(cpu_env);
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
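                /* Integer loads.  gen_address_mask() truncates the
                 * effective address to 32 bits when 32-bit address
                 * masking is in effect (dc->address_mask_32bit on V9);
                 * on 32-bit targets it is effectively a no-op.
                 */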
4686 case 0x0: /* ld, V9 lduw, load unsigned word */
4687 gen_address_mask(dc
, cpu_addr
);
4688 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4690 case 0x1: /* ldub, load unsigned byte */
4691 gen_address_mask(dc
, cpu_addr
);
4692 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4694 case 0x2: /* lduh, load unsigned halfword */
4695 gen_address_mask(dc
, cpu_addr
);
4696 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4698 case 0x3: /* ldd, load double word */
4704 save_state(dc
, cpu_cond
);
4705 r_const
= tcg_const_i32(7);
4706 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4707 tcg_temp_free_i32(r_const
);
4708 gen_address_mask(dc
, cpu_addr
);
4709 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4710 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4711 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4712 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4713 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4714 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4715 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4718 case 0x9: /* ldsb, load signed byte */
4719 gen_address_mask(dc
, cpu_addr
);
4720 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4722 case 0xa: /* ldsh, load signed halfword */
4723 gen_address_mask(dc
, cpu_addr
);
4724 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4726 case 0xd: /* ldstub -- XXX: should be atomically */
4730 gen_address_mask(dc
, cpu_addr
);
4731 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4732 r_const
= tcg_const_tl(0xff);
4733 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4734 tcg_temp_free(r_const
);
4737 case 0x0f: /* swap, swap register with memory. Also
4739 CHECK_IU_FEATURE(dc
, SWAP
);
4740 gen_movl_reg_TN(rd
, cpu_val
);
4741 gen_address_mask(dc
, cpu_addr
);
4742 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4743 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4744 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4746 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4747 case 0x10: /* lda, V9 lduwa, load word alternate */
4748 #ifndef TARGET_SPARC64
4751 if (!supervisor(dc
))
4754 save_state(dc
, cpu_cond
);
4755 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4757 case 0x11: /* lduba, load unsigned byte alternate */
4758 #ifndef TARGET_SPARC64
4761 if (!supervisor(dc
))
4764 save_state(dc
, cpu_cond
);
4765 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4767 case 0x12: /* lduha, load unsigned halfword alternate */
4768 #ifndef TARGET_SPARC64
4771 if (!supervisor(dc
))
4774 save_state(dc
, cpu_cond
);
4775 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4777 case 0x13: /* ldda, load double word alternate */
4778 #ifndef TARGET_SPARC64
4781 if (!supervisor(dc
))
4786 save_state(dc
, cpu_cond
);
4787 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4789 case 0x19: /* ldsba, load signed byte alternate */
4790 #ifndef TARGET_SPARC64
4793 if (!supervisor(dc
))
4796 save_state(dc
, cpu_cond
);
4797 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4799 case 0x1a: /* ldsha, load signed halfword alternate */
4800 #ifndef TARGET_SPARC64
4803 if (!supervisor(dc
))
4806 save_state(dc
, cpu_cond
);
4807 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4809 case 0x1d: /* ldstuba -- XXX: should be atomically */
4810 #ifndef TARGET_SPARC64
4813 if (!supervisor(dc
))
4816 save_state(dc
, cpu_cond
);
4817 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4819 case 0x1f: /* swapa, swap reg with alt. memory. Also
4821 CHECK_IU_FEATURE(dc
, SWAP
);
4822 #ifndef TARGET_SPARC64
4825 if (!supervisor(dc
))
4828 save_state(dc
, cpu_cond
);
4829 gen_movl_reg_TN(rd
, cpu_val
);
4830 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4833 #ifndef TARGET_SPARC64
4834 case 0x30: /* ldc */
4835 case 0x31: /* ldcsr */
4836 case 0x33: /* lddc */
#ifdef TARGET_SPARC64
        case 0x08: /* V9 ldsw */
            gen_address_mask(dc, cpu_addr);
            tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
            break;
        case 0x0b: /* V9 ldx */
            gen_address_mask(dc, cpu_addr);
            tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
            break;
        case 0x18: /* V9 ldswa */
            save_state(dc, cpu_cond);
            gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
            break;
        case 0x1b: /* V9 ldxa */
            save_state(dc, cpu_cond);
            gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
            break;
        case 0x2d: /* V9 prefetch, no effect */
            goto skip_move;
        case 0x30: /* V9 ldfa */
            if (gen_trap_ifnofpu(dc, cpu_cond)) {
                goto jmp_insn;
            }
            save_state(dc, cpu_cond);
            gen_ldf_asi(cpu_addr, insn, 4, rd);
            gen_update_fprs_dirty(rd);
            goto skip_move;
        case 0x33: /* V9 lddfa */
            if (gen_trap_ifnofpu(dc, cpu_cond)) {
                goto jmp_insn;
            }
            save_state(dc, cpu_cond);
            gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
            gen_update_fprs_dirty(DFPREG(rd));
            goto skip_move;
        case 0x3d: /* V9 prefetcha, no effect */
            goto skip_move;
        case 0x32: /* V9 ldqfa */
            CHECK_FPU_FEATURE(dc, FLOAT128);
            if (gen_trap_ifnofpu(dc, cpu_cond)) {
                goto jmp_insn;
            }
            save_state(dc, cpu_cond);
            gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
            gen_update_fprs_dirty(QFPREG(rd));
            goto skip_move;
        default:
            goto illegal_insn;
        }
        gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
    skip_move: ;
#endif
    } else if (xop >= 0x20 && xop < 0x24) {
        if (gen_trap_ifnofpu(dc, cpu_cond))
            goto jmp_insn;
        save_state(dc, cpu_cond);
        switch (xop) {
        case 0x20:      /* ldf, load fpreg */
            gen_address_mask(dc, cpu_addr);
            tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
            cpu_dst_32 = gen_dest_fpr_F();
            tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
            gen_store_fpr_F(dc, rd, cpu_dst_32);
            break;
        case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
            gen_address_mask(dc, cpu_addr);
            if (rd == 1) {
                tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                gen_helper_ldxfsr(cpu_env, cpu_tmp64);
            } else {
                tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                gen_helper_ldfsr(cpu_env, cpu_tmp32);
            }
#else
            {
                tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                gen_helper_ldfsr(cpu_env, cpu_tmp32);
            }
#endif
            break;
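        /* On V9 one opcode covers both forms: rd == 1 selects ldxfsr,
           which loads the full 64-bit FSR, while any other rd performs
           the 32-bit ldfsr. */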
        case 0x22:      /* ldqf, load quad fpreg */
            {
                TCGv_i32 r_const;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                r_const = tcg_const_i32(dc->mem_idx);
                gen_address_mask(dc, cpu_addr);
                gen_helper_ldqf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
                gen_op_store_QT0_fpr(QFPREG(rd));
                gen_update_fprs_dirty(QFPREG(rd));
            }
            break;
        case 0x23:      /* lddf, load double fpreg */
            gen_address_mask(dc, cpu_addr);
            cpu_dst_64 = gen_dest_fpr_D();
            tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
            gen_store_fpr_D(dc, rd, cpu_dst_64);
            break;
        default:
            goto illegal_insn;
        }
    } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
               xop == 0xe || xop == 0x1e) {
        gen_movl_reg_TN(rd, cpu_val);
        switch (xop) {
        case 0x4: /* st, store word */
            gen_address_mask(dc, cpu_addr);
            tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
            break;
        case 0x5: /* stb, store byte */
            gen_address_mask(dc, cpu_addr);
            tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
            break;
        case 0x6: /* sth, store halfword */
            gen_address_mask(dc, cpu_addr);
            tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
            break;
        case 0x7: /* std, store double word */
            if (rd & 1) {
                goto illegal_insn;
            } else {
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                gen_address_mask(dc, cpu_addr);
                r_const = tcg_const_i32(7);
                gen_helper_check_align(cpu_addr, r_const); // XXX remove
                tcg_temp_free_i32(r_const);
                gen_movl_reg_TN(rd + 1, cpu_tmp0);
                tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
            }
            break;
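        /* tcg_gen_concat_tl_i64 takes (result, low, high): rd + 1
           supplies the low half and rd the high half, so the big-endian
           64-bit store places rd at the lower address, as std requires
           for the even/odd register pair. */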
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
        case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
            if (IS_IMM)
                goto illegal_insn;
            if (!supervisor(dc))
                goto priv_insn;
#endif
            save_state(dc, cpu_cond);
            gen_st_asi(cpu_val, cpu_addr, insn, 4);
            dc->npc = DYNAMIC_PC;
            break;
        case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
            if (IS_IMM)
                goto illegal_insn;
            if (!supervisor(dc))
                goto priv_insn;
#endif
            save_state(dc, cpu_cond);
            gen_st_asi(cpu_val, cpu_addr, insn, 1);
            dc->npc = DYNAMIC_PC;
            break;
        case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
            if (IS_IMM)
                goto illegal_insn;
            if (!supervisor(dc))
                goto priv_insn;
#endif
            save_state(dc, cpu_cond);
            gen_st_asi(cpu_val, cpu_addr, insn, 2);
            dc->npc = DYNAMIC_PC;
            break;
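        /* For these alternate-space stores npc is forced to DYNAMIC_PC,
           presumably because a store through an ASI can change the MMU
           or other global state, so the block must not be chained
           statically past the store. */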
        case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
            if (IS_IMM)
                goto illegal_insn;
            if (!supervisor(dc))
                goto priv_insn;
#endif
            if (rd & 1) {
                goto illegal_insn;
            } else {
                save_state(dc, cpu_cond);
                gen_stda_asi(cpu_val, cpu_addr, insn, rd);
            }
            break;
#endif
#ifdef TARGET_SPARC64
        case 0x0e: /* V9 stx */
            gen_address_mask(dc, cpu_addr);
            tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
            break;
        case 0x1e: /* V9 stxa */
            save_state(dc, cpu_cond);
            gen_st_asi(cpu_val, cpu_addr, insn, 8);
            dc->npc = DYNAMIC_PC;
            break;
#endif
        default:
            goto illegal_insn;
        }
    } else if (xop > 0x23 && xop < 0x28) {
        if (gen_trap_ifnofpu(dc, cpu_cond))
            goto jmp_insn;
        save_state(dc, cpu_cond);
        switch (xop) {
        case 0x24: /* stf, store fpreg */
            gen_address_mask(dc, cpu_addr);
            cpu_src1_32 = gen_load_fpr_F(dc, rd);
            tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
            tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
            break;
        case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
            gen_address_mask(dc, cpu_addr);
            tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
            if (rd == 1)
                tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
            else
                tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
            tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
            tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
            break;
        case 0x26:
#ifdef TARGET_SPARC64
            /* V9 stqf, store quad fpreg */
            {
                TCGv_i32 r_const;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rd));
                r_const = tcg_const_i32(dc->mem_idx);
                gen_address_mask(dc, cpu_addr);
                gen_helper_stqf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
            }
            break;
#else /* !TARGET_SPARC64 */
            /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
            goto illegal_insn;
#else
            if (!supervisor(dc))
                goto priv_insn;
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            goto nfq_insn;
#endif
#endif
        case 0x27: /* stdf, store double fpreg */
            gen_address_mask(dc, cpu_addr);
            cpu_src1_64 = gen_load_fpr_D(dc, rd);
            tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
            break;
        default:
            goto illegal_insn;
        }
    } else if (xop > 0x33 && xop < 0x3f) {
        save_state(dc, cpu_cond);
        switch (xop) {
#ifdef TARGET_SPARC64
        case 0x34: /* V9 stfa */
            if (gen_trap_ifnofpu(dc, cpu_cond)) {
                goto jmp_insn;
            }
            gen_stf_asi(cpu_addr, insn, 4, rd);
            break;
        case 0x36: /* V9 stqfa */
            {
                TCGv_i32 r_const;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                if (gen_trap_ifnofpu(dc, cpu_cond)) {
                    goto jmp_insn;
                }
                r_const = tcg_const_i32(7);
                gen_helper_check_align(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
                gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
            }
            break;
        case 0x37: /* V9 stdfa */
            if (gen_trap_ifnofpu(dc, cpu_cond)) {
                goto jmp_insn;
            }
            gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
            break;
        case 0x3c: /* V9 casa */
            gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
            gen_movl_TN_reg(rd, cpu_val);
            break;
        case 0x3e: /* V9 casxa */
            gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
            gen_movl_TN_reg(rd, cpu_val);
            break;
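        /* gen_cas_asi/gen_casx_asi implement the 32-bit and 64-bit
           compare-and-swap forms: the memory word is compared against
           r[rs2] (cpu_src2) and swapped with r[rd] on a match; the old
           memory value always comes back in cpu_val, hence the
           gen_movl_TN_reg into rd after each helper. */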
#else
        case 0x34: /* stc */
        case 0x35: /* stcsr */
        case 0x36: /* stdcq */
        case 0x37: /* stdc */
            goto ncp_insn;
#endif
        default:
            goto illegal_insn;
        }
    } else {
        goto illegal_insn;
    }
    break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
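    /* This models the SPARC delay slot: the new pc is the old npc, and
       npc simply advances by one instruction word. */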
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        /* declared as TCGv_i32 so the matching _i32 free is used */
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
    if (dc->n_t32 != 0) {
        int i;
        for (i = dc->n_t32 - 1; i >= 0; --i) {
            tcg_temp_free_i32(dc->t32[i]);
        }
        dc->n_t32 = 0;
    }
}
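/* Translate guest code starting at tb->pc into TCG ops.  When spc is
   non-zero the translator also records per-insn pc/npc/icount values so
   a host pc can later be mapped back to guest CPU state (the "search
   pc" mode used by gen_intermediate_code_pc below). */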
static inline void gen_intermediate_code_internal(TranslationBlock *tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    /* loads and stores */
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
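    /* The translation loop therefore stops when the opcode buffer is
       nearly full, when the block gets within 32 bytes (8 instructions)
       of a page boundary, or when the icount budget is used up. */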
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
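    /* Direct chaining patches this TB to jump straight to its
       successor, which is only safe when both pc and npc are known at
       translation time; otherwise the state is written back and control
       returns to the main execution loop via tcg_gen_exit_tb. */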
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                           offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        }

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
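/* Rebuild env->pc/env->npc from the per-insn arrays after an exception
   inside a TB.  npc may hold a real address or one of the DYNAMIC_PC /
   JUMP_PC markers, which need special handling below. */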
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == DYNAMIC_PC) {
        /* dynamic NPC: already stored */
    } else if (npc == JUMP_PC) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr(env);
    }
}