/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];
#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
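/* Example of the pc/npc pair: for "bne L; add %g1, 1, %g1", the delay-slot
   add executes with pc = branch + 4 while npc already holds either L or
   branch + 8, which is why two program counters are tracked here.  */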
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
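/* Worked example: GET_FIELD numbers bits with the MSB as bit 0, so
   GET_FIELD(0x12345678, 0, 7) == 0x12 (the top byte), while GET_FIELD_SP
   uses manual order, so GET_FIELD_SP(0x12345678, 0, 7) == 0x78.  */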
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
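/* Illustrative mapping on SPARC64: the odd bit of the 5-bit register field
   selects the upper bank, so DFPREG(1) == 32 (%d32) while DFPREG(2) == 2.  */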
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
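/* e.g. sign_extend(0x3ffff, 18) == -1: the all-ones 18-bit displacement is
   shifted up to bit 31 and arithmetically shifted back down.  */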
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
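/* dt0/dt1 and qt0/qt1 are staging slots in CPUSPARCState: a double is moved
   as an even/odd pair of 32-bit registers, a quad as four consecutive ones. */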
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
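/* %g0 reads as zero and ignores writes; registers 8..31 live in the current
   register window, reached through cpu_regwptr rather than fixed TCG globals. */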
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
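/* Signed overflow of an add occurs when both operands share a sign that the
   result does not: ~(src1 ^ src2) & (src1 ^ dst) has bit 31 set exactly in
   that case, which is what the xor/not/and chain above computes.  */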
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
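/* Unsigned wrap-around detection: for dst = src1 + src2, a 32-bit carry out
   happened iff dst < src1 (e.g. 0xffffffff + 1 == 0 < 1), so one LTU setcond
   on the saved operands recovers the carry bit.  */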
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
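/* The ADD2 fast path re-runs the flag-setting add (cpu_cc_src + cpu_cc_src2)
   in the low half of a host add2, so its carry-out feeds the high half,
   which computes src1 + src2 + C in a single host operation.  */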
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
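/* One MULScc multiply step: if Y's LSB is 0 the addend is forced to zero;
   Y then shifts right taking src1's LSB, cpu_cc_src shifts right taking
   (N ^ V) from the previous step's flags, and the two halves are added.  */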
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
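/* E.g. umul 0x80000000 * 2: the 64-bit product is 0x1_00000000, so Y
   receives the high word 1 and dst the low word 0; smul of the same bit
   patterns sign-extends instead and yields Y = 0xffffffff.  */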
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
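/* INT64_MIN / -1 is the one signed division that overflows (its true result,
   2^63, is unrepresentable), so it is special-cased to return INT64_MIN
   rather than trap the host.  */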
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
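/* The table holds the negation of each BPr condition: e.g. BRZ (cond 1)
   stores TCG_COND_NE, because gen_cond_reg() below preloads r_dst with 0
   and branches around the "r_dst = 1" store when the negated test fires.  */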
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
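/* With the immediate bit clear, alternate-space accesses take the ASI from
   instruction bits 12..5; with it set, the %asi register supplies it, e.g.
   "lda [%o0] 0x80, %o1" encodes ASI_PRIMARY (0x80) directly.  */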
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
1834 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr
, TCGv_ptr cpu_env
)
1836 TCGv_i32 r_tl
= tcg_temp_new_i32();
1838 /* load env->tl into r_tl */
1839 tcg_gen_ld_i32(r_tl
, cpu_env
, offsetof(CPUSPARCState
, tl
));
1841 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1842 tcg_gen_andi_i32(r_tl
, r_tl
, MAXTL_MASK
);
1844 /* calculate offset to current trap state from env->ts, reuse r_tl */
1845 tcg_gen_muli_i32(r_tl
, r_tl
, sizeof (trap_state
));
1846 tcg_gen_addi_ptr(r_tsptr
, cpu_env
, offsetof(CPUState
, ts
));
1848 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1850 TCGv_ptr r_tl_tmp
= tcg_temp_new_ptr();
1851 tcg_gen_ext_i32_ptr(r_tl_tmp
, r_tl
);
1852 tcg_gen_add_ptr(r_tsptr
, r_tsptr
, r_tl_tmp
);
1853 tcg_temp_free_ptr(r_tl_tmp
);
1856 tcg_temp_free_i32(r_tl
);
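/* Net effect: r_tsptr = &env->ts[env->tl & MAXTL_MASK], computed entirely in
   generated code so the trap level current at run time is honoured.  */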
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
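    /* Top-level decode: op = insn[31:30] selects 0 branches/sethi, 1 call,
       2 arithmetic/FPU, 3 loads/stores; rd = insn[29:25]; the op3 field
       (GET_FIELD(insn, 7, 12)) refines format-2 and format-3 opcodes.  */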
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
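    /* CALL stores its own address in %o7 (register 15) before jumping, so a
       callee can return with "jmpl %o7 + 8, %g0", skipping the delay slot.  */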
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
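            /* Net effect of Tcc: the exception raised is TT_TRAP +
               ((rs1 + rs2/imm) & 0x7f), widened to & 0xff with the
               hypervisor feature, so e.g. "ta 8" raises software trap 8.  */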
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
2383 case 0x45: /* fsubs */
2384 gen_clear_float_exceptions();
2385 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2386 gen_helper_check_ieee_exceptions();
2387 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2389 case 0x46: /* fsubd */
2390 gen_op_load_fpr_DT0(DFPREG(rs1
));
2391 gen_op_load_fpr_DT1(DFPREG(rs2
));
2392 gen_clear_float_exceptions();
2394 gen_helper_check_ieee_exceptions();
2395 gen_op_store_DT0_fpr(DFPREG(rd
));
2397 case 0x47: /* fsubq */
2398 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2399 gen_op_load_fpr_QT0(QFPREG(rs1
));
2400 gen_op_load_fpr_QT1(QFPREG(rs2
));
2401 gen_clear_float_exceptions();
2403 gen_helper_check_ieee_exceptions();
2404 gen_op_store_QT0_fpr(QFPREG(rd
));
2406 case 0x49: /* fmuls */
2407 CHECK_FPU_FEATURE(dc
, FMUL
);
2408 gen_clear_float_exceptions();
2409 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2410 gen_helper_check_ieee_exceptions();
2411 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2413 case 0x4a: /* fmuld */
2414 CHECK_FPU_FEATURE(dc
, FMUL
);
2415 gen_op_load_fpr_DT0(DFPREG(rs1
));
2416 gen_op_load_fpr_DT1(DFPREG(rs2
));
2417 gen_clear_float_exceptions();
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_DT0_fpr(DFPREG(rd
));
2422 case 0x4b: /* fmulq */
2423 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2424 CHECK_FPU_FEATURE(dc
, FMUL
);
2425 gen_op_load_fpr_QT0(QFPREG(rs1
));
2426 gen_op_load_fpr_QT1(QFPREG(rs2
));
2427 gen_clear_float_exceptions();
2429 gen_helper_check_ieee_exceptions();
2430 gen_op_store_QT0_fpr(QFPREG(rd
));
2432 case 0x4d: /* fdivs */
2433 gen_clear_float_exceptions();
2434 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2435 gen_helper_check_ieee_exceptions();
2436 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2438 case 0x4e: /* fdivd */
2439 gen_op_load_fpr_DT0(DFPREG(rs1
));
2440 gen_op_load_fpr_DT1(DFPREG(rs2
));
2441 gen_clear_float_exceptions();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_DT0_fpr(DFPREG(rd
));
2446 case 0x4f: /* fdivq */
2447 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2448 gen_op_load_fpr_QT0(QFPREG(rs1
));
2449 gen_op_load_fpr_QT1(QFPREG(rs2
));
2450 gen_clear_float_exceptions();
2452 gen_helper_check_ieee_exceptions();
2453 gen_op_store_QT0_fpr(QFPREG(rd
));
2455 case 0x69: /* fsmuld */
2456 CHECK_FPU_FEATURE(dc
, FSMULD
);
2457 gen_clear_float_exceptions();
2458 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2459 gen_helper_check_ieee_exceptions();
2460 gen_op_store_DT0_fpr(DFPREG(rd
));
2462 case 0x6e: /* fdmulq */
2463 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2464 gen_op_load_fpr_DT0(DFPREG(rs1
));
2465 gen_op_load_fpr_DT1(DFPREG(rs2
));
2466 gen_clear_float_exceptions();
2467 gen_helper_fdmulq();
2468 gen_helper_check_ieee_exceptions();
2469 gen_op_store_QT0_fpr(QFPREG(rd
));
2471 case 0xc4: /* fitos */
2472 gen_clear_float_exceptions();
2473 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2474 gen_helper_check_ieee_exceptions();
2475 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2477 case 0xc6: /* fdtos */
2478 gen_op_load_fpr_DT1(DFPREG(rs2
));
2479 gen_clear_float_exceptions();
2480 gen_helper_fdtos(cpu_tmp32
);
2481 gen_helper_check_ieee_exceptions();
2482 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2484 case 0xc7: /* fqtos */
2485 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2486 gen_op_load_fpr_QT1(QFPREG(rs2
));
2487 gen_clear_float_exceptions();
2488 gen_helper_fqtos(cpu_tmp32
);
2489 gen_helper_check_ieee_exceptions();
2490 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2492 case 0xc8: /* fitod */
2493 gen_helper_fitod(cpu_fpr
[rs2
]);
2494 gen_op_store_DT0_fpr(DFPREG(rd
));
2496 case 0xc9: /* fstod */
2497 gen_helper_fstod(cpu_fpr
[rs2
]);
2498 gen_op_store_DT0_fpr(DFPREG(rd
));
2500 case 0xcb: /* fqtod */
2501 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2502 gen_op_load_fpr_QT1(QFPREG(rs2
));
2503 gen_clear_float_exceptions();
2505 gen_helper_check_ieee_exceptions();
2506 gen_op_store_DT0_fpr(DFPREG(rd
));
2508 case 0xcc: /* fitoq */
2509 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2510 gen_helper_fitoq(cpu_fpr
[rs2
]);
2511 gen_op_store_QT0_fpr(QFPREG(rd
));
2513 case 0xcd: /* fstoq */
2514 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2515 gen_helper_fstoq(cpu_fpr
[rs2
]);
2516 gen_op_store_QT0_fpr(QFPREG(rd
));
2518 case 0xce: /* fdtoq */
2519 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2520 gen_op_load_fpr_DT1(DFPREG(rs2
));
2522 gen_op_store_QT0_fpr(QFPREG(rd
));
2524 case 0xd1: /* fstoi */
2525 gen_clear_float_exceptions();
2526 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2527 gen_helper_check_ieee_exceptions();
2528 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2530 case 0xd2: /* fdtoi */
2531 gen_op_load_fpr_DT1(DFPREG(rs2
));
2532 gen_clear_float_exceptions();
2533 gen_helper_fdtoi(cpu_tmp32
);
2534 gen_helper_check_ieee_exceptions();
2535 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2537 case 0xd3: /* fqtoi */
2538 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2539 gen_op_load_fpr_QT1(QFPREG(rs2
));
2540 gen_clear_float_exceptions();
2541 gen_helper_fqtoi(cpu_tmp32
);
2542 gen_helper_check_ieee_exceptions();
2543 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
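                /* All double- and quad-precision operations above follow
                   the same staging pattern: the operands are first copied
                   into the dt0/dt1 (or qt0/qt1) scratch slots of the CPU
                   state, the helper works on those slots, and the result
                   is copied back from DT0/QT0 into the destination
                   register pair.  Only single-precision results pass
                   through cpu_tmp32 directly. */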
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
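                /* The register-conditional moves above, and the FMOVcc
                   macros below, are all emitted as a branch over a move:
                   evaluate the condition, branch past the register copy
                   when the move must not happen, then bind the label.
                   gen_tcg_cond_reg[] holds the inverted comparison, so
                   the branch is taken exactly when the test fails. */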
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                            \
                    {                                           \
                        TCGv r_cond;                            \
                        int l1;                                 \
                                                                \
                        l1 = gen_new_label();                   \
                        r_cond = tcg_temp_new();                \
                        cond = GET_FIELD_SP(insn, 14, 17);      \
                        gen_fcond(r_cond, fcc, cond);           \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1);              \
                        tcg_gen_mov_i32(cpu_fpr[rd],            \
                                        cpu_fpr[rs2]);          \
                        gen_set_label(l1);                      \
                        tcg_temp_free(r_cond);                  \
                    }
#define FMOVDCC(fcc)                                            \
                    {                                           \
                        TCGv r_cond;                            \
                        int l1;                                 \
                                                                \
                        l1 = gen_new_label();                   \
                        r_cond = tcg_temp_new();                \
                        cond = GET_FIELD_SP(insn, 14, 17);      \
                        gen_fcond(r_cond, fcc, cond);           \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1);              \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],    \
                                        cpu_fpr[DFPREG(rs2)]);  \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],\
                                        cpu_fpr[DFPREG(rs2) + 1]); \
                        gen_set_label(l1);                      \
                        tcg_temp_free(r_cond);                  \
                    }
#define FMOVQCC(fcc)                                            \
                    {                                           \
                        TCGv r_cond;                            \
                        int l1;                                 \
                                                                \
                        l1 = gen_new_label();                   \
                        r_cond = tcg_temp_new();                \
                        cond = GET_FIELD_SP(insn, 14, 17);      \
                        gen_fcond(r_cond, fcc, cond);           \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1);              \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],    \
                                        cpu_fpr[QFPREG(rs2)]);  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],\
                                        cpu_fpr[QFPREG(rs2) + 1]); \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],\
                                        cpu_fpr[QFPREG(rs2) + 2]); \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],\
                                        cpu_fpr[QFPREG(rs2) + 3]); \
                        gen_set_label(l1);                      \
                        tcg_temp_free(r_cond);                  \
                    }
                case 0x001: /* V9 fmovscc %fcc0 */
                    FMOVSCC(0);
                    break;
                case 0x002: /* V9 fmovdcc %fcc0 */
                    FMOVDCC(0);
                    break;
                case 0x003: /* V9 fmovqcc %fcc0 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(0);
                    break;
                case 0x041: /* V9 fmovscc %fcc1 */
                    FMOVSCC(1);
                    break;
                case 0x042: /* V9 fmovdcc %fcc1 */
                    FMOVDCC(1);
                    break;
                case 0x043: /* V9 fmovqcc %fcc1 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(1);
                    break;
                case 0x081: /* V9 fmovscc %fcc2 */
                    FMOVSCC(2);
                    break;
                case 0x082: /* V9 fmovdcc %fcc2 */
                    FMOVDCC(2);
                    break;
                case 0x083: /* V9 fmovqcc %fcc2 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(2);
                    break;
                case 0x0c1: /* V9 fmovscc %fcc3 */
                    FMOVSCC(3);
                    break;
                case 0x0c2: /* V9 fmovdcc %fcc3 */
                    FMOVDCC(3);
                    break;
                case 0x0c3: /* V9 fmovqcc %fcc3 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(3);
                    break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                            \
                    {                                           \
                        TCGv r_cond;                            \
                        int l1;                                 \
                                                                \
                        l1 = gen_new_label();                   \
                        r_cond = tcg_temp_new();                \
                        cond = GET_FIELD_SP(insn, 14, 17);      \
                        gen_cond(r_cond, icc, cond, dc);        \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1);              \
                        tcg_gen_mov_i32(cpu_fpr[rd],            \
                                        cpu_fpr[rs2]);          \
                        gen_set_label(l1);                      \
                        tcg_temp_free(r_cond);                  \
                    }
#define FMOVDCC(icc)                                            \
                    {                                           \
                        TCGv r_cond;                            \
                        int l1;                                 \
                                                                \
                        l1 = gen_new_label();                   \
                        r_cond = tcg_temp_new();                \
                        cond = GET_FIELD_SP(insn, 14, 17);      \
                        gen_cond(r_cond, icc, cond, dc);        \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1);              \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],    \
                                        cpu_fpr[DFPREG(rs2)]);  \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],\
                                        cpu_fpr[DFPREG(rs2) + 1]); \
                        gen_set_label(l1);                      \
                        tcg_temp_free(r_cond);                  \
                    }
#define FMOVQCC(icc)                                            \
                    {                                           \
                        TCGv r_cond;                            \
                        int l1;                                 \
                                                                \
                        l1 = gen_new_label();                   \
                        r_cond = tcg_temp_new();                \
                        cond = GET_FIELD_SP(insn, 14, 17);      \
                        gen_cond(r_cond, icc, cond, dc);        \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                           0, l1);              \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],    \
                                        cpu_fpr[QFPREG(rs2)]);  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],\
                                        cpu_fpr[QFPREG(rs2) + 1]); \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],\
                                        cpu_fpr[QFPREG(rs2) + 2]); \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],\
                                        cpu_fpr[QFPREG(rs2) + 3]); \
                        gen_set_label(l1);                      \
                        tcg_temp_free(r_cond);                  \
                    }
                case 0x101: /* V9 fmovscc %icc */
                    FMOVSCC(0);
                    break;
                case 0x102: /* V9 fmovdcc %icc */
                    FMOVDCC(0);
                case 0x103: /* V9 fmovqcc %icc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(0);
                    break;
                case 0x181: /* V9 fmovscc %xcc */
                    FMOVSCC(1);
                    break;
                case 0x182: /* V9 fmovdcc %xcc */
                    FMOVDCC(1);
                    break;
                case 0x183: /* V9 fmovqcc %xcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(1);
                    break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                case 0x51: /* fcmps, V9 %fcc */
                    gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x52: /* fcmpd, V9 %fcc */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_op_fcmpd(rd & 3);
                    break;
                case 0x53: /* fcmpq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpq(rd & 3);
                    break;
                case 0x55: /* fcmpes, V9 %fcc */
                    gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x56: /* fcmped, V9 %fcc */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_op_fcmped(rd & 3);
                    break;
                case 0x57: /* fcmpeq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpeq(rd & 3);
                    break;
                default:
                    goto illegal_insn;
                }
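                /* For the compares, rd & 3 selects which of the four V9
                   %fcc fields receives the result; on V8 only %fcc0
                   exists and the rd field decodes as zero. */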
            } else if (xop == 0x2) {
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
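                /* %g0 is hardwired to zero, so "or" against it
                   degenerates into a plain register or immediate move;
                   the special cases above avoid emitting a redundant
                   TCG or-op for the synthesized mov. */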
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
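            /* For all three V9 shifts, bit 12 of the instruction selects
               the 64-bit (sllx/srlx/srax) form: the shift count is masked
               with 0x3f instead of 0x1f, while the 32-bit srl/sra forms
               first truncate (srl) or sign-extend (sra) the source to 32
               bits. */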
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
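                    /* Condition codes are evaluated lazily: the cc-setting
                       ops above only record enough state for a later
                       gen_helper_compute_psr() to reconstruct the flags.
                       cpu_cc_op (the run-time copy) and dc->cc_op (the
                       translation-time copy) must stay in lockstep, which
                       is why every CC_OP_* update comes in pairs. */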
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr();
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25: /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26: /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27: /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            switch (rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
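                            /* Writes to the tick/stick compare registers
                               go through a helper that is handed a
                               pointer to the timer, so the new limit is
                               applied to the QEMU timer immediately, not
                               just recorded in the register file. */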
                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved();
                                break;
                            case 1:
                                gen_helper_restored();
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc, cpu_cond);
                                gen_helper_wrpstate(cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc, cpu_cond);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
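                    /* wrpsr and wrpstate change execution state that the
                       translator itself depends on, so the TB ends here:
                       either by exiting to the main loop right after this
                       instruction, or by marking the next PC dynamic. */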
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {   /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                               cpu_src1, 0, l1);
                            if (IS_IMM) {   /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
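                /* movr tests an integer register directly instead of the
                   condition codes; gen_tcg_cond_reg[] maps the three-bit
                   SPARC register condition onto the inverse TCG
                   comparison used for the skip branch. */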
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
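                /* array16 and array32 reuse the array8 helper and simply
                   scale the resulting blocked address by the element
                   size (<< 1 or << 2). */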
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                     cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
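            /* Like jmpl below, the target of "return" is only known at
               run time, so cpu_npc is loaded from the computed address
               and dc->npc is left DYNAMIC_PC, forcing this TB to end in
               an indirect jump. */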
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38: /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39: /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c: /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d: /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e: /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
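            /* A memory access can fault, and the trap entry must see
               architecturally correct flags, so any deferred
               condition-code computation is materialized here before the
               access is emitted. */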
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
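                /* This opcode-range test selects the integer loads and
                   the load-like read-modify-write ops (ldstub, swap and
                   their alternate-space variants); FP loads, stores and
                   casa/casxa are handled by the branches further down. */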
                switch (xop) {
                case 0x0: /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1: /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2: /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3: /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9: /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa: /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd: /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f: /* swap, swap register with memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10: /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11: /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12: /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13: /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19: /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a: /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d: /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f: /* swapa, swap reg with alt. memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20: /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21: /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22: /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23: /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
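                /* A store through an alternate space identifier may have
                   touched MMU or other system registers, so the next PC
                   is treated as dynamic and translation resumes from
                   scratch afterwards; the same applies to the other
                   alternate-space stores below. */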
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
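                /* For casa/casxa the comparison value comes from rs2 and
                   the value to swap from rd; the old memory word comes
                   back in rd, which is why cpu_src2 was captured before
                   the address computation above. */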
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
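    /* This models the SPARC delay slot: pc and npc advance as a pair,
       and a conditional delayed branch leaves npc as JUMP_PC with both
       possible targets in jump_pc[] until the condition is resolved at
       TB exit. */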
4742 save_state(dc
, cpu_cond
);
4743 r_const
= tcg_const_i32(TT_ILL_INSN
);
4744 gen_helper_raise_exception(r_const
);
4745 tcg_temp_free_i32(r_const
);
4753 save_state(dc
, cpu_cond
);
4754 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4755 gen_helper_raise_exception(r_const
);
4756 tcg_temp_free_i32(r_const
);
4760 #if !defined(CONFIG_USER_ONLY)
4765 save_state(dc
, cpu_cond
);
4766 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4767 gen_helper_raise_exception(r_const
);
4768 tcg_temp_free_i32(r_const
);
4774 save_state(dc
, cpu_cond
);
4775 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4778 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4780 save_state(dc
, cpu_cond
);
4781 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
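/* Translate guest code starting at tb->pc into TCG ops.  With spc == 0
   this is a normal translation; with spc != 0 it also fills the
   gen_opc_* side tables (pc, npc, icount per op) so that a host PC
   inside the TB can later be mapped back to a guest state (see
   gen_pc_load below). */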
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
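        /* Check for soft breakpoints before translating the insn so the
           debug trap is taken with pc/npc already synchronized. */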
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
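    /* Loop bounds: the opcode buffer must not overflow (gen_opc_end),
       the TB must not span a guest page boundary (TARGET_PAGE_SIZE - 32
       leaves a safety margin before the boundary check above), and
       num_insns respects the icount budget. */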
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
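    /* gen_goto_tb() emits a patchable direct jump, so consecutive TBs
       chain without returning to the main execution loop.  That is only
       possible when pc and npc are both known at translation time;
       otherwise the current values are written back and tcg_gen_exit_tb(0)
       returns control to the loop for a TB lookup. */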
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
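/* Entry points: both wrappers run the same translator; spc selects
   between generating a TB for execution (0) and re-translating it to
   recover a searched guest PC after a fault (1). */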
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                           offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
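/* Restore the guest (pc, npc) pair for the op at pc_pos, using the side
   tables recorded by gen_intermediate_code_pc(); called when a fault in
   generated code must be attributed to a precise guest instruction. */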
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == DYNAMIC_PC) {
        /* dynamic NPC: already stored */
    } else if (npc == JUMP_PC) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}