/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
/* v9 specific */
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
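/* Worked example of the MSB-first field order: the SETHI encoding
   0x03000004 (sethi %hi(0x1000), %g1) keeps rd in insn<29:25>, so
   GET_FIELD(insn, 2, 6) shifts right by 31 - 6 = 25 and masks 5 bits,
   yielding rd == 1: field bit 2 is instruction bit 29. */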
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
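/* Example: on TARGET_SPARC64 the LSB of the 5-bit register field encodes
   bit 5 of the double-register number, so an rd field of 3 (0b00011) names
   %f34: DFPREG(3) == ((3 & 1) << 5) | (3 & 0x1e) == 32 | 2 == 34. */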
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
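/* Signed-overflow rule used above: an add overflows iff both operands have
   the same sign and the result's sign differs, i.e. bit 31 of
   (~(src1 ^ src2)) & (src1 ^ dst) is set.  Example: 0x7fffffff + 1 gives
   0x80000000; src1 ^ src2 has bit 31 clear, src1 ^ dst has it set. */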
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
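/* Unsigned-carry rule used above: after dst = src1 + src2 (mod 2^32), the
   add carried out iff dst < src1 as unsigned values.  Example:
   0xffffffff + 0x2 wraps to 0x1, and 0x1 < 0xffffffff, so carry = 1. */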
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
*dc
, target_ulong pc1
,
1029 target_ulong pc2
, TCGv r_cond
)
1033 l1
= gen_new_label();
1035 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1037 gen_goto_tb(dc
, 0, pc2
, pc1
);
1040 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 18);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
2316 } else if (xop
== 0x34) { /* FPU Operations */
2317 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2319 gen_op_clear_ieee_excp_and_FTT();
2320 rs1
= GET_FIELD(insn
, 13, 17);
2321 rs2
= GET_FIELD(insn
, 27, 31);
2322 xop
= GET_FIELD(insn
, 18, 26);
2323 save_state(dc
, cpu_cond
);
2325 case 0x1: /* fmovs */
2326 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2328 case 0x5: /* fnegs */
2329 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2331 case 0x9: /* fabss */
2332 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2334 case 0x29: /* fsqrts */
2335 CHECK_FPU_FEATURE(dc
, FSQRT
);
2336 gen_clear_float_exceptions();
2337 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2338 gen_helper_check_ieee_exceptions();
2339 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2341 case 0x2a: /* fsqrtd */
2342 CHECK_FPU_FEATURE(dc
, FSQRT
);
2343 gen_op_load_fpr_DT1(DFPREG(rs2
));
2344 gen_clear_float_exceptions();
2345 gen_helper_fsqrtd();
2346 gen_helper_check_ieee_exceptions();
2347 gen_op_store_DT0_fpr(DFPREG(rd
));
2349 case 0x2b: /* fsqrtq */
2350 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2351 gen_op_load_fpr_QT1(QFPREG(rs2
));
2352 gen_clear_float_exceptions();
2353 gen_helper_fsqrtq();
2354 gen_helper_check_ieee_exceptions();
2355 gen_op_store_QT0_fpr(QFPREG(rd
));
2357 case 0x41: /* fadds */
2358 gen_clear_float_exceptions();
2359 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2360 gen_helper_check_ieee_exceptions();
2361 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2363 case 0x42: /* faddd */
2364 gen_op_load_fpr_DT0(DFPREG(rs1
));
2365 gen_op_load_fpr_DT1(DFPREG(rs2
));
2366 gen_clear_float_exceptions();
2368 gen_helper_check_ieee_exceptions();
2369 gen_op_store_DT0_fpr(DFPREG(rd
));
2371 case 0x43: /* faddq */
2372 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2373 gen_op_load_fpr_QT0(QFPREG(rs1
));
2374 gen_op_load_fpr_QT1(QFPREG(rs2
));
2375 gen_clear_float_exceptions();
2377 gen_helper_check_ieee_exceptions();
2378 gen_op_store_QT0_fpr(QFPREG(rd
));
            case 0x45: /* fsubs */
                gen_clear_float_exceptions();
                gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x46: /* fsubd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x47: /* fsubq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x49: /* fmuls */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_clear_float_exceptions();
                gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4a: /* fmuld */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmuld();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4b: /* fmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x4d: /* fdivs */
                gen_clear_float_exceptions();
                gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4e: /* fdivd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4f: /* fdivq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x69: /* fsmuld */
                CHECK_FPU_FEATURE(dc, FSMULD);
                gen_clear_float_exceptions();
                gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x6e: /* fdmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xc4: /* fitos */
                gen_clear_float_exceptions();
                gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc6: /* fdtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc7: /* fqtos */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc8: /* fitod */
                gen_helper_fitod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xc9: /* fstod */
                gen_helper_fstod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcb: /* fqtod */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcc: /* fitoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fitoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xcd: /* fstoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fstoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xce: /* fdtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fdtoq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xd1: /* fstoi */
                gen_clear_float_exceptions();
                gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd2: /* fdtoi */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd3: /* fqtoi */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
#ifdef TARGET_SPARC64
            case 0x2: /* V9 fmovd */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x3: /* V9 fmovq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                break;
            case 0x6: /* V9 fnegd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fnegd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x7: /* V9 fnegq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fnegq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xa: /* V9 fabsd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fabsd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xb: /* V9 fabsq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fabsq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x81: /* V9 fstox */
                gen_clear_float_exceptions();
                gen_helper_fstox(cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x82: /* V9 fdtox */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x83: /* V9 fqtox */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x84: /* V9 fxtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x88: /* V9 fxtod */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x8c: /* V9 fxtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtoq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#endif
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
            save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_fcond(r_cond, fcc, cond);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVDCC(fcc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_fcond(r_cond, fcc, cond);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                cpu_fpr[DFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                cpu_fpr[DFPREG(rs2) + 1]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVQCC(fcc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_fcond(r_cond, fcc, cond);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                cpu_fpr[QFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                cpu_fpr[QFPREG(rs2) + 1]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                cpu_fpr[QFPREG(rs2) + 2]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                cpu_fpr[QFPREG(rs2) + 3]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
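/* The FMOV*CC macros implement V9 FMOVcc on a floating-point condition code:
   gen_fcond() materializes the chosen %fccN condition in r_cond and the
   brcondi on r_cond == 0 branches over the register move when the condition
   is false.  The %icc/%xcc variants defined in the #else branch below differ
   only in using gen_cond() instead of gen_fcond(). */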
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVSCC(0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVDCC(0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVSCC(1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVDCC(1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVSCC(2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVDCC(2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(2);
                break;
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVSCC(3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVDCC(3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(3);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#else
#define FMOVSCC(icc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_cond(r_cond, icc, cond, dc);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVDCC(icc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_cond(r_cond, icc, cond, dc);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                cpu_fpr[DFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                cpu_fpr[DFPREG(rs2) + 1]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVQCC(icc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_cond(r_cond, icc, cond, dc);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                cpu_fpr[QFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                cpu_fpr[QFPREG(rs2) + 1]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                cpu_fpr[QFPREG(rs2) + 2]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                cpu_fpr[QFPREG(rs2) + 3]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
            case 0x101: /* V9 fmovscc %icc */
                FMOVSCC(0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVDCC(0);
                break;          /* was missing in the original, falling
                                   through into fmovqcc */
            case 0x103: /* V9 fmovqcc %icc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x181: /* V9 fmovscc %xcc */
                FMOVSCC(1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVDCC(1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x2) {
            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {   /* immediate */
                    TCGv r_const;

                    simm = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl(simm);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {        /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop < 0x36) {
            if (xop < 0x20) {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop & ~0x10) {
                case 0x0: /* add */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x1: /* and */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x2: /* or */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x3: /* xor */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x4: /* sub */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                        } else {
                            tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x5: /* andn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x6: /* orn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x7: /* xorn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x8: /* addx, V9 addc */
                    gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
#endif
                case 0xa: /* umul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xb: /* smul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xc: /* subx, V9 subc */
                    gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                    tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                    gen_trap_ifdivzero_tl(cpu_cc_src2);
                    tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                        dc->cc_op = CC_OP_DIV;
                    }
                    break;
                case 0xf: /* sdiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                        dc->cc_op = CC_OP_DIV;
                    }
                    break;
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {            /* cc */
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                    dc->cc_op = CC_OP_TADD;
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                    dc->cc_op = CC_OP_TSUB;
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                    dc->cc_op = CC_OP_TADDTV;
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                    dc->cc_op = CC_OP_TSUBTV;
                    break;
                case 0x24: /* mulscc */
                    gen_helper_compute_psr();
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                    dc->cc_op = CC_OP_ADD;
                    break;
#ifndef TARGET_SPARC64
                case 0x25: /* sll */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else {            /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26: /* srl */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else {            /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27: /* sra */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else {            /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
                case 0x30:
                    {
                        switch (rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8 manual,
                                               nop on the microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                            tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                            /* end the TB: FPRS changes affect whether FP
                               insns trap */
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc)) {
                                ; // XXX
                            }
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc, cpu_cond))
                                goto jmp_insn;
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* Softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_tmp64);
                            break;
                        case 0x15: /* Softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_tmp64);
                            break;
                        case 0x16: /* Softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_tmp64);
                            break;
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_dst);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation
                                      Counter */
                        case 0x12: /* Dispatch Control */
#endif
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0:
                            gen_helper_saved();
                            break;
                        case 1:
                            gen_helper_restored();
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            // XXX
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_helper_wrpsr(cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                        dc->cc_op = CC_OP_FLAGS;
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: // tpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 1: // tnpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tnpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 2: // tstate
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state,
                                                       tstate));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 3: // tt
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                               offsetof(trap_state, tt));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 4: // tick
                            {
                                TCGv_ptr r_tickptr;

                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 5: // tba
                            tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                            break;
                        case 6: // pstate
                            save_state(dc, cpu_cond);
                            gen_helper_wrpstate(cpu_tmp0);
                            dc->npc = DYNAMIC_PC;
                            break;
                        case 7: // tl
                            save_state(dc, cpu_cond);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, tl));
                            dc->npc = DYNAMIC_PC;
                            break;
                        case 8: // pil
                            gen_helper_wrpil(cpu_tmp0);
                            break;
                        case 9: // cwp
                            gen_helper_wrcwp(cpu_tmp0);
                            break;
                        case 10: // cansave
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cansave));
                            break;
                        case 11: // canrestore
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    canrestore));
                            break;
                        case 12: // cleanwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cleanwin));
                            break;
                        case 13: // otherwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    otherwin));
                            break;
                        case 14: // wstate
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    wstate));
                            break;
                        case 16: // UA2005 gl
                            CHECK_IU_FEATURE(dc, GL);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, gl));
                            break;
                        case 26: // UA2005 strand status
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        if (dc->def->nwindows != 32)
                            tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                            (1 << dc->def->nwindows) - 1);
                        tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                    }
                    break;
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                        CHECK_IU_FEATURE(dc, HYPV);
                        if (!hypervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        switch (rd) {
                        case 0: // hpstate
                            // XXX gen_op_wrhpstate();
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 1: // htstate
                            // XXX gen_op_wrhtstate();
                            break;
                        case 3: // hintp
                            tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                            break;
                        case 5: // htba
                            tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                            break;
                        case 31: // hstick_cmpr
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, hstick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_hstick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 6: // hver readonly
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new();
                        if (insn & (1 << 18)) {
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond, dc);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond, dc);
                            else
                                goto illegal_insn;
                        } else {
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 10);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        tcg_temp_free(r_cond);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    {
                        cpu_src2 = get_src2(insn, cpu_src2);
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x2f: /* V9 movr */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                           cpu_src1, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 9);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
            }
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;

            switch (opf) {
            case 0x000: /* VIS I edge8cc */
            case 0x001: /* VIS II edge8n */
            case 0x002: /* VIS I edge8lcc */
            case 0x003: /* VIS II edge8ln */
            case 0x004: /* VIS I edge16cc */
            case 0x005: /* VIS II edge16n */
            case 0x006: /* VIS I edge16lcc */
            case 0x007: /* VIS II edge16ln */
            case 0x008: /* VIS I edge32cc */
            case 0x009: /* VIS II edge32n */
            case 0x00a: /* VIS I edge32lcc */
            case 0x00b: /* VIS II edge32ln */
                // XXX
                goto illegal_insn;
            case 0x010: /* VIS I array8 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x018: /* VIS I alignaddr */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x019: /* VIS II bmask */
            case 0x01a: /* VIS I alignaddrl */
                // XXX
                goto illegal_insn;
            case 0x020: /* VIS I fcmple16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x022: /* VIS I fcmpne16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x024: /* VIS I fcmple32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x026: /* VIS I fcmpne32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x028: /* VIS I fcmpgt16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02a: /* VIS I fcmpeq16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02c: /* VIS I fcmpgt32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02e: /* VIS I fcmpeq32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x031: /* VIS I fmul8x16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x033: /* VIS I fmul8x16au */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16au();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x035: /* VIS I fmul8x16al */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16al();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x036: /* VIS I fmul8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x037: /* VIS I fmul8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x038: /* VIS I fmuld8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x039: /* VIS I fmuld8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x03a: /* VIS I fpack32 */
            case 0x03b: /* VIS I fpack16 */
            case 0x03d: /* VIS I fpackfix */
            case 0x03e: /* VIS I pdist */
                // XXX
                goto illegal_insn;
            case 0x048: /* VIS I faligndata */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_faligndata();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04b: /* VIS I fpmerge */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpmerge();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04c: /* VIS II bshuffle */
                // XXX
                goto illegal_insn;
            case 0x04d: /* VIS I fexpand */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fexpand();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x050: /* VIS I fpadd16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x051: /* VIS I fpadd16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x052: /* VIS I fpadd32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x053: /* VIS I fpadd32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x054: /* VIS I fpsub16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x055: /* VIS I fpsub16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x056: /* VIS I fpsub32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x057: /* VIS I fpsub32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x060: /* VIS I fzero */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                break;
            case 0x061: /* VIS I fzeros */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], 0);
                break;
            case 0x062: /* VIS I fnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x063: /* VIS I fnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x064: /* VIS I fandnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x065: /* VIS I fandnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x066: /* VIS I fnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x067: /* VIS I fnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x068: /* VIS I fandnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                 cpu_fpr[DFPREG(rs1)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x069: /* VIS I fandnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x06a: /* VIS I fnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x06b: /* VIS I fnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x06c: /* VIS I fxor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x06d: /* VIS I fxors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x06e: /* VIS I fnand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x06f: /* VIS I fnands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x070: /* VIS I fand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x071: /* VIS I fands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x072: /* VIS I fxnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x073: /* VIS I fxnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                break;
            case 0x074: /* VIS I fsrc1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x075: /* VIS I fsrc1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x076: /* VIS I fornot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x077: /* VIS I fornot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x078: /* VIS I fsrc2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x079: /* VIS I fsrc2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x07a: /* VIS I fornot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x07b: /* VIS I fornot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x07c: /* VIS I for */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                               cpu_fpr[DFPREG(rs2)]);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                               cpu_fpr[DFPREG(rs1) + 1],
                               cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x07d: /* VIS I fors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x07e: /* VIS I fone */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                break;
            case 0x07f: /* VIS I fones */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], -1);
                break;
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
                // XXX
                goto illegal_insn;
            default:
                goto illegal_insn;
            }
#else
            goto ncp_insn;
#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            TCGv_i32 r_const;

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38:  /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39:  /* rett, V9 return */
                {
                    TCGv_i32 r_const;

                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett();
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                gen_helper_flush(cpu_dst);
                break;
            case 0x3c:  /* save */
                save_state(dc, cpu_cond);
                gen_helper_save();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d:  /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e:  /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_done();
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_retry();
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        break;
    case 3:     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {        /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {                    /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:   /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:   /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:   /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:   /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:   /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:   /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:   /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:  /* swap, swap register with memory. Also
                               atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:  /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:  /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:  /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:  /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:  /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:  /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:  /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:  /* swapa, swap reg with alt. memory. Also
                               atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif /* !CONFIG_USER_ONLY || TARGET_SPARC64 */
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:  /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:  /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22:  /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:  /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
static inline void gen_intermediate_code_internal(TranslationBlock *tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    dc->singlestep = (env->singlestep_enabled || singlestep);
    /* never run past the end of the TCG opcode buffer */
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    /* loads and stores: local temps keep their value across TCG branches */
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
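        /* In the search-PC pass, record pc/npc and the instruction count for
           every guest instruction so the exact CPU state can be
           reconstructed from any host PC within this TB. */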
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
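    /* Translation also ends when the TCG opcode buffer is nearly full
       (gen_opc_end), when the TB approaches a page worth of guest code,
       or when the icount budget (max_insns) is used up. */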
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
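    /* A TB ending with known pc/npc values is chained directly to its
       successor; otherwise the (possibly dynamic) values are written back
       to cpu_pc/cpu_npc above and control returns to the execution loop. */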
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
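    /* In the search-PC pass (spc != 0) the TB was already sized by the
       original translation, so only the gen_opc_* arrays and the recorded
       jump targets are refreshed here. */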
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
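    /* %g0 is hardwired to zero on SPARC, so it never gets a TCG global;
       the initialisation loop further down starts at %g1. */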
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
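        /* Each CPUState field that translated code touches is exposed to
           TCG as a named global: a value living in host memory at a fixed
           offset from AREG0 (the env pointer), which TCG may cache in host
           registers across a TB. */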
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                           offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
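        /* With GEN_HELPER defined as 2, re-including helper.h expands each
           DEF_HELPER declaration into a helper-registration call, following
           the def-helper.h convention used throughout this tree, so every
           helper invoked by the translator is known to the TCG runtime. */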
        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    /* npc values 1 and 2 are the DYNAMIC_PC and JUMP_PC markers */
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}