/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define DEBUG_DISAS
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
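/* Worked example (for illustration, with an assumed sample word):
   GET_FIELD uses big-endian (manual) bit numbering, so bit 0 is the MSB.
   For insn = 0x8A104005, GET_FIELD(insn, 2, 6) shifts right by
   31 - 6 = 25 and masks with (1 << 5) - 1, extracting the 5-bit rd
   field (here 5).  GET_FIELD_SP expresses the same extraction in
   little-endian bit positions: GET_FIELD_SP(insn, 25, 29) selects the
   identical bits. */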
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
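/* Worked example (for illustration): on SPARC64 the odd bit of the
   encoded register number selects the upper bank of FP registers, so an
   encoded rd of 3 (binary 00011) gives DFPREG(3) = (1 << 5) | 2 = 34,
   i.e. the double register starting at %f34, while on 32-bit SPARC
   DFPREG(3) simply rounds down to the even single-precision pair
   starting at %f2. */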
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
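/* Worked example (for illustration): for a 13-bit field,
   sign_extend(0x1fff, 13) first shifts left by 32 - 13 = 19 so the
   field's sign bit lands in bit 31, then the arithmetic right shift
   smears it back down, yielding -1.  sign_extend(0x0fff, 13) has a
   clear sign bit and comes back unchanged as 4095. */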
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
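/* Note, for illustration: tcg_gen_exit_tb((long)tb + tb_num) returns the
   TB pointer with the jump-slot index (0 or 1) encoded in its low bits,
   which lets the execution loop patch that slot so the two blocks chain
   directly on subsequent executions.  A zero return means "no chaining":
   the next TB is looked up in the hash table instead. */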
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
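/* Worked example (for illustration): a 32-bit unsigned add dst = src1 +
   src2 wraps exactly when the true sum exceeds 2^32 - 1, and in that
   case dst < src1.  E.g. 0xffffffff + 2 wraps to 1, and 1 < 0xffffffff,
   so the LTU compare above recovers carry = 1 without needing the
   host's carry flag. */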
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero. Fall back to plain ADD. */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode. We discard the low
               part of the output. Ideally we'd combine this operation
               with the add that generated the carry in the first place. */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry. */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero. Fall back to plain SUB. */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode. We discard the low
               part of the output. Ideally we'd combine this operation
               with the add that generated the carry in the first place. */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry. */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
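/* Worked sketch (for illustration): MULScc implements one step of the
   classic shift-and-add multiply.  src1 carries the running partial
   product and %y the multiplier: if the low bit of %y is clear, the
   addend src2 is forced to zero; %y shifts right one bit, capturing the
   bit just shifted out of the partial product; the partial product
   itself shifts right with N ^ V entering at bit 31; then a single add
   sets the condition codes.  Thirty-two MULScc steps compute a
   32x32 -> 64-bit product in software. */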
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
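/* Note (for illustration): the two brcondi checks above guard the one
   signed-division case that overflows, INT64_MIN / -1, whose true
   quotient 2^63 is unrepresentable.  SPARC V9 defines the result of
   that case as INT64_MIN, so the generated code short-circuits to the
   constant instead of executing the host divide, which could fault. */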
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
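/* For reference (matches the Bicc cond field encoding dispatched by
   gen_cond() below): 0x0 bn, 0x1 be, 0x2 ble, 0x3 bl, 0x4 bleu,
   0x5 bcs, 0x6 bneg, 0x7 bvs, 0x8 ba, 0x9 bne, 0xa bg, 0xb bge,
   0xc bgu, 0xd bcc, 0xe bpos, 0xf bvc. */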
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
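/* For illustration: fcc0 occupies FSR bits 11:10, while the V9 fcc1,
   fcc2 and fcc3 fields occupy bits 33:32, 35:34 and 37:36, so callers
   pass fcc_offset = 0, 22, 24 or 26 to rebase the shift from
   FSR_FCC0_SHIFT.  E.g. for fcc2, the FCC0 bit is 10 + 24 = 34. */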
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
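/* Note (for illustration): this implements a conditional branch with the
   annul bit set.  If the condition holds, execution continues at the
   delay slot (pc2) with npc = the branch target (pc1); if it fails, the
   delay-slot instruction is annulled and execution resumes at pc2 + 4. */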
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
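/* Worked example (for illustration): SPARC exposes both pc and npc
   because of delayed branches.  For straight-line code at 0x1000, the
   pair advances as (0x1000, 0x1004) -> (0x1004, 0x1008); a taken branch
   at 0x1000 to 0x2000 instead yields (0x1004, 0x2000), so the
   delay-slot insn at 0x1004 still executes before control reaches
   0x2000. */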
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
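/* Note (for illustration): with the i-bit set, an alternate-space access
   such as "lda [addr] %asi, rd" takes the ASI from the %asi register at
   runtime, so the translator copies the live cpu_asi value; otherwise
   the 8-bit ASI is an immediate in insn bits 12:5 and can be folded
   into the translation block as a constant. */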
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
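/* Note (for illustration): %g0 (register 0) is hardwired to zero on
   SPARC, which is why both helpers special-case index 0 with a movi of
   0 rather than reading storage; %g1..%g7 live in fixed TCG globals
   (cpu_gregs), and the current window's registers are loaded indirectly
   through cpu_regwptr. */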
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:                     /* CALL */
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
1982 case 2: /* FPU & Logical Operations */
1984 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1985 if (xop
== 0x3a) { /* generate trap */
1988 cpu_src1
= get_src1(insn
, cpu_src1
);
1990 rs2
= GET_FIELD(insn
, 25, 31);
1991 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
1993 rs2
= GET_FIELD(insn
, 27, 31);
1995 gen_movl_reg_TN(rs2
, cpu_src2
);
1996 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
1998 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2001 cond
= GET_FIELD(insn
, 3, 6);
2002 if (cond
== 0x8) { /* Trap Always */
2003 save_state(dc
, cpu_cond
);
2004 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2006 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2008 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2009 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2010 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2013 dc
->def
->features
& CPU_FEATURE_TA0_SHUTDOWN
) {
2015 gen_helper_shutdown();
2018 gen_helper_raise_exception(cpu_tmp32
);
2020 } else if (cond
!= 0) {
2021 TCGv r_cond
= tcg_temp_new();
2023 #ifdef TARGET_SPARC64
2025 int cc
= GET_FIELD_SP(insn
, 11, 12);
2027 save_state(dc
, cpu_cond
);
2029 gen_cond(r_cond
, 0, cond
, dc
);
2031 gen_cond(r_cond
, 1, cond
, dc
);
2035 save_state(dc
, cpu_cond
);
2036 gen_cond(r_cond
, 0, cond
, dc
);
2038 l1
= gen_new_label();
2039 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2041 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2043 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2045 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2046 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2047 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2048 gen_helper_raise_exception(cpu_tmp32
);
2051 tcg_temp_free(r_cond
);
2057 } else if (xop
== 0x28) {
2058 rs1
= GET_FIELD(insn
, 13, 17);
2061 #ifndef TARGET_SPARC64
2062 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2063 manual, rdy on the microSPARC
2065 case 0x0f: /* stbar in the SPARCv8 manual,
2066 rdy on the microSPARC II */
2067 case 0x10 ... 0x1f: /* implementation-dependent in the
2068 SPARCv8 manual, rdy on the
2071 gen_movl_TN_reg(rd
, cpu_y
);
2073 #ifdef TARGET_SPARC64
2074 case 0x2: /* V9 rdccr */
2075 gen_helper_compute_psr();
2076 gen_helper_rdccr(cpu_dst
);
2077 gen_movl_TN_reg(rd
, cpu_dst
);
2079 case 0x3: /* V9 rdasi */
2080 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2081 gen_movl_TN_reg(rd
, cpu_dst
);
2083 case 0x4: /* V9 rdtick */
2087 r_tickptr
= tcg_temp_new_ptr();
2088 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2089 offsetof(CPUState
, tick
));
2090 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2091 tcg_temp_free_ptr(r_tickptr
);
2092 gen_movl_TN_reg(rd
, cpu_dst
);
2095 case 0x5: /* V9 rdpc */
2099 r_const
= tcg_const_tl(dc
->pc
);
2100 gen_movl_TN_reg(rd
, r_const
);
2101 tcg_temp_free(r_const
);
2104 case 0x6: /* V9 rdfprs */
2105 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2106 gen_movl_TN_reg(rd
, cpu_dst
);
2108 case 0xf: /* V9 membar */
2109 break; /* no effect */
2110 case 0x13: /* Graphics Status */
2111 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2113 gen_movl_TN_reg(rd
, cpu_gsr
);
2115 case 0x16: /* Softint */
2116 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2117 gen_movl_TN_reg(rd
, cpu_dst
);
2119 case 0x17: /* Tick compare */
2120 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2122 case 0x18: /* System tick */
2126 r_tickptr
= tcg_temp_new_ptr();
2127 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2128 offsetof(CPUState
, stick
));
2129 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2130 tcg_temp_free_ptr(r_tickptr
);
2131 gen_movl_TN_reg(rd
, cpu_dst
);
2134 case 0x19: /* System tick compare */
2135 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2137 case 0x10: /* Performance Control */
2138 case 0x11: /* Performance Instrumentation Counter */
2139 case 0x12: /* Dispatch Control */
2140 case 0x14: /* Softint set, WO */
2141 case 0x15: /* Softint clear, WO */
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
2328 } else if (xop
== 0x34) { /* FPU Operations */
2329 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2331 gen_op_clear_ieee_excp_and_FTT();
2332 rs1
= GET_FIELD(insn
, 13, 17);
2333 rs2
= GET_FIELD(insn
, 27, 31);
2334 xop
= GET_FIELD(insn
, 18, 26);
2335 save_state(dc
, cpu_cond
);
2337 case 0x1: /* fmovs */
2338 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2340 case 0x5: /* fnegs */
2341 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2343 case 0x9: /* fabss */
2344 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2346 case 0x29: /* fsqrts */
2347 CHECK_FPU_FEATURE(dc
, FSQRT
);
2348 gen_clear_float_exceptions();
2349 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2350 gen_helper_check_ieee_exceptions();
2351 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2353 case 0x2a: /* fsqrtd */
2354 CHECK_FPU_FEATURE(dc
, FSQRT
);
2355 gen_op_load_fpr_DT1(DFPREG(rs2
));
2356 gen_clear_float_exceptions();
2357 gen_helper_fsqrtd();
2358 gen_helper_check_ieee_exceptions();
2359 gen_op_store_DT0_fpr(DFPREG(rd
));
2361 case 0x2b: /* fsqrtq */
2362 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2363 gen_op_load_fpr_QT1(QFPREG(rs2
));
2364 gen_clear_float_exceptions();
2365 gen_helper_fsqrtq();
2366 gen_helper_check_ieee_exceptions();
2367 gen_op_store_QT0_fpr(QFPREG(rd
));
2369 case 0x41: /* fadds */
2370 gen_clear_float_exceptions();
2371 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2372 gen_helper_check_ieee_exceptions();
2373 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2375 case 0x42: /* faddd */
2376 gen_op_load_fpr_DT0(DFPREG(rs1
));
2377 gen_op_load_fpr_DT1(DFPREG(rs2
));
2378 gen_clear_float_exceptions();
2380 gen_helper_check_ieee_exceptions();
2381 gen_op_store_DT0_fpr(DFPREG(rd
));
2383 case 0x43: /* faddq */
2384 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2385 gen_op_load_fpr_QT0(QFPREG(rs1
));
2386 gen_op_load_fpr_QT1(QFPREG(rs2
));
2387 gen_clear_float_exceptions();
2389 gen_helper_check_ieee_exceptions();
2390 gen_op_store_QT0_fpr(QFPREG(rd
));
2392 case 0x45: /* fsubs */
2393 gen_clear_float_exceptions();
2394 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2395 gen_helper_check_ieee_exceptions();
2396 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2398 case 0x46: /* fsubd */
2399 gen_op_load_fpr_DT0(DFPREG(rs1
));
2400 gen_op_load_fpr_DT1(DFPREG(rs2
));
2401 gen_clear_float_exceptions();
2403 gen_helper_check_ieee_exceptions();
2404 gen_op_store_DT0_fpr(DFPREG(rd
));
2406 case 0x47: /* fsubq */
2407 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2408 gen_op_load_fpr_QT0(QFPREG(rs1
));
2409 gen_op_load_fpr_QT1(QFPREG(rs2
));
2410 gen_clear_float_exceptions();
2412 gen_helper_check_ieee_exceptions();
2413 gen_op_store_QT0_fpr(QFPREG(rd
));
2415 case 0x49: /* fmuls */
2416 CHECK_FPU_FEATURE(dc
, FMUL
);
2417 gen_clear_float_exceptions();
2418 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2419 gen_helper_check_ieee_exceptions();
2420 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2422 case 0x4a: /* fmuld */
2423 CHECK_FPU_FEATURE(dc
, FMUL
);
2424 gen_op_load_fpr_DT0(DFPREG(rs1
));
2425 gen_op_load_fpr_DT1(DFPREG(rs2
));
2426 gen_clear_float_exceptions();
2428 gen_helper_check_ieee_exceptions();
2429 gen_op_store_DT0_fpr(DFPREG(rd
));
2431 case 0x4b: /* fmulq */
2432 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2433 CHECK_FPU_FEATURE(dc
, FMUL
);
2434 gen_op_load_fpr_QT0(QFPREG(rs1
));
2435 gen_op_load_fpr_QT1(QFPREG(rs2
));
2436 gen_clear_float_exceptions();
2438 gen_helper_check_ieee_exceptions();
2439 gen_op_store_QT0_fpr(QFPREG(rd
));
2441 case 0x4d: /* fdivs */
2442 gen_clear_float_exceptions();
2443 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2444 gen_helper_check_ieee_exceptions();
2445 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2447 case 0x4e: /* fdivd */
2448 gen_op_load_fpr_DT0(DFPREG(rs1
));
2449 gen_op_load_fpr_DT1(DFPREG(rs2
));
2450 gen_clear_float_exceptions();
2452 gen_helper_check_ieee_exceptions();
2453 gen_op_store_DT0_fpr(DFPREG(rd
));
2455 case 0x4f: /* fdivq */
2456 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2457 gen_op_load_fpr_QT0(QFPREG(rs1
));
2458 gen_op_load_fpr_QT1(QFPREG(rs2
));
2459 gen_clear_float_exceptions();
2461 gen_helper_check_ieee_exceptions();
2462 gen_op_store_QT0_fpr(QFPREG(rd
));
            case 0x69: /* fsmuld */
                CHECK_FPU_FEATURE(dc, FSMULD);
                gen_clear_float_exceptions();
                gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x6e: /* fdmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xc4: /* fitos */
                gen_clear_float_exceptions();
                gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc6: /* fdtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc7: /* fqtos */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc8: /* fitod */
                gen_helper_fitod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xc9: /* fstod */
                gen_helper_fstod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcb: /* fqtod */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcc: /* fitoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fitoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xcd: /* fstoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fstoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xce: /* fdtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fdtoq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xd1: /* fstoi */
                gen_clear_float_exceptions();
                gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd2: /* fdtoi */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd3: /* fqtoi */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
#ifdef TARGET_SPARC64
            case 0x2: /* V9 fmovd */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x3: /* V9 fmovq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                break;
            case 0x6: /* V9 fnegd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fnegd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x7: /* V9 fnegq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fnegq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xa: /* V9 fabsd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fabsd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xb: /* V9 fabsq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fabsq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x81: /* V9 fstox */
                gen_clear_float_exceptions();
                gen_helper_fstox(cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x82: /* V9 fdtox */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x83: /* V9 fqtox */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x84: /* V9 fxtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x88: /* V9 fxtod */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x8c: /* V9 fxtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtoq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
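                break;
#endif
            default:
                goto illegal_insn;
            }
        /* xop == 0x35 is the FPop2 group: the V9 conditional moves of FP
           registers and the fcmp{s,d,q}/fcmpe{s,d,q} comparisons, which
           write the %fcc field selected by rd & 3. */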
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
            save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
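            /* Each FMOV*CC macro below evaluates the selected condition
               into a temporary, branches over the register copy when it
               is zero, and frees the temporary, so only the fall-through
               path performs the move. */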
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                               \
            {                                                      \
                TCGv r_cond;                                       \
                int l1;                                            \
                                                                   \
                l1 = gen_new_label();                              \
                r_cond = tcg_temp_new();                           \
                cond = GET_FIELD_SP(insn, 14, 17);                 \
                gen_fcond(r_cond, fcc, cond);                      \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,            \
                                   0, l1);                         \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);        \
                gen_set_label(l1);                                 \
                tcg_temp_free(r_cond);                             \
            }
#define FMOVDCC(fcc)                                               \
            {                                                      \
                TCGv r_cond;                                       \
                int l1;                                            \
                                                                   \
                l1 = gen_new_label();                              \
                r_cond = tcg_temp_new();                           \
                cond = GET_FIELD_SP(insn, 14, 17);                 \
                gen_fcond(r_cond, fcc, cond);                      \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,            \
                                   0, l1);                         \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],               \
                                cpu_fpr[DFPREG(rs2)]);             \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],           \
                                cpu_fpr[DFPREG(rs2) + 1]);         \
                gen_set_label(l1);                                 \
                tcg_temp_free(r_cond);                             \
            }
#define FMOVQCC(fcc)                                               \
            {                                                      \
                TCGv r_cond;                                       \
                int l1;                                            \
                                                                   \
                l1 = gen_new_label();                              \
                r_cond = tcg_temp_new();                           \
                cond = GET_FIELD_SP(insn, 14, 17);                 \
                gen_fcond(r_cond, fcc, cond);                      \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,            \
                                   0, l1);                         \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],               \
                                cpu_fpr[QFPREG(rs2)]);             \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],           \
                                cpu_fpr[QFPREG(rs2) + 1]);         \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],           \
                                cpu_fpr[QFPREG(rs2) + 2]);         \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],           \
                                cpu_fpr[QFPREG(rs2) + 3]);         \
                gen_set_label(l1);                                 \
                tcg_temp_free(r_cond);                             \
            }
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVSCC(0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVDCC(0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVSCC(1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVDCC(1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVSCC(2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVDCC(2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(2);
                break;
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVSCC(3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVDCC(3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(3);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                               \
            {                                                      \
                TCGv r_cond;                                       \
                int l1;                                            \
                                                                   \
                l1 = gen_new_label();                              \
                r_cond = tcg_temp_new();                           \
                cond = GET_FIELD_SP(insn, 14, 17);                 \
                gen_cond(r_cond, icc, cond, dc);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,            \
                                   0, l1);                         \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);        \
                gen_set_label(l1);                                 \
                tcg_temp_free(r_cond);                             \
            }
#define FMOVDCC(icc)                                               \
            {                                                      \
                TCGv r_cond;                                       \
                int l1;                                            \
                                                                   \
                l1 = gen_new_label();                              \
                r_cond = tcg_temp_new();                           \
                cond = GET_FIELD_SP(insn, 14, 17);                 \
                gen_cond(r_cond, icc, cond, dc);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,            \
                                   0, l1);                         \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],               \
                                cpu_fpr[DFPREG(rs2)]);             \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],           \
                                cpu_fpr[DFPREG(rs2) + 1]);         \
                gen_set_label(l1);                                 \
                tcg_temp_free(r_cond);                             \
            }
#define FMOVQCC(icc)                                               \
            {                                                      \
                TCGv r_cond;                                       \
                int l1;                                            \
                                                                   \
                l1 = gen_new_label();                              \
                r_cond = tcg_temp_new();                           \
                cond = GET_FIELD_SP(insn, 14, 17);                 \
                gen_cond(r_cond, icc, cond, dc);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,            \
                                   0, l1);                         \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],               \
                                cpu_fpr[QFPREG(rs2)]);             \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],           \
                                cpu_fpr[QFPREG(rs2) + 1]);         \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],           \
                                cpu_fpr[QFPREG(rs2) + 2]);         \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],           \
                                cpu_fpr[QFPREG(rs2) + 3]);         \
                gen_set_label(l1);                                 \
                tcg_temp_free(r_cond);                             \
            }
            case 0x101: /* V9 fmovscc %icc */
                FMOVSCC(0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVDCC(0);
                break;
            case 0x103: /* V9 fmovqcc %icc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x181: /* V9 fmovscc %xcc */
                FMOVSCC(1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVDCC(1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
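                break;
            default:
                goto illegal_insn;
            }
        /* xop == 0x2 is "or"; it is special-cased so that the synthetic
           mov/clr forms (%g0 as rs1 or rs2) skip the redundant or. */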
        } else if (xop == 0x2) {
            // clr/mov shortcut

            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {       /* immediate */
                    TCGv r_const;

                    simm = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl(simm);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {            /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {       /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {            /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
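        /* V9 shifts: instruction bit 12 selects the 64-bit sllx/srlx/srax
           forms (6-bit shift count) instead of the 32-bit forms (5-bit
           count, with the low 32 bits of the source zero- or
           sign-extended first). */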
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
#endif
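        /* Main ALU group: bit 4 of xop selects the cc-setting variant, so
           the dispatch below is on xop & ~0x10 and each case tests
           (xop & 0x10) where the two variants differ. */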
        } else if (xop < 0x36) {
            if (xop < 0x20) {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop & ~0x10) {
                case 0x0: /* add */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x1: /* and */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x2: /* or */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x3: /* xor */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x4: /* sub */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                        } else {
                            tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x5: /* andn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x6: /* orn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x7: /* xorn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x8: /* addx, V9 addc */
                    gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
#endif
                case 0xa: /* umul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xb: /* smul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xc: /* subx, V9 subc */
                    gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                    tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                    gen_trap_ifdivzero_tl(cpu_cc_src2);
                    tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    if (xop & 0x10) {
                        gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
                        dc->cc_op = CC_OP_DIV;
                    } else {
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
                case 0xf: /* sdiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    if (xop & 0x10) {
                        gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
                        dc->cc_op = CC_OP_DIV;
                    } else {
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {
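                /* xop >= 0x20: tagged arithmetic, mulscc, the 32-bit
                   shifts and the wr%asr/wr%psr/wr%wim/wr%tbr family;
                   these all fetch both source operands up front. */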
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                    dc->cc_op = CC_OP_TADD;
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                    dc->cc_op = CC_OP_TSUB;
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                    dc->cc_op = CC_OP_TADDTV;
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                    dc->cc_op = CC_OP_TSUBTV;
                    break;
                case 0x24: /* mulscc */
                    gen_helper_compute_psr();
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                    dc->cc_op = CC_OP_ADD;
                    break;
#ifndef TARGET_SPARC64
                case 0x25: /* sll */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26: /* srl */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27: /* sra */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
                case 0x30:
                    {
                        switch (rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC
                                               II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8
                                               manual, nop on the
                                               microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                            tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc)) {
                                ; // XXX
                            }
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc, cpu_cond))
                                goto jmp_insn;
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* Softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_tmp64);
                            break;
                        case 0x15: /* Softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_tmp64);
                            break;
                        case 0x16: /* Softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_tmp64);
                            break;
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_dst);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;

                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation
                                      Counter */
                        case 0x12: /* Dispatch Control */
                            // XXX
                            goto illegal_insn;
#endif
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0:
                            gen_helper_saved();
                            break;
                        case 1:
                            gen_helper_restored();
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            // XXX
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_helper_wrpsr(cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                        dc->cc_op = CC_OP_FLAGS;
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: // tpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 1: // tnpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tnpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 2: // tstate
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state,
                                                       tstate));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 3: // tt
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                               offsetof(trap_state, tt));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 4: // tick
                            {
                                TCGv_ptr r_tickptr;

                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 5: // tba
                            tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                            break;
                        case 6: // pstate
                            save_state(dc, cpu_cond);
                            gen_helper_wrpstate(cpu_tmp0);
                            dc->npc = DYNAMIC_PC;
                            break;
                        case 7: // tl
                            save_state(dc, cpu_cond);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, tl));
                            dc->npc = DYNAMIC_PC;
                            break;
                        case 8: // pil
                            gen_helper_wrpil(cpu_tmp0);
                            break;
                        case 9: // cwp
                            gen_helper_wrcwp(cpu_tmp0);
                            break;
                        case 10: // cansave
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cansave));
                            break;
                        case 11: // canrestore
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    canrestore));
                            break;
                        case 12: // cleanwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cleanwin));
                            break;
                        case 13: // otherwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    otherwin));
                            break;
                        case 14: // wstate
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    wstate));
                            break;
                        case 16: // UA2005 gl
                            CHECK_IU_FEATURE(dc, GL);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, gl));
                            break;
                        case 26: // UA2005 strand status
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        if (dc->def->nwindows != 32)
                            tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                            (1 << dc->def->nwindows) - 1);
                        tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                    }
                    break;
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                        CHECK_IU_FEATURE(dc, HYPV);
                        if (!hypervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        switch (rd) {
                        case 0: // hpstate
                            // XXX gen_op_wrhpstate();
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 1: // htstate
                            // XXX gen_op_wrhtstate();
                            break;
                        case 3: // hintp
                            tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                            break;
                        case 5: // htba
                            tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                            break;
                        case 31: // hstick_cmpr
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, hstick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_hstick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 6: // hver readonly
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new();
                        if (insn & (1 << 18)) {
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond, dc);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond, dc);
                            else
                                goto illegal_insn;
                        } else {
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 10);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        tcg_temp_free(r_cond);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    cpu_src2 = get_src2(insn, cpu_src2);
                    gen_helper_popc(cpu_dst, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2f: /* V9 movr */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                           cpu_src1, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 9);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
            }
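        /* xop == 0x36 is the V9 impdep1 space, used here for the VIS
           instructions; the VIS logical ops below work directly on the
           32-bit halves of the double-precision register pairs. */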
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;

            switch (opf) {
            case 0x000: /* VIS I edge8cc */
            case 0x001: /* VIS II edge8n */
            case 0x002: /* VIS I edge8lcc */
            case 0x003: /* VIS II edge8ln */
            case 0x004: /* VIS I edge16cc */
            case 0x005: /* VIS II edge16n */
            case 0x006: /* VIS I edge16lcc */
            case 0x007: /* VIS II edge16ln */
            case 0x008: /* VIS I edge32cc */
            case 0x009: /* VIS II edge32n */
            case 0x00a: /* VIS I edge32lcc */
            case 0x00b: /* VIS II edge32ln */
                // XXX
                goto illegal_insn;
            case 0x010: /* VIS I array8 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x018: /* VIS I alignaddr */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x019: /* VIS II bmask */
            case 0x01a: /* VIS I alignaddrl */
                // XXX
                goto illegal_insn;
            case 0x020: /* VIS I fcmple16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x022: /* VIS I fcmpne16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x024: /* VIS I fcmple32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x026: /* VIS I fcmpne32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x028: /* VIS I fcmpgt16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02a: /* VIS I fcmpeq16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02c: /* VIS I fcmpgt32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02e: /* VIS I fcmpeq32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x031: /* VIS I fmul8x16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x033: /* VIS I fmul8x16au */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16au();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x035: /* VIS I fmul8x16al */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16al();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x036: /* VIS I fmul8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x037: /* VIS I fmul8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x038: /* VIS I fmuld8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x039: /* VIS I fmuld8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x03a: /* VIS I fpack32 */
            case 0x03b: /* VIS I fpack16 */
            case 0x03d: /* VIS I fpackfix */
            case 0x03e: /* VIS I pdist */
                // XXX
                goto illegal_insn;
            case 0x048: /* VIS I faligndata */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_faligndata();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04b: /* VIS I fpmerge */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpmerge();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04c: /* VIS II bshuffle */
                // XXX
                goto illegal_insn;
            case 0x04d: /* VIS I fexpand */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fexpand();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x050: /* VIS I fpadd16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x051: /* VIS I fpadd16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x052: /* VIS I fpadd32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x053: /* VIS I fpadd32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x054: /* VIS I fpsub16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x055: /* VIS I fpsub16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x056: /* VIS I fpsub32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x057: /* VIS I fpsub32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x060: /* VIS I fzero */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                break;
            case 0x061: /* VIS I fzeros */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], 0);
                break;
            case 0x062: /* VIS I fnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x063: /* VIS I fnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x064: /* VIS I fandnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x065: /* VIS I fandnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x066: /* VIS I fnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x067: /* VIS I fnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x068: /* VIS I fandnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                 cpu_fpr[DFPREG(rs1)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x069: /* VIS I fandnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x06a: /* VIS I fnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x06b: /* VIS I fnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x06c: /* VIS I fxor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x06d: /* VIS I fxors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x06e: /* VIS I fnand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x06f: /* VIS I fnands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x070: /* VIS I fand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x071: /* VIS I fands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x072: /* VIS I fxnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x073: /* VIS I fxnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                break;
            case 0x074: /* VIS I fsrc1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x075: /* VIS I fsrc1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x076: /* VIS I fornot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x077: /* VIS I fornot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x078: /* VIS I fsrc2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x079: /* VIS I fsrc2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x07a: /* VIS I fornot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x07b: /* VIS I fornot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x07c: /* VIS I for */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                               cpu_fpr[DFPREG(rs2)]);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                               cpu_fpr[DFPREG(rs1) + 1],
                               cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x07d: /* VIS I fors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x07e: /* VIS I fone */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                break;
            case 0x07f: /* VIS I fones */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], -1);
                break;
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
                // XXX
                goto illegal_insn;
            default:
                goto illegal_insn;
            }
#else
            goto ncp_insn;
#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            TCGv_i32 r_const;

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38: /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39: /* rett, V9 return */
                {
                    TCGv_i32 r_const;

                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett();
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                gen_helper_flush(cpu_dst);
                break;
            case 0x3c: /* save */
                save_state(dc, cpu_cond);
                gen_helper_save();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d: /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e: /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_done();
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_retry();
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        break;
    }
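    /* Loads and stores compute the effective address once into cpu_addr;
       loads deliver their result in cpu_val and stores read it from
       there, so the per-xop cases below only differ in access size and
       ASI handling. */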
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0: /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1: /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2: /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3: /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9: /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa: /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd: /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f: /* swap, swap register with memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10: /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11: /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12: /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13: /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19: /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a: /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d: /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f: /* swapa, swap reg with alt. memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20: /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21: /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22: /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23: /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
4733 /* default case for non jump instructions */
4734 if (dc
->npc
== DYNAMIC_PC
) {
4735 dc
->pc
= DYNAMIC_PC
;
4737 } else if (dc
->npc
== JUMP_PC
) {
4738 /* we can do a static jump */
4739 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4743 dc
->npc
= dc
->npc
+ 4;
4751 save_state(dc
, cpu_cond
);
4752 r_const
= tcg_const_i32(TT_ILL_INSN
);
4753 gen_helper_raise_exception(r_const
);
4754 tcg_temp_free_i32(r_const
);
4762 save_state(dc
, cpu_cond
);
4763 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4764 gen_helper_raise_exception(r_const
);
4765 tcg_temp_free_i32(r_const
);
4769 #if !defined(CONFIG_USER_ONLY)
4774 save_state(dc
, cpu_cond
);
4775 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4776 gen_helper_raise_exception(r_const
);
4777 tcg_temp_free_i32(r_const
);
4783 save_state(dc
, cpu_cond
);
4784 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
static inline void gen_intermediate_code_internal(TranslationBlock *tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
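    /* cpu_dst, cpu_val and cpu_addr are local temporaries: unlike the
       plain temporaries above, their values are preserved across TCG
       branches that may be emitted while translating one instruction. */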
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
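        /* one guest instruction is translated per iteration; when icount
           is active, tb->cflags carries an exact instruction budget in
           CF_COUNT_MASK. */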
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
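        /* in search-pc mode these side tables map each generated micro-op
           back to the guest pc/npc and instruction count, so the guest
           state can be reconstructed for a fault in mid-block. */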
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
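    /* the loop is additionally bounded by the micro-op buffer, by the
       guest-code span (kept below TARGET_PAGE_SIZE - 32 bytes) and by the
       icount budget. */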
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
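    /* gen_goto_tb() above emits a direct jump that can later be patched to
       chain straight into the next TB without returning to the execution
       loop; the fallback path stores pc/npc and exits to the loop. */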
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
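/* The second entry point (spc == 1) re-runs translation only to rebuild
   the opcode-index tables consumed by gen_pc_load() below when restoring
   the guest pc/npc after a fault inside a translated block. */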
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                           offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
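    /* Condition codes are computed lazily: instructions record their
       operands plus a CC_OP value, and helper_compute_psr() folds them
       into the architectural PSR before the state becomes visible. */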
    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}