   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
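/* When dc->pc or dc->npc holds one of these sentinel values, the real value
   is only known at run time and lives in cpu_pc/cpu_npc; JUMP_PC additionally
   means that npc is one of dc->jump_pc[0]/dc->jump_pc[1], selected by the
   pending branch condition (see flush_cond() and gen_generic_branch()). */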
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
#ifndef CONFIG_USER_ONLY
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;

static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;

static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;

/* local register indexes (only used inside old micro ops) */
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */

    int address_mask_32bit;

    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
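/* Example: GET_FIELD(insn, 3, 6) extracts the 4-bit branch condition field
   occupying big-endian bits 3..6 of the instruction word, i.e. it is
   equivalent to (insn >> 25) & 0xf in conventional bit numbering. */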
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));

static void gen_op_load_fpr_DT1(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));

static void gen_op_store_DT0_fpr(unsigned int dst)
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));

static void gen_op_load_fpr_QT0(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));

static void gen_op_load_fpr_QT1(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));

static void gen_op_store_QT0_fpr(unsigned int dst)
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
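/* The double- and quad-precision helpers above stage operands through the
   dt0/dt1 and qt0/qt1 scratch fields of CPUSPARCState one 32-bit word at a
   time, because cpu_fpr[] models the FP register file as single-precision
   halves. */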
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0

#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)

#ifdef TARGET_SPARC64
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#define AM_CHECK(dc) (1)

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
#ifdef TARGET_SPARC64
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
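/* On SPARC64, when 32-bit address masking is in effect (PSTATE.AM, tracked
   in dc->address_mask_32bit), effective addresses are truncated to their
   low 32 bits before use. */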
static inline void gen_movl_reg_TN(int reg, TCGv tn)
        tcg_gen_movi_tl(tn, 0);
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));

static inline void gen_movl_TN_reg(int reg, TCGv tn)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
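/* Register access: %g0-%g7 live in cpu_gregs[]; windowed registers %o/%l/%i
   (regs 8..31) are reached through cpu_regwptr at offset (reg - 8) words.
   Reads of %g0 yield zero and writes to it are dropped (the elided reg == 0
   checks handle this). */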
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
    TranslationBlock *tb;

    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
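/* Direct translation-block chaining is only legal when both the branch pc
   and npc fall on the same guest page as the current TB; otherwise the
   target pc/npc are simply stored and the TB exits normally. */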
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    tcg_temp_free(r_temp);

static inline void gen_tag_tv(TCGv src1, TCGv src2)
    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
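/* Condition codes are evaluated lazily: gen_op_add_cc() and friends only
   latch the operands and result in cpu_cc_src/cpu_cc_src2/cpu_cc_dst, and
   the PSR flags are materialised later from cpu_cc_op (dc->cc_op) when
   something actually needs them. */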
static TCGv_i32 gen_add32_carry32(void)
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src) */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);

static TCGv_i32 gen_sub32_carry32(void)
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2) */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
        /* Carry is known to be zero.  Fall back to plain ADD. */
            gen_op_add_cc(dst, src1, src2);
            tcg_gen_add_tl(dst, src1, src2);

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place. */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);

        carry_32 = gen_add32_carry32();

        carry_32 = gen_sub32_carry32();

        /* We need external help to produce the carry. */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32

        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
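/* ADDX/ADDXcc need the carry left behind by an earlier instruction.  The
   elided switch on dc->cc_op picks the cheapest way to recover it: fall back
   to a plain add when the carry is known to be zero, use the 32-bit setcond
   recipes above (or add2 on 32-bit hosts), or call gen_helper_compute_C_icc()
   in the general case. */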
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    tcg_temp_free(r_temp);

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
        /* Carry is known to be zero.  Fall back to plain SUB. */
            gen_op_sub_cc(dst, src1, src2);
            tcg_gen_sub_tl(dst, src1, src2);

        carry_32 = gen_add32_carry32();

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place. */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);

        carry_32 = gen_sub32_carry32();

        /* We need external help to produce the carry. */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32

        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);

    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
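/* MULScc implements one step of the V8 multiply-step algorithm: the low bit
   of %y decides whether src2 participates in the add, %y is shifted right
   with the low bit of src1 inserted at bit 31, and bit 31 of the shifted
   src1 is replaced by N ^ V from the previous step before the final add. */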
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);

#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
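/* SDIVX traps on a zero divisor and special-cases INT64_MIN / -1, whose
   true quotient would overflow a 64-bit result; the architected result in
   that case is INT64_MIN itself. */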
static inline void gen_op_eval_ba(TCGv dst)
    tcg_gen_movi_tl(dst, 1);

static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(dst, src);

static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
    gen_mov_reg_C(dst, src);

static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(dst, src);

static inline void gen_op_eval_bn(TCGv dst)
    tcg_gen_movi_tl(dst, 0);

static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(dst, src);

static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
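/* Each Bicc condition is computed as a 0/1 value from the icc/xcc flags,
   e.g. 'le' is Z | (N ^ V), 'l' is N ^ V and 'leu' is C | Z; the opposite
   conditions are obtained by XOR-ing the result with 1. */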
/* FPSR bit field FCC1 | FCC0: */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);

static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC1(dst, src, fcc_offset);

static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_goto_tb(dc, 1, pc2, pc2 + 4);

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);

    tcg_gen_movi_tl(cpu_npc, npc2);

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;

static inline void save_npc(DisasContext *dc, TCGv cond)
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);

static inline void save_state(DisasContext *dc, TCGv cond)
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;

static inline void gen_op_next_insn(void)
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
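/* Fall through to the delay-slot instruction: pc <- npc, npc <- npc + 4,
   mirroring the SPARC delayed control-transfer model. */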
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
#ifdef TARGET_SPARC64
        switch (dc->cc_op) {
            gen_helper_compute_psr();
            dc->cc_op = CC_OP_FLAGS;
            gen_op_eval_bn(r_dst);
            gen_op_eval_be(r_dst, r_src);
            gen_op_eval_ble(r_dst, r_src);
            gen_op_eval_bl(r_dst, r_src);
            gen_op_eval_bleu(r_dst, r_src);
            gen_op_eval_bcs(r_dst, r_src);
            gen_op_eval_bneg(r_dst, r_src);
            gen_op_eval_bvs(r_dst, r_src);
            gen_op_eval_ba(r_dst);
            gen_op_eval_bne(r_dst, r_src);
            gen_op_eval_bg(r_dst, r_src);
            gen_op_eval_bge(r_dst, r_src);
            gen_op_eval_bgu(r_dst, r_src);
            gen_op_eval_bcc(r_dst, r_src);
            gen_op_eval_bpos(r_dst, r_src);
            gen_op_eval_bvc(r_dst, r_src);

static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
    unsigned int offset;

        gen_op_eval_bn(r_dst);
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        gen_op_eval_ba(r_dst);
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);

#ifdef TARGET_SPARC64

static const int gen_tcg_cond_reg[8] = {

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);

/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

        /* unconditional not taken */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
            dc->npc = dc->pc + 4;
    } else if (cond == 0x8) {
        /* unconditional taken */
            dc->npc = dc->pc + 4;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
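/* For a conditional branch without a statically known outcome the next npc
   cannot be resolved at translation time, so both candidates are recorded in
   dc->jump_pc[] (taken target and fall-through) and dc->npc is set to the
   JUMP_PC sentinel until a later gen_generic_branch() resolves it. */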
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

        /* unconditional not taken */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
            dc->npc = dc->pc + 4;
    } else if (cond == 0x8) {
        /* unconditional taken */
            dc->npc = dc->pc + 4;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;

#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
        gen_helper_fcmps(r_rs1, r_rs2);
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);

static inline void gen_op_fcmpd(int fccno)
        gen_helper_fcmpd_fcc1();
        gen_helper_fcmpd_fcc2();
        gen_helper_fcmpd_fcc3();

static inline void gen_op_fcmpq(int fccno)
        gen_helper_fcmpq_fcc1();
        gen_helper_fcmpq_fcc2();
        gen_helper_fcmpq_fcc3();

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
        gen_helper_fcmpes(r_rs1, r_rs2);
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);

static inline void gen_op_fcmped(int fccno)
        gen_helper_fcmped();
        gen_helper_fcmped_fcc1();
        gen_helper_fcmped_fcc2();
        gen_helper_fcmped_fcc3();

static inline void gen_op_fcmpeq(int fccno)
        gen_helper_fcmpeq();
        gen_helper_fcmpeq_fcc1();
        gen_helper_fcmpeq_fcc2();
        gen_helper_fcmpeq_fcc3();

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
    gen_helper_fcmps(r_rs1, r_rs2);

static inline void gen_op_fcmpd(int fccno)

static inline void gen_op_fcmpq(int fccno)

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
    gen_helper_fcmpes(r_rs1, r_rs2);

static inline void gen_op_fcmped(int fccno)
    gen_helper_fcmped();

static inline void gen_op_fcmpeq(int fccno)
    gen_helper_fcmpeq();

static inline void gen_op_fpexception_im(int fsr_flags)
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);

static inline void gen_op_clear_ieee_excp_and_FTT(void)
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);

static inline void gen_clear_float_exceptions(void)
    gen_helper_clear_float_exceptions();

#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
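/* ASI selection: with the immediate bit set the ASI comes from the %asi
   register (cpu_asi); otherwise it is the 8-bit immediate in instruction
   bits 19..26 (in GET_FIELD's big-endian numbering). */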
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
static inline TCGv get_src1(unsigned int insn, TCGv def)
    rs1 = GET_FIELD(insn, 13, 17);
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));

static inline TCGv get_src2(unsigned int insn, TCGv def)
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
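/* Operand fetch: rs1/rs2 of 0 reads as constant zero (%g0), 1..7 come from
   cpu_gregs[], and 8..31 are loaded from the current window via cpu_regwptr;
   when the i bit is set, the second operand is the sign-extended simm13
   field instead of a register. */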
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);

    tcg_temp_free_i32(r_tl);

#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \

#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
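/* The top two bits of the instruction (GET_FIELD(insn, 0, 1)) select the
   major format: 0 = branches/sethi, 1 = call, 2 = arithmetic/logical/FPU,
   3 = loads and stores. */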
    case 0: /* branches/sethi */
        unsigned int xop = GET_FIELD(insn, 7, 9);
#ifdef TARGET_SPARC64
        case 0x1: /* V9 BPcc */
            target = GET_FIELD_SP(insn, 0, 18);
            target = sign_extend(target, 18);
            cc = GET_FIELD_SP(insn, 20, 21);
                do_branch(dc, target, insn, 0, cpu_cond);
                do_branch(dc, target, insn, 1, cpu_cond);
        case 0x3: /* V9 BPr */
            target = GET_FIELD_SP(insn, 0, 13) |
                (GET_FIELD_SP(insn, 20, 21) << 14);
            target = sign_extend(target, 16);
            cpu_src1 = get_src1(insn, cpu_src1);
            do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
        case 0x5: /* V9 FBPcc */
            int cc = GET_FIELD_SP(insn, 20, 21);
            if (gen_trap_ifnofpu(dc, cpu_cond))
            target = GET_FIELD_SP(insn, 0, 18);
            target = sign_extend(target, 19);
            do_fbranch(dc, target, insn, cc, cpu_cond);
        case 0x7: /* CBN+x */
        case 0x2: /* BN+x */
            target = GET_FIELD(insn, 10, 31);
            target = sign_extend(target, 22);
            do_branch(dc, target, insn, 0, cpu_cond);
        case 0x6: /* FBN+x */
            if (gen_trap_ifnofpu(dc, cpu_cond))
            target = GET_FIELD(insn, 10, 31);
            target = sign_extend(target, 22);
            do_fbranch(dc, target, insn, 0, cpu_cond);
        case 0x4: /* SETHI */
            uint32_t value = GET_FIELD(insn, 10, 31);
            r_const = tcg_const_tl(value << 10);
            gen_movl_TN_reg(rd, r_const);
            tcg_temp_free(r_const);
        case 0x0: /* UNIMPL */

        target_long target = GET_FIELDs(insn, 2, 31) << 2;

        r_const = tcg_const_tl(dc->pc);
        gen_movl_TN_reg(15, r_const);
        tcg_temp_free(r_const);

        gen_mov_pc_npc(dc, cpu_cond);
    case 2: /* FPU & Logical Operations */
        unsigned int xop = GET_FIELD(insn, 7, 12);
        if (xop == 0x3a) { /* generate trap */
            cpu_src1 = get_src1(insn, cpu_src1);
                rs2 = GET_FIELD(insn, 25, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);

            cond = GET_FIELD(insn, 3, 6);
            if (cond == 0x8) { /* Trap Always */
                save_state(dc, cpu_cond);
                if ((dc->def->features & CPU_FEATURE_HYPV) &&
                    tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
                    gen_helper_shutdown();
                    gen_helper_raise_exception(cpu_tmp32);
            } else if (cond != 0) {
                TCGv r_cond = tcg_temp_new();
#ifdef TARGET_SPARC64
                int cc = GET_FIELD_SP(insn, 11, 12);

                save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
                    gen_cond(r_cond, 1, cond, dc);
                save_state(dc, cpu_cond);
                gen_cond(r_cond, 0, cond, dc);
                l1 = gen_new_label();
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                if ((dc->def->features & CPU_FEATURE_HYPV) &&
                    tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                gen_helper_raise_exception(cpu_tmp32);

                tcg_temp_free(r_cond);
        } else if (xop == 0x28) {
            rs1 = GET_FIELD(insn, 13, 17);
#ifndef TARGET_SPARC64
            case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                   manual, rdy on the microSPARC
            case 0x0f:          /* stbar in the SPARCv8 manual,
                                   rdy on the microSPARC II */
            case 0x10 ... 0x1f: /* implementation-dependent in the
                                   SPARCv8 manual, rdy on the
                if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                    /* Read Asr17 for a Leon3 monoprocessor */
                    r_const = tcg_const_tl((1 << 8)
                                           | (dc->def->nwindows - 1));
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                gen_movl_TN_reg(rd, cpu_y);
#ifdef TARGET_SPARC64
            case 0x2: /* V9 rdccr */
                gen_helper_compute_psr();
                gen_helper_rdccr(cpu_dst);
                gen_movl_TN_reg(rd, cpu_dst);
            case 0x3: /* V9 rdasi */
                tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                gen_movl_TN_reg(rd, cpu_dst);
            case 0x4: /* V9 rdtick */
                    r_tickptr = tcg_temp_new_ptr();
                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                   offsetof(CPUState, tick));
                    gen_helper_tick_get_count(cpu_dst, r_tickptr);
                    tcg_temp_free_ptr(r_tickptr);
                    gen_movl_TN_reg(rd, cpu_dst);
            case 0x5: /* V9 rdpc */
                    r_const = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
            case 0x6: /* V9 rdfprs */
                tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                gen_movl_TN_reg(rd, cpu_dst);
            case 0xf: /* V9 membar */
                break; /* no effect */
            case 0x13: /* Graphics Status */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                gen_movl_TN_reg(rd, cpu_gsr);
            case 0x16: /* Softint */
                tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                gen_movl_TN_reg(rd, cpu_dst);
            case 0x17: /* Tick compare */
                gen_movl_TN_reg(rd, cpu_tick_cmpr);
            case 0x18: /* System tick */
                    r_tickptr = tcg_temp_new_ptr();
                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                   offsetof(CPUState, stick));
                    gen_helper_tick_get_count(cpu_dst, r_tickptr);
                    tcg_temp_free_ptr(r_tickptr);
                    gen_movl_TN_reg(rd, cpu_dst);
            case 0x19: /* System tick compare */
                gen_movl_TN_reg(rd, cpu_stick_cmpr);
            case 0x10: /* Performance Control */
            case 0x11: /* Performance Instrumentation Counter */
            case 0x12: /* Dispatch Control */
            case 0x14: /* Softint set, WO */
            case 0x15: /* Softint clear, WO */
#if !defined(CONFIG_USER_ONLY)
        } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
            if (!supervisor(dc))
            gen_helper_compute_psr();
            dc->cc_op = CC_OP_FLAGS;
            gen_helper_rdpsr(cpu_dst);
            CHECK_IU_FEATURE(dc, HYPV);
            if (!hypervisor(dc))
            rs1 = GET_FIELD(insn, 13, 17);
                // gen_op_rdhpstate();
                // gen_op_rdhtstate();
                tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                tcg_gen_mov_tl(cpu_dst, cpu_htba);
                tcg_gen_mov_tl(cpu_dst, cpu_hver);
            case 31: // hstick_cmpr
                tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
            if (!supervisor(dc))
#ifdef TARGET_SPARC64
            rs1 = GET_FIELD(insn, 13, 17);
                r_tsptr = tcg_temp_new_ptr();
                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                              offsetof(trap_state, tpc));
                tcg_temp_free_ptr(r_tsptr);
                r_tsptr = tcg_temp_new_ptr();
                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                              offsetof(trap_state, tnpc));
                tcg_temp_free_ptr(r_tsptr);
                r_tsptr = tcg_temp_new_ptr();
                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                              offsetof(trap_state, tstate));
                tcg_temp_free_ptr(r_tsptr);
                r_tsptr = tcg_temp_new_ptr();
                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                               offsetof(trap_state, tt));
                tcg_temp_free_ptr(r_tsptr);
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                r_tickptr = tcg_temp_new_ptr();
                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                               offsetof(CPUState, tick));
                gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                gen_movl_TN_reg(rd, cpu_tmp0);
                tcg_temp_free_ptr(r_tickptr);
                tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, pstate));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, tl));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, psrpil));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                gen_helper_rdcwp(cpu_tmp0);
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, cansave));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
            case 11: // canrestore
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, canrestore));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
            case 12: // cleanwin
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, cleanwin));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
            case 13: // otherwin
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, otherwin));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, wstate));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
            case 16: // UA2005 gl
                CHECK_IU_FEATURE(dc, GL);
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, gl));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
            case 26: // UA2005 strand status
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
            gen_movl_TN_reg(rd, cpu_tmp0);
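/* rdpr: the privileged registers tpc, tnpc, tstate and tt are read from the
   trap_state entry selected by the current trap level, located with
   gen_load_trap_state_at_tl(); most other PR registers are simple loads from
   CPUSPARCState extended to the target word size. */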
        } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
            save_state(dc, cpu_cond);
            gen_helper_flushw();
            if (!supervisor(dc))
            gen_movl_TN_reg(rd, cpu_tbr);
        } else if (xop == 0x34) {   /* FPU Operations */
            if (gen_trap_ifnofpu(dc, cpu_cond))
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
            save_state(dc, cpu_cond);
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
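/* FPop pattern: single-precision operands are used directly from cpu_fpr[],
   while double/quad operands are staged through DT0/DT1 or QT0/QT1; each
   operation clears the accumulated float exceptions, invokes the helper, and
   then checks for IEEE exceptions before the result is written back. */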
2380 case 0x41: /* fadds */
2381 gen_clear_float_exceptions();
2382 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2383 gen_helper_check_ieee_exceptions();
2384 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2386 case 0x42: /* faddd */
2387 gen_op_load_fpr_DT0(DFPREG(rs1
));
2388 gen_op_load_fpr_DT1(DFPREG(rs2
));
2389 gen_clear_float_exceptions();
2391 gen_helper_check_ieee_exceptions();
2392 gen_op_store_DT0_fpr(DFPREG(rd
));
2394 case 0x43: /* faddq */
2395 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2396 gen_op_load_fpr_QT0(QFPREG(rs1
));
2397 gen_op_load_fpr_QT1(QFPREG(rs2
));
2398 gen_clear_float_exceptions();
2400 gen_helper_check_ieee_exceptions();
2401 gen_op_store_QT0_fpr(QFPREG(rd
));
2403 case 0x45: /* fsubs */
2404 gen_clear_float_exceptions();
2405 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2406 gen_helper_check_ieee_exceptions();
2407 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2409 case 0x46: /* fsubd */
2410 gen_op_load_fpr_DT0(DFPREG(rs1
));
2411 gen_op_load_fpr_DT1(DFPREG(rs2
));
2412 gen_clear_float_exceptions();
2414 gen_helper_check_ieee_exceptions();
2415 gen_op_store_DT0_fpr(DFPREG(rd
));
2417 case 0x47: /* fsubq */
2418 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2419 gen_op_load_fpr_QT0(QFPREG(rs1
));
2420 gen_op_load_fpr_QT1(QFPREG(rs2
));
2421 gen_clear_float_exceptions();
2423 gen_helper_check_ieee_exceptions();
2424 gen_op_store_QT0_fpr(QFPREG(rd
));
2426 case 0x49: /* fmuls */
2427 CHECK_FPU_FEATURE(dc
, FMUL
);
2428 gen_clear_float_exceptions();
2429 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2430 gen_helper_check_ieee_exceptions();
2431 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2433 case 0x4a: /* fmuld */
2434 CHECK_FPU_FEATURE(dc
, FMUL
);
2435 gen_op_load_fpr_DT0(DFPREG(rs1
));
2436 gen_op_load_fpr_DT1(DFPREG(rs2
));
2437 gen_clear_float_exceptions();
2439 gen_helper_check_ieee_exceptions();
2440 gen_op_store_DT0_fpr(DFPREG(rd
));
2442 case 0x4b: /* fmulq */
2443 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2444 CHECK_FPU_FEATURE(dc
, FMUL
);
2445 gen_op_load_fpr_QT0(QFPREG(rs1
));
2446 gen_op_load_fpr_QT1(QFPREG(rs2
));
2447 gen_clear_float_exceptions();
2449 gen_helper_check_ieee_exceptions();
2450 gen_op_store_QT0_fpr(QFPREG(rd
));
case 0x4d: /* fdivs */
gen_clear_float_exceptions();
gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
case 0x4e: /* fdivd */
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
case 0x4f: /* fdivq */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT0(QFPREG(rs1));
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
case 0x69: /* fsmuld */
CHECK_FPU_FEATURE(dc, FSMULD);
gen_clear_float_exceptions();
gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
case 0x6e: /* fdmulq */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
gen_helper_fdmulq();
gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
case 0xc4: /* fitos */
gen_clear_float_exceptions();
gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
case 0xc6: /* fdtos */
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
gen_helper_fdtos(cpu_tmp32);
gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
case 0xc7: /* fqtos */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
gen_helper_fqtos(cpu_tmp32);
gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
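/*
 * Note on the pattern above: every IEEE FP operation first clears the
 * accumulated float exception state, runs the helper, then calls
 * gen_helper_check_ieee_exceptions() before committing the result.
 * Single-precision results are staged in cpu_tmp32; double/quad results
 * go through the DT0/QT0 staging slots and the store helpers.
 */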
2512 case 0xc8: /* fitod */
2513 gen_helper_fitod(cpu_fpr
[rs2
]);
2514 gen_op_store_DT0_fpr(DFPREG(rd
));
2516 case 0xc9: /* fstod */
2517 gen_helper_fstod(cpu_fpr
[rs2
]);
2518 gen_op_store_DT0_fpr(DFPREG(rd
));
2520 case 0xcb: /* fqtod */
2521 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2522 gen_op_load_fpr_QT1(QFPREG(rs2
));
2523 gen_clear_float_exceptions();
2525 gen_helper_check_ieee_exceptions();
2526 gen_op_store_DT0_fpr(DFPREG(rd
));
2528 case 0xcc: /* fitoq */
2529 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2530 gen_helper_fitoq(cpu_fpr
[rs2
]);
2531 gen_op_store_QT0_fpr(QFPREG(rd
));
2533 case 0xcd: /* fstoq */
2534 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2535 gen_helper_fstoq(cpu_fpr
[rs2
]);
2536 gen_op_store_QT0_fpr(QFPREG(rd
));
2538 case 0xce: /* fdtoq */
2539 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2540 gen_op_load_fpr_DT1(DFPREG(rs2
));
2542 gen_op_store_QT0_fpr(QFPREG(rd
));
2544 case 0xd1: /* fstoi */
2545 gen_clear_float_exceptions();
2546 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2547 gen_helper_check_ieee_exceptions();
2548 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2550 case 0xd2: /* fdtoi */
2551 gen_op_load_fpr_DT1(DFPREG(rs2
));
2552 gen_clear_float_exceptions();
2553 gen_helper_fdtoi(cpu_tmp32
);
2554 gen_helper_check_ieee_exceptions();
2555 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2557 case 0xd3: /* fqtoi */
2558 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2559 gen_op_load_fpr_QT1(QFPREG(rs2
));
2560 gen_clear_float_exceptions();
2561 gen_helper_fqtoi(cpu_tmp32
);
2562 gen_helper_check_ieee_exceptions();
2563 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2565 #ifdef TARGET_SPARC64
2566 case 0x2: /* V9 fmovd */
2567 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2568 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2569 cpu_fpr
[DFPREG(rs2
) + 1]);
2571 case 0x3: /* V9 fmovq */
2572 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2573 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2574 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2575 cpu_fpr
[QFPREG(rs2
) + 1]);
2576 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2577 cpu_fpr
[QFPREG(rs2
) + 2]);
2578 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2579 cpu_fpr
[QFPREG(rs2
) + 3]);
2581 case 0x6: /* V9 fnegd */
2582 gen_op_load_fpr_DT1(DFPREG(rs2
));
2584 gen_op_store_DT0_fpr(DFPREG(rd
));
2586 case 0x7: /* V9 fnegq */
2587 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2588 gen_op_load_fpr_QT1(QFPREG(rs2
));
2590 gen_op_store_QT0_fpr(QFPREG(rd
));
2592 case 0xa: /* V9 fabsd */
2593 gen_op_load_fpr_DT1(DFPREG(rs2
));
2595 gen_op_store_DT0_fpr(DFPREG(rd
));
2597 case 0xb: /* V9 fabsq */
2598 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2599 gen_op_load_fpr_QT1(QFPREG(rs2
));
2601 gen_op_store_QT0_fpr(QFPREG(rd
));
2603 case 0x81: /* V9 fstox */
2604 gen_clear_float_exceptions();
2605 gen_helper_fstox(cpu_fpr
[rs2
]);
2606 gen_helper_check_ieee_exceptions();
2607 gen_op_store_DT0_fpr(DFPREG(rd
));
2609 case 0x82: /* V9 fdtox */
2610 gen_op_load_fpr_DT1(DFPREG(rs2
));
2611 gen_clear_float_exceptions();
2613 gen_helper_check_ieee_exceptions();
2614 gen_op_store_DT0_fpr(DFPREG(rd
));
2616 case 0x83: /* V9 fqtox */
2617 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2618 gen_op_load_fpr_QT1(QFPREG(rs2
));
2619 gen_clear_float_exceptions();
2621 gen_helper_check_ieee_exceptions();
2622 gen_op_store_DT0_fpr(DFPREG(rd
));
2624 case 0x84: /* V9 fxtos */
2625 gen_op_load_fpr_DT1(DFPREG(rs2
));
2626 gen_clear_float_exceptions();
2627 gen_helper_fxtos(cpu_tmp32
);
2628 gen_helper_check_ieee_exceptions();
2629 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2631 case 0x88: /* V9 fxtod */
2632 gen_op_load_fpr_DT1(DFPREG(rs2
));
2633 gen_clear_float_exceptions();
2635 gen_helper_check_ieee_exceptions();
2636 gen_op_store_DT0_fpr(DFPREG(rd
));
2638 case 0x8c: /* V9 fxtoq */
2639 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2640 gen_op_load_fpr_DT1(DFPREG(rs2
));
2641 gen_clear_float_exceptions();
2643 gen_helper_check_ieee_exceptions();
2644 gen_op_store_QT0_fpr(QFPREG(rd
));
} else if (xop == 0x35) { /* FPU Operations */
#ifdef TARGET_SPARC64
if (gen_trap_ifnofpu(dc, cpu_cond))
gen_op_clear_ieee_excp_and_FTT();
rs1 = GET_FIELD(insn, 13, 17);
rs2 = GET_FIELD(insn, 27, 31);
xop = GET_FIELD(insn, 18, 26);
save_state(dc, cpu_cond);
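/*
 * The fmov{s,d,q}r cases below implement the V9 conditional FP moves on an
 * integer register condition: a TCG brcond on cpu_src1 skips the register
 * copy (one tcg_gen_mov_i32 per 32-bit half) when the condition fails.
 */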
2661 #ifdef TARGET_SPARC64
2662 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2665 l1
= gen_new_label();
2666 cond
= GET_FIELD_SP(insn
, 14, 17);
2667 cpu_src1
= get_src1(insn
, cpu_src1
);
2668 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2670 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2673 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2676 l1
= gen_new_label();
2677 cond
= GET_FIELD_SP(insn
, 14, 17);
2678 cpu_src1
= get_src1(insn
, cpu_src1
);
2679 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2681 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2682 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2685 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2688 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2689 l1
= gen_new_label();
2690 cond
= GET_FIELD_SP(insn
, 14, 17);
2691 cpu_src1
= get_src1(insn
, cpu_src1
);
2692 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2694 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2695 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2696 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2697 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2703 #ifdef TARGET_SPARC64
2704 #define FMOVSCC(fcc) \
2709 l1 = gen_new_label(); \
2710 r_cond = tcg_temp_new(); \
2711 cond = GET_FIELD_SP(insn, 14, 17); \
2712 gen_fcond(r_cond, fcc, cond); \
2713 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2715 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2716 gen_set_label(l1); \
2717 tcg_temp_free(r_cond); \
2719 #define FMOVDCC(fcc) \
2724 l1 = gen_new_label(); \
2725 r_cond = tcg_temp_new(); \
2726 cond = GET_FIELD_SP(insn, 14, 17); \
2727 gen_fcond(r_cond, fcc, cond); \
2728 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2730 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2731 cpu_fpr[DFPREG(rs2)]); \
2732 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2733 cpu_fpr[DFPREG(rs2) + 1]); \
2734 gen_set_label(l1); \
2735 tcg_temp_free(r_cond); \
2737 #define FMOVQCC(fcc) \
2742 l1 = gen_new_label(); \
2743 r_cond = tcg_temp_new(); \
2744 cond = GET_FIELD_SP(insn, 14, 17); \
2745 gen_fcond(r_cond, fcc, cond); \
2746 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2748 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2749 cpu_fpr[QFPREG(rs2)]); \
2750 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2751 cpu_fpr[QFPREG(rs2) + 1]); \
2752 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2753 cpu_fpr[QFPREG(rs2) + 2]); \
2754 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2755 cpu_fpr[QFPREG(rs2) + 3]); \
2756 gen_set_label(l1); \
2757 tcg_temp_free(r_cond); \
2759 case 0x001: /* V9 fmovscc %fcc0 */
2762 case 0x002: /* V9 fmovdcc %fcc0 */
2765 case 0x003: /* V9 fmovqcc %fcc0 */
2766 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2769 case 0x041: /* V9 fmovscc %fcc1 */
2772 case 0x042: /* V9 fmovdcc %fcc1 */
2775 case 0x043: /* V9 fmovqcc %fcc1 */
2776 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2779 case 0x081: /* V9 fmovscc %fcc2 */
2782 case 0x082: /* V9 fmovdcc %fcc2 */
2785 case 0x083: /* V9 fmovqcc %fcc2 */
2786 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2789 case 0x0c1: /* V9 fmovscc %fcc3 */
2792 case 0x0c2: /* V9 fmovdcc %fcc3 */
2795 case 0x0c3: /* V9 fmovqcc %fcc3 */
2796 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2802 #define FMOVSCC(icc) \
2807 l1 = gen_new_label(); \
2808 r_cond = tcg_temp_new(); \
2809 cond = GET_FIELD_SP(insn, 14, 17); \
2810 gen_cond(r_cond, icc, cond, dc); \
2811 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2813 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2814 gen_set_label(l1); \
2815 tcg_temp_free(r_cond); \
2817 #define FMOVDCC(icc) \
2822 l1 = gen_new_label(); \
2823 r_cond = tcg_temp_new(); \
2824 cond = GET_FIELD_SP(insn, 14, 17); \
2825 gen_cond(r_cond, icc, cond, dc); \
2826 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2828 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2829 cpu_fpr[DFPREG(rs2)]); \
2830 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2831 cpu_fpr[DFPREG(rs2) + 1]); \
2832 gen_set_label(l1); \
2833 tcg_temp_free(r_cond); \
2835 #define FMOVQCC(icc) \
2840 l1 = gen_new_label(); \
2841 r_cond = tcg_temp_new(); \
2842 cond = GET_FIELD_SP(insn, 14, 17); \
2843 gen_cond(r_cond, icc, cond, dc); \
2844 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2846 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2847 cpu_fpr[QFPREG(rs2)]); \
2848 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2849 cpu_fpr[QFPREG(rs2) + 1]); \
2850 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2851 cpu_fpr[QFPREG(rs2) + 2]); \
2852 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2853 cpu_fpr[QFPREG(rs2) + 3]); \
2854 gen_set_label(l1); \
2855 tcg_temp_free(r_cond); \
2858 case 0x101: /* V9 fmovscc %icc */
2861 case 0x102: /* V9 fmovdcc %icc */
2863 case 0x103: /* V9 fmovqcc %icc */
2864 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2867 case 0x181: /* V9 fmovscc %xcc */
2870 case 0x182: /* V9 fmovdcc %xcc */
2873 case 0x183: /* V9 fmovqcc %xcc */
2874 CHECK_FPU_FEATURE(dc
, FLOAT128
);
case 0x51: /* fcmps, V9 %fcc */
gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
case 0x52: /* fcmpd, V9 %fcc */
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_op_fcmpd(rd & 3);
case 0x53: /* fcmpq, V9 %fcc */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT0(QFPREG(rs1));
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_op_fcmpq(rd & 3);
case 0x55: /* fcmpes, V9 %fcc */
gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
case 0x56: /* fcmped, V9 %fcc */
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_op_fcmped(rd & 3);
case 0x57: /* fcmpeq, V9 %fcc */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT0(QFPREG(rs1));
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_op_fcmpeq(rd & 3);
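/*
 * In the fcmp* cases above the comparison helpers take "rd & 3": the low two
 * bits of rd select which %fcc field receives the condition codes on V9
 * CPUs (pre-V9 CPUs only have %fcc0, hence the "V9 %fcc" comments).
 */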
} else if (xop == 0x2) {
rs1 = GET_FIELD(insn, 13, 17);
// or %g0, x, y -> mov T0, x; mov y, T0
if (IS_IMM) { /* immediate */
simm = GET_FIELDs(insn, 19, 31);
r_const = tcg_const_tl(simm);
gen_movl_TN_reg(rd, r_const);
tcg_temp_free(r_const);
} else { /* register */
rs2 = GET_FIELD(insn, 27, 31);
gen_movl_reg_TN(rs2, cpu_dst);
gen_movl_TN_reg(rd, cpu_dst);
cpu_src1 = get_src1(insn, cpu_src1);
if (IS_IMM) { /* immediate */
simm = GET_FIELDs(insn, 19, 31);
tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
gen_movl_TN_reg(rd, cpu_dst);
} else { /* register */
// or x, %g0, y -> mov T1, x; mov y, T1
rs2 = GET_FIELD(insn, 27, 31);
gen_movl_reg_TN(rs2, cpu_src2);
tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
gen_movl_TN_reg(rd, cpu_dst);
gen_movl_TN_reg(rd, cpu_src1);
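/*
 * xop == 0x2 is "or": when either source operand is %g0 the instruction
 * degenerates to a plain move, so the translator emits a direct register or
 * immediate move instead of a TCG or, as the two inline comments describe.
 */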
#ifdef TARGET_SPARC64
} else if (xop == 0x25) { /* sll, V9 sllx */
cpu_src1 = get_src1(insn, cpu_src1);
if (IS_IMM) { /* immediate */
simm = GET_FIELDs(insn, 20, 31);
if (insn & (1 << 12)) {
tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
} else { /* register */
rs2 = GET_FIELD(insn, 27, 31);
gen_movl_reg_TN(rs2, cpu_src2);
if (insn & (1 << 12)) {
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
gen_movl_TN_reg(rd, cpu_dst);
} else if (xop == 0x26) { /* srl, V9 srlx */
cpu_src1 = get_src1(insn, cpu_src1);
if (IS_IMM) { /* immediate */
simm = GET_FIELDs(insn, 20, 31);
if (insn & (1 << 12)) {
tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
} else { /* register */
rs2 = GET_FIELD(insn, 27, 31);
gen_movl_reg_TN(rs2, cpu_src2);
if (insn & (1 << 12)) {
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
gen_movl_TN_reg(rd, cpu_dst);
} else if (xop == 0x27) { /* sra, V9 srax */
cpu_src1 = get_src1(insn, cpu_src1);
if (IS_IMM) { /* immediate */
simm = GET_FIELDs(insn, 20, 31);
if (insn & (1 << 12)) {
tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
} else { /* register */
rs2 = GET_FIELD(insn, 27, 31);
gen_movl_reg_TN(rs2, cpu_src2);
if (insn & (1 << 12)) {
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
gen_movl_TN_reg(rd, cpu_dst);
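/*
 * For the V9 shift forms above, bit 12 of the instruction selects the 64-bit
 * variant (sllx/srlx/srax) with a 6-bit count; the 32-bit variants mask the
 * count to 5 bits and, for right shifts, first truncate (srl) or sign-extend
 * (sra) the low 32 bits of the source. A register-form "srl %o1, %o2, %o3"
 * therefore lowers roughly to:
 *     tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
 *     tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
 *     tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
 * followed by the writeback through gen_movl_TN_reg().
 */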
} else if (xop < 0x36) {
cpu_src1 = get_src1(insn, cpu_src1);
cpu_src2 = get_src2(insn, cpu_src2);
switch (xop & ~0x10) {
simm = GET_FIELDs(insn, 19, 31);
gen_op_addi_cc(cpu_dst, cpu_src1, simm);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
dc->cc_op = CC_OP_ADD;
tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
dc->cc_op = CC_OP_ADD;
tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
simm = GET_FIELDs(insn, 19, 31);
tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
dc->cc_op = CC_OP_LOGIC;
simm = GET_FIELDs(insn, 19, 31);
tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
dc->cc_op = CC_OP_LOGIC;
simm = GET_FIELDs(insn, 19, 31);
tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
dc->cc_op = CC_OP_LOGIC;
simm = GET_FIELDs(insn, 19, 31);
gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
dc->cc_op = CC_OP_SUB;
tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
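/*
 * The flag-setting variants above do not compute PSR flags eagerly: they
 * keep the result in cpu_cc_dst (and operands in cpu_cc_src/cpu_cc_src2
 * where needed) and record the operation in cpu_cc_op and dc->cc_op, so the
 * flags can be materialised lazily by gen_helper_compute_psr() later.
 */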
3099 case 0x5: /* andn */
3101 simm
= GET_FIELDs(insn
, 19, 31);
3102 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3104 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3107 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3108 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3109 dc
->cc_op
= CC_OP_LOGIC
;
3114 simm
= GET_FIELDs(insn
, 19, 31);
3115 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3117 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3120 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3121 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3122 dc
->cc_op
= CC_OP_LOGIC
;
3125 case 0x7: /* xorn */
3127 simm
= GET_FIELDs(insn
, 19, 31);
3128 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3130 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3131 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3134 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3135 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3136 dc
->cc_op
= CC_OP_LOGIC
;
3139 case 0x8: /* addx, V9 addc */
3140 gen_op_addx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3143 #ifdef TARGET_SPARC64
3144 case 0x9: /* V9 mulx */
3146 simm
= GET_FIELDs(insn
, 19, 31);
3147 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3149 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3153 case 0xa: /* umul */
3154 CHECK_IU_FEATURE(dc
, MUL
);
3155 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3157 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3158 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3159 dc
->cc_op
= CC_OP_LOGIC
;
3162 case 0xb: /* smul */
3163 CHECK_IU_FEATURE(dc
, MUL
);
3164 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3166 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3167 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3168 dc
->cc_op
= CC_OP_LOGIC
;
3171 case 0xc: /* subx, V9 subc */
3172 gen_op_subx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3175 #ifdef TARGET_SPARC64
3176 case 0xd: /* V9 udivx */
3177 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3178 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3179 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3180 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3183 case 0xe: /* udiv */
3184 CHECK_IU_FEATURE(dc
, DIV
);
3186 gen_helper_udiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3187 dc
->cc_op
= CC_OP_DIV
;
3189 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3192 case 0xf: /* sdiv */
3193 CHECK_IU_FEATURE(dc
, DIV
);
3195 gen_helper_sdiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3196 dc
->cc_op
= CC_OP_DIV
;
3198 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3204 gen_movl_TN_reg(rd
, cpu_dst
);
3206 cpu_src1
= get_src1(insn
, cpu_src1
);
3207 cpu_src2
= get_src2(insn
, cpu_src2
);
3209 case 0x20: /* taddcc */
3210 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3211 gen_movl_TN_reg(rd
, cpu_dst
);
3212 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3213 dc
->cc_op
= CC_OP_TADD
;
3215 case 0x21: /* tsubcc */
3216 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3217 gen_movl_TN_reg(rd
, cpu_dst
);
3218 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3219 dc
->cc_op
= CC_OP_TSUB
;
3221 case 0x22: /* taddcctv */
3222 save_state(dc
, cpu_cond
);
3223 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3224 gen_movl_TN_reg(rd
, cpu_dst
);
3225 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3226 dc
->cc_op
= CC_OP_TADDTV
;
3228 case 0x23: /* tsubcctv */
3229 save_state(dc
, cpu_cond
);
3230 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3231 gen_movl_TN_reg(rd
, cpu_dst
);
3232 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3233 dc
->cc_op
= CC_OP_TSUBTV
;
3235 case 0x24: /* mulscc */
3236 gen_helper_compute_psr();
3237 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3238 gen_movl_TN_reg(rd
, cpu_dst
);
3239 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3240 dc
->cc_op
= CC_OP_ADD
;
3242 #ifndef TARGET_SPARC64
3243 case 0x25: /* sll */
3244 if (IS_IMM
) { /* immediate */
3245 simm
= GET_FIELDs(insn
, 20, 31);
3246 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3247 } else { /* register */
3248 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3249 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3251 gen_movl_TN_reg(rd
, cpu_dst
);
3253 case 0x26: /* srl */
3254 if (IS_IMM
) { /* immediate */
3255 simm
= GET_FIELDs(insn
, 20, 31);
3256 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3257 } else { /* register */
3258 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3259 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3261 gen_movl_TN_reg(rd
, cpu_dst
);
3263 case 0x27: /* sra */
3264 if (IS_IMM
) { /* immediate */
3265 simm
= GET_FIELDs(insn
, 20, 31);
3266 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3267 } else { /* register */
3268 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3269 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3271 gen_movl_TN_reg(rd
, cpu_dst
);
3278 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3279 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3281 #ifndef TARGET_SPARC64
3282 case 0x01 ... 0x0f: /* undefined in the
3286 case 0x10 ... 0x1f: /* implementation-dependent
3292 case 0x2: /* V9 wrccr */
3293 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3294 gen_helper_wrccr(cpu_dst
);
3295 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3296 dc
->cc_op
= CC_OP_FLAGS
;
3298 case 0x3: /* V9 wrasi */
3299 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3300 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3301 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3303 case 0x6: /* V9 wrfprs */
3304 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3305 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3306 save_state(dc
, cpu_cond
);
3311 case 0xf: /* V9 sir, nop if user */
3312 #if !defined(CONFIG_USER_ONLY)
3313 if (supervisor(dc
)) {
3318 case 0x13: /* Graphics Status */
3319 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3321 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3323 case 0x14: /* Softint set */
3324 if (!supervisor(dc
))
3326 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3327 gen_helper_set_softint(cpu_tmp64
);
3329 case 0x15: /* Softint clear */
3330 if (!supervisor(dc
))
3332 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3333 gen_helper_clear_softint(cpu_tmp64
);
3335 case 0x16: /* Softint write */
3336 if (!supervisor(dc
))
3338 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3339 gen_helper_write_softint(cpu_tmp64
);
3341 case 0x17: /* Tick compare */
3342 #if !defined(CONFIG_USER_ONLY)
3343 if (!supervisor(dc
))
3349 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3351 r_tickptr
= tcg_temp_new_ptr();
3352 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3353 offsetof(CPUState
, tick
));
3354 gen_helper_tick_set_limit(r_tickptr
,
3356 tcg_temp_free_ptr(r_tickptr
);
3359 case 0x18: /* System tick */
3360 #if !defined(CONFIG_USER_ONLY)
3361 if (!supervisor(dc
))
3367 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3369 r_tickptr
= tcg_temp_new_ptr();
3370 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3371 offsetof(CPUState
, stick
));
3372 gen_helper_tick_set_count(r_tickptr
,
3374 tcg_temp_free_ptr(r_tickptr
);
3377 case 0x19: /* System tick compare */
3378 #if !defined(CONFIG_USER_ONLY)
3379 if (!supervisor(dc
))
3385 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3387 r_tickptr
= tcg_temp_new_ptr();
3388 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3389 offsetof(CPUState
, stick
));
3390 gen_helper_tick_set_limit(r_tickptr
,
3392 tcg_temp_free_ptr(r_tickptr
);
3396 case 0x10: /* Performance Control */
3397 case 0x11: /* Performance Instrumentation
3399 case 0x12: /* Dispatch Control */
3406 #if !defined(CONFIG_USER_ONLY)
3407 case 0x31: /* wrpsr, V9 saved, restored */
3409 if (!supervisor(dc
))
3411 #ifdef TARGET_SPARC64
3417 gen_helper_restored();
3419 case 2: /* UA2005 allclean */
3420 case 3: /* UA2005 otherw */
3421 case 4: /* UA2005 normalw */
3422 case 5: /* UA2005 invalw */
3428 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3429 gen_helper_wrpsr(cpu_dst
);
3430 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3431 dc
->cc_op
= CC_OP_FLAGS
;
3432 save_state(dc
, cpu_cond
);
3439 case 0x32: /* wrwim, V9 wrpr */
3441 if (!supervisor(dc
))
3443 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3444 #ifdef TARGET_SPARC64
3450 r_tsptr
= tcg_temp_new_ptr();
3451 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3452 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3453 offsetof(trap_state
, tpc
));
3454 tcg_temp_free_ptr(r_tsptr
);
3461 r_tsptr
= tcg_temp_new_ptr();
3462 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3463 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3464 offsetof(trap_state
, tnpc
));
3465 tcg_temp_free_ptr(r_tsptr
);
3472 r_tsptr
= tcg_temp_new_ptr();
3473 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3474 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3475 offsetof(trap_state
,
3477 tcg_temp_free_ptr(r_tsptr
);
3484 r_tsptr
= tcg_temp_new_ptr();
3485 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3486 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3487 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3488 offsetof(trap_state
, tt
));
3489 tcg_temp_free_ptr(r_tsptr
);
3496 r_tickptr
= tcg_temp_new_ptr();
3497 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3498 offsetof(CPUState
, tick
));
3499 gen_helper_tick_set_count(r_tickptr
,
3501 tcg_temp_free_ptr(r_tickptr
);
3505 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3508 save_state(dc
, cpu_cond
);
3509 gen_helper_wrpstate(cpu_tmp0
);
3510 dc
->npc
= DYNAMIC_PC
;
3513 save_state(dc
, cpu_cond
);
3514 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3515 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3516 offsetof(CPUSPARCState
, tl
));
3517 dc
->npc
= DYNAMIC_PC
;
3520 gen_helper_wrpil(cpu_tmp0
);
3523 gen_helper_wrcwp(cpu_tmp0
);
3526 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3527 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3528 offsetof(CPUSPARCState
,
3531 case 11: // canrestore
3532 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3533 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3534 offsetof(CPUSPARCState
,
3537 case 12: // cleanwin
3538 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3539 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3540 offsetof(CPUSPARCState
,
3543 case 13: // otherwin
3544 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3545 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3546 offsetof(CPUSPARCState
,
3550 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3551 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3552 offsetof(CPUSPARCState
,
3555 case 16: // UA2005 gl
3556 CHECK_IU_FEATURE(dc
, GL
);
3557 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3558 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3559 offsetof(CPUSPARCState
, gl
));
3561 case 26: // UA2005 strand status
3562 CHECK_IU_FEATURE(dc
, HYPV
);
3563 if (!hypervisor(dc
))
3565 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3571 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3572 if (dc
->def
->nwindows
!= 32)
3573 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3574 (1 << dc
->def
->nwindows
) - 1);
3575 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3579 case 0x33: /* wrtbr, UA2005 wrhpr */
3581 #ifndef TARGET_SPARC64
3582 if (!supervisor(dc
))
3584 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3586 CHECK_IU_FEATURE(dc
, HYPV
);
3587 if (!hypervisor(dc
))
3589 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3592 // XXX gen_op_wrhpstate();
3593 save_state(dc
, cpu_cond
);
3599 // XXX gen_op_wrhtstate();
3602 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3605 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3607 case 31: // hstick_cmpr
3611 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3612 r_tickptr
= tcg_temp_new_ptr();
3613 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3614 offsetof(CPUState
, hstick
));
3615 gen_helper_tick_set_limit(r_tickptr
,
3617 tcg_temp_free_ptr(r_tickptr
);
3620 case 6: // hver readonly
3628 #ifdef TARGET_SPARC64
3629 case 0x2c: /* V9 movcc */
3631 int cc
= GET_FIELD_SP(insn
, 11, 12);
3632 int cond
= GET_FIELD_SP(insn
, 14, 17);
3636 r_cond
= tcg_temp_new();
3637 if (insn
& (1 << 18)) {
3639 gen_cond(r_cond
, 0, cond
, dc
);
3641 gen_cond(r_cond
, 1, cond
, dc
);
3645 gen_fcond(r_cond
, cc
, cond
);
3648 l1
= gen_new_label();
3650 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3651 if (IS_IMM
) { /* immediate */
3654 simm
= GET_FIELD_SPs(insn
, 0, 10);
3655 r_const
= tcg_const_tl(simm
);
3656 gen_movl_TN_reg(rd
, r_const
);
3657 tcg_temp_free(r_const
);
3659 rs2
= GET_FIELD_SP(insn
, 0, 4);
3660 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3661 gen_movl_TN_reg(rd
, cpu_tmp0
);
3664 tcg_temp_free(r_cond
);
3667 case 0x2d: /* V9 sdivx */
3668 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3669 gen_movl_TN_reg(rd
, cpu_dst
);
3671 case 0x2e: /* V9 popc */
3673 cpu_src2
= get_src2(insn
, cpu_src2
);
3674 gen_helper_popc(cpu_dst
, cpu_src2
);
3675 gen_movl_TN_reg(rd
, cpu_dst
);
3677 case 0x2f: /* V9 movr */
3679 int cond
= GET_FIELD_SP(insn
, 10, 12);
3682 cpu_src1
= get_src1(insn
, cpu_src1
);
3684 l1
= gen_new_label();
3686 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3688 if (IS_IMM
) { /* immediate */
3691 simm
= GET_FIELD_SPs(insn
, 0, 9);
3692 r_const
= tcg_const_tl(simm
);
3693 gen_movl_TN_reg(rd
, r_const
);
3694 tcg_temp_free(r_const
);
3696 rs2
= GET_FIELD_SP(insn
, 0, 4);
3697 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3698 gen_movl_TN_reg(rd
, cpu_tmp0
);
3708 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3709 #ifdef TARGET_SPARC64
3710 int opf
= GET_FIELD_SP(insn
, 5, 13);
3711 rs1
= GET_FIELD(insn
, 13, 17);
3712 rs2
= GET_FIELD(insn
, 27, 31);
3713 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3717 case 0x000: /* VIS I edge8cc */
3718 case 0x001: /* VIS II edge8n */
3719 case 0x002: /* VIS I edge8lcc */
3720 case 0x003: /* VIS II edge8ln */
3721 case 0x004: /* VIS I edge16cc */
3722 case 0x005: /* VIS II edge16n */
3723 case 0x006: /* VIS I edge16lcc */
3724 case 0x007: /* VIS II edge16ln */
3725 case 0x008: /* VIS I edge32cc */
3726 case 0x009: /* VIS II edge32n */
3727 case 0x00a: /* VIS I edge32lcc */
3728 case 0x00b: /* VIS II edge32ln */
3731 case 0x010: /* VIS I array8 */
3732 CHECK_FPU_FEATURE(dc
, VIS1
);
3733 cpu_src1
= get_src1(insn
, cpu_src1
);
3734 gen_movl_reg_TN(rs2
, cpu_src2
);
3735 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3736 gen_movl_TN_reg(rd
, cpu_dst
);
3738 case 0x012: /* VIS I array16 */
3739 CHECK_FPU_FEATURE(dc
, VIS1
);
3740 cpu_src1
= get_src1(insn
, cpu_src1
);
3741 gen_movl_reg_TN(rs2
, cpu_src2
);
3742 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3743 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3744 gen_movl_TN_reg(rd
, cpu_dst
);
3746 case 0x014: /* VIS I array32 */
3747 CHECK_FPU_FEATURE(dc
, VIS1
);
3748 cpu_src1
= get_src1(insn
, cpu_src1
);
3749 gen_movl_reg_TN(rs2
, cpu_src2
);
3750 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3751 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3752 gen_movl_TN_reg(rd
, cpu_dst
);
3754 case 0x018: /* VIS I alignaddr */
3755 CHECK_FPU_FEATURE(dc
, VIS1
);
3756 cpu_src1
= get_src1(insn
, cpu_src1
);
3757 gen_movl_reg_TN(rs2
, cpu_src2
);
3758 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3759 gen_movl_TN_reg(rd
, cpu_dst
);
3761 case 0x019: /* VIS II bmask */
3762 case 0x01a: /* VIS I alignaddrl */
3765 case 0x020: /* VIS I fcmple16 */
3766 CHECK_FPU_FEATURE(dc
, VIS1
);
3767 gen_op_load_fpr_DT0(DFPREG(rs1
));
3768 gen_op_load_fpr_DT1(DFPREG(rs2
));
3769 gen_helper_fcmple16();
3770 gen_op_store_DT0_fpr(DFPREG(rd
));
3772 case 0x022: /* VIS I fcmpne16 */
3773 CHECK_FPU_FEATURE(dc
, VIS1
);
3774 gen_op_load_fpr_DT0(DFPREG(rs1
));
3775 gen_op_load_fpr_DT1(DFPREG(rs2
));
3776 gen_helper_fcmpne16();
3777 gen_op_store_DT0_fpr(DFPREG(rd
));
3779 case 0x024: /* VIS I fcmple32 */
3780 CHECK_FPU_FEATURE(dc
, VIS1
);
3781 gen_op_load_fpr_DT0(DFPREG(rs1
));
3782 gen_op_load_fpr_DT1(DFPREG(rs2
));
3783 gen_helper_fcmple32();
3784 gen_op_store_DT0_fpr(DFPREG(rd
));
3786 case 0x026: /* VIS I fcmpne32 */
3787 CHECK_FPU_FEATURE(dc
, VIS1
);
3788 gen_op_load_fpr_DT0(DFPREG(rs1
));
3789 gen_op_load_fpr_DT1(DFPREG(rs2
));
3790 gen_helper_fcmpne32();
3791 gen_op_store_DT0_fpr(DFPREG(rd
));
3793 case 0x028: /* VIS I fcmpgt16 */
3794 CHECK_FPU_FEATURE(dc
, VIS1
);
3795 gen_op_load_fpr_DT0(DFPREG(rs1
));
3796 gen_op_load_fpr_DT1(DFPREG(rs2
));
3797 gen_helper_fcmpgt16();
3798 gen_op_store_DT0_fpr(DFPREG(rd
));
3800 case 0x02a: /* VIS I fcmpeq16 */
3801 CHECK_FPU_FEATURE(dc
, VIS1
);
3802 gen_op_load_fpr_DT0(DFPREG(rs1
));
3803 gen_op_load_fpr_DT1(DFPREG(rs2
));
3804 gen_helper_fcmpeq16();
3805 gen_op_store_DT0_fpr(DFPREG(rd
));
3807 case 0x02c: /* VIS I fcmpgt32 */
3808 CHECK_FPU_FEATURE(dc
, VIS1
);
3809 gen_op_load_fpr_DT0(DFPREG(rs1
));
3810 gen_op_load_fpr_DT1(DFPREG(rs2
));
3811 gen_helper_fcmpgt32();
3812 gen_op_store_DT0_fpr(DFPREG(rd
));
3814 case 0x02e: /* VIS I fcmpeq32 */
3815 CHECK_FPU_FEATURE(dc
, VIS1
);
3816 gen_op_load_fpr_DT0(DFPREG(rs1
));
3817 gen_op_load_fpr_DT1(DFPREG(rs2
));
3818 gen_helper_fcmpeq32();
3819 gen_op_store_DT0_fpr(DFPREG(rd
));
3821 case 0x031: /* VIS I fmul8x16 */
3822 CHECK_FPU_FEATURE(dc
, VIS1
);
3823 gen_op_load_fpr_DT0(DFPREG(rs1
));
3824 gen_op_load_fpr_DT1(DFPREG(rs2
));
3825 gen_helper_fmul8x16();
3826 gen_op_store_DT0_fpr(DFPREG(rd
));
3828 case 0x033: /* VIS I fmul8x16au */
3829 CHECK_FPU_FEATURE(dc
, VIS1
);
3830 gen_op_load_fpr_DT0(DFPREG(rs1
));
3831 gen_op_load_fpr_DT1(DFPREG(rs2
));
3832 gen_helper_fmul8x16au();
3833 gen_op_store_DT0_fpr(DFPREG(rd
));
3835 case 0x035: /* VIS I fmul8x16al */
3836 CHECK_FPU_FEATURE(dc
, VIS1
);
3837 gen_op_load_fpr_DT0(DFPREG(rs1
));
3838 gen_op_load_fpr_DT1(DFPREG(rs2
));
3839 gen_helper_fmul8x16al();
3840 gen_op_store_DT0_fpr(DFPREG(rd
));
3842 case 0x036: /* VIS I fmul8sux16 */
3843 CHECK_FPU_FEATURE(dc
, VIS1
);
3844 gen_op_load_fpr_DT0(DFPREG(rs1
));
3845 gen_op_load_fpr_DT1(DFPREG(rs2
));
3846 gen_helper_fmul8sux16();
3847 gen_op_store_DT0_fpr(DFPREG(rd
));
3849 case 0x037: /* VIS I fmul8ulx16 */
3850 CHECK_FPU_FEATURE(dc
, VIS1
);
3851 gen_op_load_fpr_DT0(DFPREG(rs1
));
3852 gen_op_load_fpr_DT1(DFPREG(rs2
));
3853 gen_helper_fmul8ulx16();
3854 gen_op_store_DT0_fpr(DFPREG(rd
));
3856 case 0x038: /* VIS I fmuld8sux16 */
3857 CHECK_FPU_FEATURE(dc
, VIS1
);
3858 gen_op_load_fpr_DT0(DFPREG(rs1
));
3859 gen_op_load_fpr_DT1(DFPREG(rs2
));
3860 gen_helper_fmuld8sux16();
3861 gen_op_store_DT0_fpr(DFPREG(rd
));
3863 case 0x039: /* VIS I fmuld8ulx16 */
3864 CHECK_FPU_FEATURE(dc
, VIS1
);
3865 gen_op_load_fpr_DT0(DFPREG(rs1
));
3866 gen_op_load_fpr_DT1(DFPREG(rs2
));
3867 gen_helper_fmuld8ulx16();
3868 gen_op_store_DT0_fpr(DFPREG(rd
));
3870 case 0x03a: /* VIS I fpack32 */
3871 case 0x03b: /* VIS I fpack16 */
3872 case 0x03d: /* VIS I fpackfix */
3873 case 0x03e: /* VIS I pdist */
3876 case 0x048: /* VIS I faligndata */
3877 CHECK_FPU_FEATURE(dc
, VIS1
);
3878 gen_op_load_fpr_DT0(DFPREG(rs1
));
3879 gen_op_load_fpr_DT1(DFPREG(rs2
));
3880 gen_helper_faligndata();
3881 gen_op_store_DT0_fpr(DFPREG(rd
));
3883 case 0x04b: /* VIS I fpmerge */
3884 CHECK_FPU_FEATURE(dc
, VIS1
);
3885 gen_op_load_fpr_DT0(DFPREG(rs1
));
3886 gen_op_load_fpr_DT1(DFPREG(rs2
));
3887 gen_helper_fpmerge();
3888 gen_op_store_DT0_fpr(DFPREG(rd
));
3890 case 0x04c: /* VIS II bshuffle */
3893 case 0x04d: /* VIS I fexpand */
3894 CHECK_FPU_FEATURE(dc
, VIS1
);
3895 gen_op_load_fpr_DT0(DFPREG(rs1
));
3896 gen_op_load_fpr_DT1(DFPREG(rs2
));
3897 gen_helper_fexpand();
3898 gen_op_store_DT0_fpr(DFPREG(rd
));
3900 case 0x050: /* VIS I fpadd16 */
3901 CHECK_FPU_FEATURE(dc
, VIS1
);
3902 gen_op_load_fpr_DT0(DFPREG(rs1
));
3903 gen_op_load_fpr_DT1(DFPREG(rs2
));
3904 gen_helper_fpadd16();
3905 gen_op_store_DT0_fpr(DFPREG(rd
));
3907 case 0x051: /* VIS I fpadd16s */
3908 CHECK_FPU_FEATURE(dc
, VIS1
);
3909 gen_helper_fpadd16s(cpu_fpr
[rd
],
3910 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3912 case 0x052: /* VIS I fpadd32 */
3913 CHECK_FPU_FEATURE(dc
, VIS1
);
3914 gen_op_load_fpr_DT0(DFPREG(rs1
));
3915 gen_op_load_fpr_DT1(DFPREG(rs2
));
3916 gen_helper_fpadd32();
3917 gen_op_store_DT0_fpr(DFPREG(rd
));
3919 case 0x053: /* VIS I fpadd32s */
3920 CHECK_FPU_FEATURE(dc
, VIS1
);
3921 gen_helper_fpadd32s(cpu_fpr
[rd
],
3922 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3924 case 0x054: /* VIS I fpsub16 */
3925 CHECK_FPU_FEATURE(dc
, VIS1
);
3926 gen_op_load_fpr_DT0(DFPREG(rs1
));
3927 gen_op_load_fpr_DT1(DFPREG(rs2
));
3928 gen_helper_fpsub16();
3929 gen_op_store_DT0_fpr(DFPREG(rd
));
3931 case 0x055: /* VIS I fpsub16s */
3932 CHECK_FPU_FEATURE(dc
, VIS1
);
3933 gen_helper_fpsub16s(cpu_fpr
[rd
],
3934 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3936 case 0x056: /* VIS I fpsub32 */
3937 CHECK_FPU_FEATURE(dc
, VIS1
);
3938 gen_op_load_fpr_DT0(DFPREG(rs1
));
3939 gen_op_load_fpr_DT1(DFPREG(rs2
));
3940 gen_helper_fpsub32();
3941 gen_op_store_DT0_fpr(DFPREG(rd
));
3943 case 0x057: /* VIS I fpsub32s */
3944 CHECK_FPU_FEATURE(dc
, VIS1
);
3945 gen_helper_fpsub32s(cpu_fpr
[rd
],
3946 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3948 case 0x060: /* VIS I fzero */
3949 CHECK_FPU_FEATURE(dc
, VIS1
);
3950 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3951 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3953 case 0x061: /* VIS I fzeros */
3954 CHECK_FPU_FEATURE(dc
, VIS1
);
3955 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3957 case 0x062: /* VIS I fnor */
3958 CHECK_FPU_FEATURE(dc
, VIS1
);
3959 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3960 cpu_fpr
[DFPREG(rs2
)]);
3961 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3962 cpu_fpr
[DFPREG(rs2
) + 1]);
3964 case 0x063: /* VIS I fnors */
3965 CHECK_FPU_FEATURE(dc
, VIS1
);
3966 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3968 case 0x064: /* VIS I fandnot2 */
3969 CHECK_FPU_FEATURE(dc
, VIS1
);
3970 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3971 cpu_fpr
[DFPREG(rs2
)]);
3972 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3973 cpu_fpr
[DFPREG(rs1
) + 1],
3974 cpu_fpr
[DFPREG(rs2
) + 1]);
3976 case 0x065: /* VIS I fandnot2s */
3977 CHECK_FPU_FEATURE(dc
, VIS1
);
3978 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3980 case 0x066: /* VIS I fnot2 */
3981 CHECK_FPU_FEATURE(dc
, VIS1
);
3982 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
3983 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3984 cpu_fpr
[DFPREG(rs2
) + 1]);
3986 case 0x067: /* VIS I fnot2s */
3987 CHECK_FPU_FEATURE(dc
, VIS1
);
3988 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3990 case 0x068: /* VIS I fandnot1 */
3991 CHECK_FPU_FEATURE(dc
, VIS1
);
3992 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3993 cpu_fpr
[DFPREG(rs1
)]);
3994 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3995 cpu_fpr
[DFPREG(rs2
) + 1],
3996 cpu_fpr
[DFPREG(rs1
) + 1]);
3998 case 0x069: /* VIS I fandnot1s */
3999 CHECK_FPU_FEATURE(dc
, VIS1
);
4000 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4002 case 0x06a: /* VIS I fnot1 */
4003 CHECK_FPU_FEATURE(dc
, VIS1
);
4004 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4005 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4006 cpu_fpr
[DFPREG(rs1
) + 1]);
4008 case 0x06b: /* VIS I fnot1s */
4009 CHECK_FPU_FEATURE(dc
, VIS1
);
4010 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4012 case 0x06c: /* VIS I fxor */
4013 CHECK_FPU_FEATURE(dc
, VIS1
);
4014 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4015 cpu_fpr
[DFPREG(rs2
)]);
4016 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4017 cpu_fpr
[DFPREG(rs1
) + 1],
4018 cpu_fpr
[DFPREG(rs2
) + 1]);
4020 case 0x06d: /* VIS I fxors */
4021 CHECK_FPU_FEATURE(dc
, VIS1
);
4022 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4024 case 0x06e: /* VIS I fnand */
4025 CHECK_FPU_FEATURE(dc
, VIS1
);
4026 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
4027 cpu_fpr
[DFPREG(rs2
)]);
4028 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
4029 cpu_fpr
[DFPREG(rs2
) + 1]);
4031 case 0x06f: /* VIS I fnands */
4032 CHECK_FPU_FEATURE(dc
, VIS1
);
4033 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4035 case 0x070: /* VIS I fand */
4036 CHECK_FPU_FEATURE(dc
, VIS1
);
4037 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4038 cpu_fpr
[DFPREG(rs2
)]);
4039 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
4040 cpu_fpr
[DFPREG(rs1
) + 1],
4041 cpu_fpr
[DFPREG(rs2
) + 1]);
4043 case 0x071: /* VIS I fands */
4044 CHECK_FPU_FEATURE(dc
, VIS1
);
4045 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4047 case 0x072: /* VIS I fxnor */
4048 CHECK_FPU_FEATURE(dc
, VIS1
);
4049 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4050 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4051 cpu_fpr
[DFPREG(rs1
)]);
4052 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4053 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4054 cpu_fpr
[DFPREG(rs1
) + 1]);
4056 case 0x073: /* VIS I fxnors */
4057 CHECK_FPU_FEATURE(dc
, VIS1
);
4058 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4059 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4061 case 0x074: /* VIS I fsrc1 */
4062 CHECK_FPU_FEATURE(dc
, VIS1
);
4063 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4064 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4065 cpu_fpr
[DFPREG(rs1
) + 1]);
4067 case 0x075: /* VIS I fsrc1s */
4068 CHECK_FPU_FEATURE(dc
, VIS1
);
4069 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4071 case 0x076: /* VIS I fornot2 */
4072 CHECK_FPU_FEATURE(dc
, VIS1
);
4073 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4074 cpu_fpr
[DFPREG(rs2
)]);
4075 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4076 cpu_fpr
[DFPREG(rs1
) + 1],
4077 cpu_fpr
[DFPREG(rs2
) + 1]);
4079 case 0x077: /* VIS I fornot2s */
4080 CHECK_FPU_FEATURE(dc
, VIS1
);
4081 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4083 case 0x078: /* VIS I fsrc2 */
4084 CHECK_FPU_FEATURE(dc
, VIS1
);
4085 gen_op_load_fpr_DT0(DFPREG(rs2
));
4086 gen_op_store_DT0_fpr(DFPREG(rd
));
4088 case 0x079: /* VIS I fsrc2s */
4089 CHECK_FPU_FEATURE(dc
, VIS1
);
4090 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4092 case 0x07a: /* VIS I fornot1 */
4093 CHECK_FPU_FEATURE(dc
, VIS1
);
4094 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4095 cpu_fpr
[DFPREG(rs1
)]);
4096 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4097 cpu_fpr
[DFPREG(rs2
) + 1],
4098 cpu_fpr
[DFPREG(rs1
) + 1]);
4100 case 0x07b: /* VIS I fornot1s */
4101 CHECK_FPU_FEATURE(dc
, VIS1
);
4102 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4104 case 0x07c: /* VIS I for */
4105 CHECK_FPU_FEATURE(dc
, VIS1
);
4106 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4107 cpu_fpr
[DFPREG(rs2
)]);
4108 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4109 cpu_fpr
[DFPREG(rs1
) + 1],
4110 cpu_fpr
[DFPREG(rs2
) + 1]);
4112 case 0x07d: /* VIS I fors */
4113 CHECK_FPU_FEATURE(dc
, VIS1
);
4114 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4116 case 0x07e: /* VIS I fone */
4117 CHECK_FPU_FEATURE(dc
, VIS1
);
4118 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4119 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4121 case 0x07f: /* VIS I fones */
4122 CHECK_FPU_FEATURE(dc
, VIS1
);
4123 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4125 case 0x080: /* VIS I shutdown */
4126 case 0x081: /* VIS II siam */
4135 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4136 #ifdef TARGET_SPARC64
4141 #ifdef TARGET_SPARC64
4142 } else if (xop
== 0x39) { /* V9 return */
4145 save_state(dc
, cpu_cond
);
4146 cpu_src1
= get_src1(insn
, cpu_src1
);
4147 if (IS_IMM
) { /* immediate */
4148 simm
= GET_FIELDs(insn
, 19, 31);
4149 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4150 } else { /* register */
4151 rs2
= GET_FIELD(insn
, 27, 31);
4153 gen_movl_reg_TN(rs2
, cpu_src2
);
4154 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4156 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4158 gen_helper_restore();
4159 gen_mov_pc_npc(dc
, cpu_cond
);
4160 r_const
= tcg_const_i32(3);
4161 gen_helper_check_align(cpu_dst
, r_const
);
4162 tcg_temp_free_i32(r_const
);
4163 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4164 dc
->npc
= DYNAMIC_PC
;
4168 cpu_src1
= get_src1(insn
, cpu_src1
);
4169 if (IS_IMM
) { /* immediate */
4170 simm
= GET_FIELDs(insn
, 19, 31);
4171 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4172 } else { /* register */
4173 rs2
= GET_FIELD(insn
, 27, 31);
4175 gen_movl_reg_TN(rs2
, cpu_src2
);
4176 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4178 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
case 0x38: /* jmpl */
r_pc = tcg_const_tl(dc->pc);
gen_movl_TN_reg(rd, r_pc);
tcg_temp_free(r_pc);
gen_mov_pc_npc(dc, cpu_cond);
r_const = tcg_const_i32(3);
gen_helper_check_align(cpu_dst, r_const);
tcg_temp_free_i32(r_const);
tcg_gen_mov_tl(cpu_npc, cpu_dst);
dc->npc = DYNAMIC_PC;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
case 0x39: /* rett, V9 return */
if (!supervisor(dc))
gen_mov_pc_npc(dc, cpu_cond);
r_const = tcg_const_i32(3);
gen_helper_check_align(cpu_dst, r_const);
tcg_temp_free_i32(r_const);
tcg_gen_mov_tl(cpu_npc, cpu_dst);
dc->npc = DYNAMIC_PC;
case 0x3b: /* flush */
if (!((dc)->def->features & CPU_FEATURE_FLUSH))
gen_helper_flush(cpu_dst);
case 0x3c: /* save */
save_state(dc, cpu_cond);
gen_movl_TN_reg(rd, cpu_dst);
case 0x3d: /* restore */
save_state(dc, cpu_cond);
gen_helper_restore();
gen_movl_TN_reg(rd, cpu_dst);
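/*
 * jmpl and rett above route the computed target through cpu_dst: it is
 * checked for 4-byte alignment with gen_helper_check_align(), copied into
 * cpu_npc, and npc is marked DYNAMIC_PC so the block epilogue falls back to
 * an indirect jump instead of static TB chaining.
 */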
4229 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4230 case 0x3e: /* V9 done/retry */
4234 if (!supervisor(dc
))
4236 dc
->npc
= DYNAMIC_PC
;
4237 dc
->pc
= DYNAMIC_PC
;
4241 if (!supervisor(dc
))
4243 dc
->npc
= DYNAMIC_PC
;
4244 dc
->pc
= DYNAMIC_PC
;
case 3: /* load/store instructions */
unsigned int xop = GET_FIELD(insn, 7, 12);
/* flush pending conditional evaluations before exposing cpu state */
if (dc->cc_op != CC_OP_FLAGS) {
dc->cc_op = CC_OP_FLAGS;
gen_helper_compute_psr();
cpu_src1 = get_src1(insn, cpu_src1);
if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
rs2 = GET_FIELD(insn, 27, 31);
gen_movl_reg_TN(rs2, cpu_src2);
tcg_gen_mov_tl(cpu_addr, cpu_src1);
} else if (IS_IMM) { /* immediate */
simm = GET_FIELDs(insn, 19, 31);
tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
} else { /* register */
rs2 = GET_FIELD(insn, 27, 31);
gen_movl_reg_TN(rs2, cpu_src2);
tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
tcg_gen_mov_tl(cpu_addr, cpu_src1);
if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
    (xop > 0x17 && xop <= 0x1d) ||
    (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
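/*
 * Effective address computation above: cpu_addr is r[rs1] + simm13 for the
 * immediate form and r[rs1] + r[rs2] for the register form; casa/casxa take
 * the address from rs1 alone and keep rs2 as the comparison value for the
 * gen_cas_asi()/gen_casx_asi() paths further down.
 */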
case 0x0: /* ld, V9 lduw, load unsigned word */
gen_address_mask(dc, cpu_addr);
tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
case 0x1: /* ldub, load unsigned byte */
gen_address_mask(dc, cpu_addr);
tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
case 0x2: /* lduh, load unsigned halfword */
gen_address_mask(dc, cpu_addr);
tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
case 0x3: /* ldd, load double word */
save_state(dc, cpu_cond);
r_const = tcg_const_i32(7);
gen_helper_check_align(cpu_addr, r_const); // XXX remove
tcg_temp_free_i32(r_const);
gen_address_mask(dc, cpu_addr);
tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
gen_movl_TN_reg(rd + 1, cpu_tmp0);
tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
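/*
 * ldd above splits the 64-bit memory value: the low word is written to the
 * odd register (rd + 1) directly, while the high word ends up in cpu_val so
 * the common writeback path below stores it to the even register rd.
 */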
4322 case 0x9: /* ldsb, load signed byte */
4323 gen_address_mask(dc
, cpu_addr
);
4324 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4326 case 0xa: /* ldsh, load signed halfword */
4327 gen_address_mask(dc
, cpu_addr
);
4328 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4330 case 0xd: /* ldstub -- XXX: should be atomically */
4334 gen_address_mask(dc
, cpu_addr
);
4335 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4336 r_const
= tcg_const_tl(0xff);
4337 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4338 tcg_temp_free(r_const
);
4341 case 0x0f: /* swap, swap register with memory. Also
4343 CHECK_IU_FEATURE(dc
, SWAP
);
4344 gen_movl_reg_TN(rd
, cpu_val
);
4345 gen_address_mask(dc
, cpu_addr
);
4346 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4347 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4348 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4350 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4351 case 0x10: /* lda, V9 lduwa, load word alternate */
4352 #ifndef TARGET_SPARC64
4355 if (!supervisor(dc
))
4358 save_state(dc
, cpu_cond
);
4359 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4361 case 0x11: /* lduba, load unsigned byte alternate */
4362 #ifndef TARGET_SPARC64
4365 if (!supervisor(dc
))
4368 save_state(dc
, cpu_cond
);
4369 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4371 case 0x12: /* lduha, load unsigned halfword alternate */
4372 #ifndef TARGET_SPARC64
4375 if (!supervisor(dc
))
4378 save_state(dc
, cpu_cond
);
4379 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4381 case 0x13: /* ldda, load double word alternate */
4382 #ifndef TARGET_SPARC64
4385 if (!supervisor(dc
))
4390 save_state(dc
, cpu_cond
);
4391 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4393 case 0x19: /* ldsba, load signed byte alternate */
4394 #ifndef TARGET_SPARC64
4397 if (!supervisor(dc
))
4400 save_state(dc
, cpu_cond
);
4401 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4403 case 0x1a: /* ldsha, load signed halfword alternate */
4404 #ifndef TARGET_SPARC64
4407 if (!supervisor(dc
))
4410 save_state(dc
, cpu_cond
);
4411 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4413 case 0x1d: /* ldstuba -- XXX: should be atomically */
4414 #ifndef TARGET_SPARC64
4417 if (!supervisor(dc
))
4420 save_state(dc
, cpu_cond
);
4421 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4423 case 0x1f: /* swapa, swap reg with alt. memory. Also
4425 CHECK_IU_FEATURE(dc
, SWAP
);
4426 #ifndef TARGET_SPARC64
4429 if (!supervisor(dc
))
4432 save_state(dc
, cpu_cond
);
4433 gen_movl_reg_TN(rd
, cpu_val
);
4434 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4437 #ifndef TARGET_SPARC64
4438 case 0x30: /* ldc */
4439 case 0x31: /* ldcsr */
4440 case 0x33: /* lddc */
4444 #ifdef TARGET_SPARC64
4445 case 0x08: /* V9 ldsw */
4446 gen_address_mask(dc
, cpu_addr
);
4447 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4449 case 0x0b: /* V9 ldx */
4450 gen_address_mask(dc
, cpu_addr
);
4451 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4453 case 0x18: /* V9 ldswa */
4454 save_state(dc
, cpu_cond
);
4455 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4457 case 0x1b: /* V9 ldxa */
4458 save_state(dc
, cpu_cond
);
4459 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4461 case 0x2d: /* V9 prefetch, no effect */
4463 case 0x30: /* V9 ldfa */
4464 save_state(dc
, cpu_cond
);
4465 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4467 case 0x33: /* V9 lddfa */
4468 save_state(dc
, cpu_cond
);
4469 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4471 case 0x3d: /* V9 prefetcha, no effect */
4473 case 0x32: /* V9 ldqfa */
4474 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4475 save_state(dc
, cpu_cond
);
4476 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4482 gen_movl_TN_reg(rd
, cpu_val
);
4483 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4486 } else if (xop
>= 0x20 && xop
< 0x24) {
4487 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4489 save_state(dc
, cpu_cond
);
4491 case 0x20: /* ldf, load fpreg */
4492 gen_address_mask(dc
, cpu_addr
);
4493 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4494 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4496 case 0x21: /* ldfsr, V9 ldxfsr */
4497 #ifdef TARGET_SPARC64
4498 gen_address_mask(dc
, cpu_addr
);
4500 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4501 gen_helper_ldxfsr(cpu_tmp64
);
4503 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4504 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
4505 gen_helper_ldfsr(cpu_tmp32
);
4509 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4510 gen_helper_ldfsr(cpu_tmp32
);
4514 case 0x22: /* ldqf, load quad fpreg */
4518 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4519 r_const
= tcg_const_i32(dc
->mem_idx
);
4520 gen_address_mask(dc
, cpu_addr
);
4521 gen_helper_ldqf(cpu_addr
, r_const
);
4522 tcg_temp_free_i32(r_const
);
4523 gen_op_store_QT0_fpr(QFPREG(rd
));
4526 case 0x23: /* lddf, load double fpreg */
4530 r_const
= tcg_const_i32(dc
->mem_idx
);
4531 gen_address_mask(dc
, cpu_addr
);
4532 gen_helper_lddf(cpu_addr
, r_const
);
4533 tcg_temp_free_i32(r_const
);
4534 gen_op_store_DT0_fpr(DFPREG(rd
));
4540 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4541 xop
== 0xe || xop
== 0x1e) {
4542 gen_movl_reg_TN(rd
, cpu_val
);
4544 case 0x4: /* st, store word */
4545 gen_address_mask(dc
, cpu_addr
);
4546 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4548 case 0x5: /* stb, store byte */
4549 gen_address_mask(dc
, cpu_addr
);
4550 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4552 case 0x6: /* sth, store halfword */
4553 gen_address_mask(dc
, cpu_addr
);
4554 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4556 case 0x7: /* std, store double word */
4562 save_state(dc
, cpu_cond
);
4563 gen_address_mask(dc
, cpu_addr
);
4564 r_const
= tcg_const_i32(7);
4565 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4566 tcg_temp_free_i32(r_const
);
4567 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4568 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4569 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4572 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4573 case 0x14: /* sta, V9 stwa, store word alternate */
4574 #ifndef TARGET_SPARC64
4577 if (!supervisor(dc
))
4580 save_state(dc
, cpu_cond
);
4581 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4582 dc
->npc
= DYNAMIC_PC
;
4584 case 0x15: /* stba, store byte alternate */
4585 #ifndef TARGET_SPARC64
4588 if (!supervisor(dc
))
4591 save_state(dc
, cpu_cond
);
4592 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4593 dc
->npc
= DYNAMIC_PC
;
4595 case 0x16: /* stha, store halfword alternate */
4596 #ifndef TARGET_SPARC64
4599 if (!supervisor(dc
))
4602 save_state(dc
, cpu_cond
);
4603 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4604 dc
->npc
= DYNAMIC_PC
;
4606 case 0x17: /* stda, store double word alternate */
4607 #ifndef TARGET_SPARC64
4610 if (!supervisor(dc
))
4616 save_state(dc
, cpu_cond
);
4617 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4621 #ifdef TARGET_SPARC64
4622 case 0x0e: /* V9 stx */
4623 gen_address_mask(dc
, cpu_addr
);
4624 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4626 case 0x1e: /* V9 stxa */
4627 save_state(dc
, cpu_cond
);
4628 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4629 dc
->npc
= DYNAMIC_PC
;
4635 } else if (xop
> 0x23 && xop
< 0x28) {
4636 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4638 save_state(dc
, cpu_cond
);
4640 case 0x24: /* stf, store fpreg */
4641 gen_address_mask(dc
, cpu_addr
);
4642 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4643 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4645 case 0x25: /* stfsr, V9 stxfsr */
4646 #ifdef TARGET_SPARC64
4647 gen_address_mask(dc
, cpu_addr
);
4648 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4650 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4652 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4654 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4655 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4659 #ifdef TARGET_SPARC64
4660 /* V9 stqf, store quad fpreg */
4664 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4665 gen_op_load_fpr_QT0(QFPREG(rd
));
4666 r_const
= tcg_const_i32(dc
->mem_idx
);
4667 gen_address_mask(dc
, cpu_addr
);
4668 gen_helper_stqf(cpu_addr
, r_const
);
4669 tcg_temp_free_i32(r_const
);
4672 #else /* !TARGET_SPARC64 */
4673 /* stdfq, store floating point queue */
4674 #if defined(CONFIG_USER_ONLY)
4677 if (!supervisor(dc
))
4679 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4684 case 0x27: /* stdf, store double fpreg */
4688 gen_op_load_fpr_DT0(DFPREG(rd
));
4689 r_const
= tcg_const_i32(dc
->mem_idx
);
4690 gen_address_mask(dc
, cpu_addr
);
4691 gen_helper_stdf(cpu_addr
, r_const
);
4692 tcg_temp_free_i32(r_const
);
4698 } else if (xop
> 0x33 && xop
< 0x3f) {
4699 save_state(dc
, cpu_cond
);
4701 #ifdef TARGET_SPARC64
4702 case 0x34: /* V9 stfa */
4703 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4705 case 0x36: /* V9 stqfa */
4709 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4710 r_const
= tcg_const_i32(7);
4711 gen_helper_check_align(cpu_addr
, r_const
);
4712 tcg_temp_free_i32(r_const
);
4713 gen_op_load_fpr_QT0(QFPREG(rd
));
4714 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4717 case 0x37: /* V9 stdfa */
4718 gen_op_load_fpr_DT0(DFPREG(rd
));
4719 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4721 case 0x3c: /* V9 casa */
4722 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4723 gen_movl_TN_reg(rd
, cpu_val
);
4725 case 0x3e: /* V9 casxa */
4726 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4727 gen_movl_TN_reg(rd
, cpu_val
);
4730 case 0x34: /* stc */
4731 case 0x35: /* stcsr */
4732 case 0x36: /* stdcq */
4733 case 0x37: /* stdc */
/* default case for non jump instructions */
if (dc->npc == DYNAMIC_PC) {
dc->pc = DYNAMIC_PC;
} else if (dc->npc == JUMP_PC) {
/* we can do a static jump */
gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
dc->npc = dc->npc + 4;
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_ILL_INSN);
gen_helper_raise_exception(r_const);
tcg_temp_free_i32(r_const);
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_UNIMP_FLUSH);
gen_helper_raise_exception(r_const);
tcg_temp_free_i32(r_const);
#if !defined(CONFIG_USER_ONLY)
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_PRIV_INSN);
gen_helper_raise_exception(r_const);
tcg_temp_free_i32(r_const);
save_state(dc, cpu_cond);
gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
save_state(dc, cpu_cond);
gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
#ifndef TARGET_SPARC64
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_NCP_INSN);
gen_helper_raise_exception(r_const);
tcg_temp_free(r_const);
tcg_temp_free(cpu_tmp1);
tcg_temp_free(cpu_tmp2);
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    memset(dc, 0, sizeof(DisasContext));
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();
    cpu_dst = tcg_temp_local_new();
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
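    /* cpu_dst, cpu_val and cpu_addr are "local" temporaries: unlike the
       plain temps above they keep their values across branches emitted
       while translating a single instruction */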
    max_insns = tb->cflags & CF_COUNT_MASK;
        max_insns = CF_COUNT_MASK;
    if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
        QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
            if (bp->pc == dc->pc) {
                if (dc->pc != pc_start)
                    save_state(dc, cpu_cond);
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
                gen_opc_instr_start[lj++] = 0;
            gen_opc_pc[lj] = dc->pc;
            gen_opc_npc[lj] = dc->npc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
        disas_sparc_insn(dc);
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
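    /* translation stops when the opcode buffer is nearly full, when the
       guest PC approaches a page boundary, or when the per-TB instruction
       budget (max_insns) is exhausted */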
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
    if (dc->pc != DYNAMIC_PC &&
        (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
        /* static PC and NPC: we can use direct chaining */
        gen_goto_tb(dc, 0, dc->pc, dc->npc);
        if (dc->pc != DYNAMIC_PC)
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        save_npc(dc, cpu_cond);
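        /* pc and/or npc is dynamic: direct chaining is not possible, so the
           values that are known are written back to cpu_pc/cpu_npc for the
           generic exit path */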
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
        j = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
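        /* the pending conditional-branch targets are recorded next to the
           opcode bookkeeping so restore_state_to_opc() below can resolve a
           JUMP_PC npc when execution is restarted mid-TB */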
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
    gen_intermediate_code_internal(tb, 0, env);

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
    gen_intermediate_code_internal(tb, 1, env);

void gen_intermediate_code_init(CPUSPARCState *env)
    static const char * const gregnames[8] = {
        NULL, // g0 not used
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",

    /* init various static tables */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                         offsetof(CPUState, regwptr),
#ifdef TARGET_SPARC64
    cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
    cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
    cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
    cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
    cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, tick_cmpr),
    cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, stick_cmpr),
    cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, hstick_cmpr),
    cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
    cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
    cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
    cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, ssr), "ssr");
    cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, version), "ver");
    cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                         offsetof(CPUState, softint),
    cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
    cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
    cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
    cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, cc_src2),
    cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
    cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
    cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
    cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
    cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
    cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
    cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
    cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
    for (i = 1; i < 8; i++)
        cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                          offsetof(CPUState, gregs[i]),
    for (i = 0; i < TARGET_FPREGS; i++)
        cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, fpr[i]),

    /* register helpers */
#define GEN_HELPER 2
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
            env->npc = gen_opc_jump_pc[0];
            env->npc = gen_opc_jump_pc[1];

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();