4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env
, cpu_regwptr
;
43 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
44 static TCGv_i32 cpu_cc_op
;
45 static TCGv_i32 cpu_psr
;
46 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
48 #ifndef CONFIG_USER_ONLY
51 static TCGv cpu_cond
, cpu_dst
, cpu_addr
, cpu_val
;
53 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
55 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
56 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
57 static TCGv_i32 cpu_softint
;
61 /* local register indexes (only used inside old micro ops) */
63 static TCGv_i32 cpu_tmp32
;
64 static TCGv_i64 cpu_tmp64
;
65 /* Floating point registers */
66 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
68 static target_ulong gen_opc_npc
[OPC_BUF_SIZE
];
69 static target_ulong gen_opc_jump_pc
[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext
{
74 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
80 int address_mask_32bit
;
82 uint32_t cc_op
; /* current CC operation */
83 struct TranslationBlock
*tb
;
87 // This function uses non-native bit order
88 #define GET_FIELD(X, FROM, TO) \
89 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
91 // This function uses the order in the manuals, i.e. bit 0 is 2^0
92 #define GET_FIELD_SP(X, FROM, TO) \
93 GET_FIELD(X, 31 - (TO), 31 - (FROM))
95 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
102 #define DFPREG(r) (r & 0x1e)
103 #define QFPREG(r) (r & 0x1c)
106 #define UA2005_HTRAP_MASK 0xff
107 #define V8_TRAP_MASK 0x7f
109 static int sign_extend(int x
, int len
)
112 return (x
<< len
) >> len
;
115 #define IS_IMM (insn & (1<<13))
117 /* floating point registers moves */
118 static void gen_op_load_fpr_DT0(unsigned int src
)
120 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
121 offsetof(CPU_DoubleU
, l
.upper
));
122 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
123 offsetof(CPU_DoubleU
, l
.lower
));
126 static void gen_op_load_fpr_DT1(unsigned int src
)
128 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
129 offsetof(CPU_DoubleU
, l
.upper
));
130 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
131 offsetof(CPU_DoubleU
, l
.lower
));
134 static void gen_op_store_DT0_fpr(unsigned int dst
)
136 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
137 offsetof(CPU_DoubleU
, l
.upper
));
138 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
139 offsetof(CPU_DoubleU
, l
.lower
));
142 static void gen_op_load_fpr_QT0(unsigned int src
)
144 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
145 offsetof(CPU_QuadU
, l
.upmost
));
146 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
147 offsetof(CPU_QuadU
, l
.upper
));
148 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
149 offsetof(CPU_QuadU
, l
.lower
));
150 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
151 offsetof(CPU_QuadU
, l
.lowest
));
154 static void gen_op_load_fpr_QT1(unsigned int src
)
156 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
157 offsetof(CPU_QuadU
, l
.upmost
));
158 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
159 offsetof(CPU_QuadU
, l
.upper
));
160 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
161 offsetof(CPU_QuadU
, l
.lower
));
162 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
163 offsetof(CPU_QuadU
, l
.lowest
));
166 static void gen_op_store_QT0_fpr(unsigned int dst
)
168 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
169 offsetof(CPU_QuadU
, l
.upmost
));
170 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
171 offsetof(CPU_QuadU
, l
.upper
));
172 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
173 offsetof(CPU_QuadU
, l
.lower
));
174 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
175 offsetof(CPU_QuadU
, l
.lowest
));
179 #ifdef CONFIG_USER_ONLY
180 #define supervisor(dc) 0
181 #ifdef TARGET_SPARC64
182 #define hypervisor(dc) 0
185 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
186 #ifdef TARGET_SPARC64
187 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
192 #ifdef TARGET_SPARC64
194 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
196 #define AM_CHECK(dc) (1)
200 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
202 #ifdef TARGET_SPARC64
204 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
208 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
211 tcg_gen_movi_tl(tn
, 0);
213 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
215 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
219 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
224 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
226 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
230 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
231 target_ulong pc
, target_ulong npc
)
233 TranslationBlock
*tb
;
236 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
237 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
239 /* jump to same page: we can use a direct jump */
240 tcg_gen_goto_tb(tb_num
);
241 tcg_gen_movi_tl(cpu_pc
, pc
);
242 tcg_gen_movi_tl(cpu_npc
, npc
);
243 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
245 /* jump to another page: currently not optimized */
246 tcg_gen_movi_tl(cpu_pc
, pc
);
247 tcg_gen_movi_tl(cpu_npc
, npc
);
253 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
255 tcg_gen_extu_i32_tl(reg
, src
);
256 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
257 tcg_gen_andi_tl(reg
, reg
, 0x1);
260 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
262 tcg_gen_extu_i32_tl(reg
, src
);
263 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
264 tcg_gen_andi_tl(reg
, reg
, 0x1);
267 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
269 tcg_gen_extu_i32_tl(reg
, src
);
270 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
271 tcg_gen_andi_tl(reg
, reg
, 0x1);
274 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
276 tcg_gen_extu_i32_tl(reg
, src
);
277 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
278 tcg_gen_andi_tl(reg
, reg
, 0x1);
281 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
287 l1
= gen_new_label();
289 r_temp
= tcg_temp_new();
290 tcg_gen_xor_tl(r_temp
, src1
, src2
);
291 tcg_gen_not_tl(r_temp
, r_temp
);
292 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
293 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
294 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
295 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
296 r_const
= tcg_const_i32(TT_TOVF
);
297 gen_helper_raise_exception(r_const
);
298 tcg_temp_free_i32(r_const
);
300 tcg_temp_free(r_temp
);
303 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
308 l1
= gen_new_label();
309 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
310 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
311 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
312 r_const
= tcg_const_i32(TT_TOVF
);
313 gen_helper_raise_exception(r_const
);
314 tcg_temp_free_i32(r_const
);
318 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
320 tcg_gen_mov_tl(cpu_cc_src
, src1
);
321 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
322 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
323 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
326 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
328 tcg_gen_mov_tl(cpu_cc_src
, src1
);
329 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
330 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
331 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
334 static TCGv_i32
gen_add32_carry32(void)
336 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
338 /* Carry is computed from a previous add: (dst < src) */
339 #if TARGET_LONG_BITS == 64
340 cc_src1_32
= tcg_temp_new_i32();
341 cc_src2_32
= tcg_temp_new_i32();
342 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_dst
);
343 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src
);
345 cc_src1_32
= cpu_cc_dst
;
346 cc_src2_32
= cpu_cc_src
;
349 carry_32
= tcg_temp_new_i32();
350 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
352 #if TARGET_LONG_BITS == 64
353 tcg_temp_free_i32(cc_src1_32
);
354 tcg_temp_free_i32(cc_src2_32
);
360 static TCGv_i32
gen_sub32_carry32(void)
362 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
364 /* Carry is computed from a previous borrow: (src1 < src2) */
365 #if TARGET_LONG_BITS == 64
366 cc_src1_32
= tcg_temp_new_i32();
367 cc_src2_32
= tcg_temp_new_i32();
368 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_src
);
369 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src2
);
371 cc_src1_32
= cpu_cc_src
;
372 cc_src2_32
= cpu_cc_src2
;
375 carry_32
= tcg_temp_new_i32();
376 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
378 #if TARGET_LONG_BITS == 64
379 tcg_temp_free_i32(cc_src1_32
);
380 tcg_temp_free_i32(cc_src2_32
);
386 static void gen_op_addx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
387 TCGv src2
, int update_cc
)
395 /* Carry is known to be zero. Fall back to plain ADD. */
397 gen_op_add_cc(dst
, src1
, src2
);
399 tcg_gen_add_tl(dst
, src1
, src2
);
406 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
408 /* For 32-bit hosts, we can re-use the host's hardware carry
409 generation by using an ADD2 opcode. We discard the low
410 part of the output. Ideally we'd combine this operation
411 with the add that generated the carry in the first place. */
412 TCGv dst_low
= tcg_temp_new();
413 tcg_gen_op6_i32(INDEX_op_add2_i32
, dst_low
, dst
,
414 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
415 tcg_temp_free(dst_low
);
419 carry_32
= gen_add32_carry32();
425 carry_32
= gen_sub32_carry32();
429 /* We need external help to produce the carry. */
430 carry_32
= tcg_temp_new_i32();
431 gen_helper_compute_C_icc(carry_32
);
435 #if TARGET_LONG_BITS == 64
436 carry
= tcg_temp_new();
437 tcg_gen_extu_i32_i64(carry
, carry_32
);
442 tcg_gen_add_tl(dst
, src1
, src2
);
443 tcg_gen_add_tl(dst
, dst
, carry
);
445 tcg_temp_free_i32(carry_32
);
446 #if TARGET_LONG_BITS == 64
447 tcg_temp_free(carry
);
450 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
454 tcg_gen_mov_tl(cpu_cc_src
, src1
);
455 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
456 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
457 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
458 dc
->cc_op
= CC_OP_ADDX
;
462 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
464 tcg_gen_mov_tl(cpu_cc_src
, src1
);
465 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
466 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
467 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
470 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
472 tcg_gen_mov_tl(cpu_cc_src
, src1
);
473 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
474 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
475 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
476 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
477 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
480 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
486 l1
= gen_new_label();
488 r_temp
= tcg_temp_new();
489 tcg_gen_xor_tl(r_temp
, src1
, src2
);
490 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
491 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
492 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
493 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
494 r_const
= tcg_const_i32(TT_TOVF
);
495 gen_helper_raise_exception(r_const
);
496 tcg_temp_free_i32(r_const
);
498 tcg_temp_free(r_temp
);
501 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
503 tcg_gen_mov_tl(cpu_cc_src
, src1
);
504 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
506 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
507 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
508 dc
->cc_op
= CC_OP_LOGIC
;
510 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
511 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
512 dc
->cc_op
= CC_OP_SUB
;
514 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
517 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
519 tcg_gen_mov_tl(cpu_cc_src
, src1
);
520 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
521 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
522 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
525 static void gen_op_subx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
526 TCGv src2
, int update_cc
)
534 /* Carry is known to be zero. Fall back to plain SUB. */
536 gen_op_sub_cc(dst
, src1
, src2
);
538 tcg_gen_sub_tl(dst
, src1
, src2
);
545 carry_32
= gen_add32_carry32();
551 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
553 /* For 32-bit hosts, we can re-use the host's hardware carry
554 generation by using a SUB2 opcode. We discard the low
555 part of the output. Ideally we'd combine this operation
556 with the add that generated the carry in the first place. */
557 TCGv dst_low
= tcg_temp_new();
558 tcg_gen_op6_i32(INDEX_op_sub2_i32
, dst_low
, dst
,
559 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
560 tcg_temp_free(dst_low
);
564 carry_32
= gen_sub32_carry32();
568 /* We need external help to produce the carry. */
569 carry_32
= tcg_temp_new_i32();
570 gen_helper_compute_C_icc(carry_32
);
574 #if TARGET_LONG_BITS == 64
575 carry
= tcg_temp_new();
576 tcg_gen_extu_i32_i64(carry
, carry_32
);
581 tcg_gen_sub_tl(dst
, src1
, src2
);
582 tcg_gen_sub_tl(dst
, dst
, carry
);
584 tcg_temp_free_i32(carry_32
);
585 #if TARGET_LONG_BITS == 64
586 tcg_temp_free(carry
);
589 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
593 tcg_gen_mov_tl(cpu_cc_src
, src1
);
594 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
595 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
596 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
597 dc
->cc_op
= CC_OP_SUBX
;
601 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
603 tcg_gen_mov_tl(cpu_cc_src
, src1
);
604 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
605 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
606 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
609 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
611 tcg_gen_mov_tl(cpu_cc_src
, src1
);
612 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
613 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
614 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
615 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
616 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
619 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
624 l1
= gen_new_label();
625 r_temp
= tcg_temp_new();
631 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
632 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
633 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
634 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
635 tcg_gen_movi_tl(cpu_cc_src2
, 0);
639 // env->y = (b2 << 31) | (env->y >> 1);
640 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
641 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
642 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
643 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
644 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
645 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
648 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
649 gen_mov_reg_V(r_temp
, cpu_psr
);
650 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
651 tcg_temp_free(r_temp
);
653 // T0 = (b1 << 31) | (T0 >> 1);
655 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
656 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
657 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
659 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
661 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
664 static inline void gen_op_multiply(TCGv dst
, TCGv src1
, TCGv src2
, int sign_ext
)
666 TCGv_i32 r_src1
, r_src2
;
667 TCGv_i64 r_temp
, r_temp2
;
669 r_src1
= tcg_temp_new_i32();
670 r_src2
= tcg_temp_new_i32();
672 tcg_gen_trunc_tl_i32(r_src1
, src1
);
673 tcg_gen_trunc_tl_i32(r_src2
, src2
);
675 r_temp
= tcg_temp_new_i64();
676 r_temp2
= tcg_temp_new_i64();
679 tcg_gen_ext_i32_i64(r_temp
, r_src2
);
680 tcg_gen_ext_i32_i64(r_temp2
, r_src1
);
682 tcg_gen_extu_i32_i64(r_temp
, r_src2
);
683 tcg_gen_extu_i32_i64(r_temp2
, r_src1
);
686 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
688 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
689 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
690 tcg_temp_free_i64(r_temp
);
691 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
693 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
695 tcg_temp_free_i64(r_temp2
);
697 tcg_temp_free_i32(r_src1
);
698 tcg_temp_free_i32(r_src2
);
701 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
703 /* zero-extend truncated operands before multiplication */
704 gen_op_multiply(dst
, src1
, src2
, 0);
707 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
709 /* sign-extend truncated operands before multiplication */
710 gen_op_multiply(dst
, src1
, src2
, 1);
713 #ifdef TARGET_SPARC64
714 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
719 l1
= gen_new_label();
720 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
721 r_const
= tcg_const_i32(TT_DIV_ZERO
);
722 gen_helper_raise_exception(r_const
);
723 tcg_temp_free_i32(r_const
);
727 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
730 TCGv r_temp1
, r_temp2
;
732 l1
= gen_new_label();
733 l2
= gen_new_label();
734 r_temp1
= tcg_temp_local_new();
735 r_temp2
= tcg_temp_local_new();
736 tcg_gen_mov_tl(r_temp1
, src1
);
737 tcg_gen_mov_tl(r_temp2
, src2
);
738 gen_trap_ifdivzero_tl(r_temp2
);
739 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp1
, INT64_MIN
, l1
);
740 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp2
, -1, l1
);
741 tcg_gen_movi_i64(dst
, INT64_MIN
);
744 tcg_gen_div_i64(dst
, r_temp1
, r_temp2
);
746 tcg_temp_free(r_temp1
);
747 tcg_temp_free(r_temp2
);
752 static inline void gen_op_eval_ba(TCGv dst
)
754 tcg_gen_movi_tl(dst
, 1);
758 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
760 gen_mov_reg_Z(dst
, src
);
764 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
766 gen_mov_reg_N(cpu_tmp0
, src
);
767 gen_mov_reg_V(dst
, src
);
768 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
769 gen_mov_reg_Z(cpu_tmp0
, src
);
770 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
774 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
776 gen_mov_reg_V(cpu_tmp0
, src
);
777 gen_mov_reg_N(dst
, src
);
778 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
782 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
784 gen_mov_reg_Z(cpu_tmp0
, src
);
785 gen_mov_reg_C(dst
, src
);
786 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
790 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
792 gen_mov_reg_C(dst
, src
);
796 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
798 gen_mov_reg_V(dst
, src
);
802 static inline void gen_op_eval_bn(TCGv dst
)
804 tcg_gen_movi_tl(dst
, 0);
808 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
810 gen_mov_reg_N(dst
, src
);
814 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
816 gen_mov_reg_Z(dst
, src
);
817 tcg_gen_xori_tl(dst
, dst
, 0x1);
821 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
823 gen_mov_reg_N(cpu_tmp0
, src
);
824 gen_mov_reg_V(dst
, src
);
825 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
826 gen_mov_reg_Z(cpu_tmp0
, src
);
827 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
828 tcg_gen_xori_tl(dst
, dst
, 0x1);
832 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
834 gen_mov_reg_V(cpu_tmp0
, src
);
835 gen_mov_reg_N(dst
, src
);
836 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
837 tcg_gen_xori_tl(dst
, dst
, 0x1);
841 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
843 gen_mov_reg_Z(cpu_tmp0
, src
);
844 gen_mov_reg_C(dst
, src
);
845 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
846 tcg_gen_xori_tl(dst
, dst
, 0x1);
850 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
852 gen_mov_reg_C(dst
, src
);
853 tcg_gen_xori_tl(dst
, dst
, 0x1);
857 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
859 gen_mov_reg_N(dst
, src
);
860 tcg_gen_xori_tl(dst
, dst
, 0x1);
864 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
866 gen_mov_reg_V(dst
, src
);
867 tcg_gen_xori_tl(dst
, dst
, 0x1);
/*
  FPSR bit field FCC1 | FCC0:
    0 =
    1 <
    2 >
    3 unordered
*/
877 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
878 unsigned int fcc_offset
)
880 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
881 tcg_gen_andi_tl(reg
, reg
, 0x1);
884 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
885 unsigned int fcc_offset
)
887 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
888 tcg_gen_andi_tl(reg
, reg
, 0x1);
892 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
893 unsigned int fcc_offset
)
895 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
896 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
897 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
900 // 1 or 2: FCC0 ^ FCC1
901 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
902 unsigned int fcc_offset
)
904 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
905 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
906 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
910 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
911 unsigned int fcc_offset
)
913 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
917 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
918 unsigned int fcc_offset
)
920 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
921 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
922 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
923 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
927 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
928 unsigned int fcc_offset
)
930 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
934 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
935 unsigned int fcc_offset
)
937 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
938 tcg_gen_xori_tl(dst
, dst
, 0x1);
939 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
940 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
944 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
945 unsigned int fcc_offset
)
947 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
948 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
949 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
953 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
954 unsigned int fcc_offset
)
956 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
957 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
958 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
959 tcg_gen_xori_tl(dst
, dst
, 0x1);
962 // 0 or 3: !(FCC0 ^ FCC1)
963 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
964 unsigned int fcc_offset
)
966 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
967 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
968 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
969 tcg_gen_xori_tl(dst
, dst
, 0x1);
973 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
974 unsigned int fcc_offset
)
976 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
977 tcg_gen_xori_tl(dst
, dst
, 0x1);
980 // !1: !(FCC0 & !FCC1)
981 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
982 unsigned int fcc_offset
)
984 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
985 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
986 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
987 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
988 tcg_gen_xori_tl(dst
, dst
, 0x1);
992 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
993 unsigned int fcc_offset
)
995 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
996 tcg_gen_xori_tl(dst
, dst
, 0x1);
999 // !2: !(!FCC0 & FCC1)
1000 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1001 unsigned int fcc_offset
)
1003 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1004 tcg_gen_xori_tl(dst
, dst
, 0x1);
1005 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1006 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1007 tcg_gen_xori_tl(dst
, dst
, 0x1);
1010 // !3: !(FCC0 & FCC1)
1011 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1012 unsigned int fcc_offset
)
1014 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1015 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1016 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1017 tcg_gen_xori_tl(dst
, dst
, 0x1);
1020 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1021 target_ulong pc2
, TCGv r_cond
)
1025 l1
= gen_new_label();
1027 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1029 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1032 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1035 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1036 target_ulong pc2
, TCGv r_cond
)
1040 l1
= gen_new_label();
1042 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1044 gen_goto_tb(dc
, 0, pc2
, pc1
);
1047 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1050 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1055 l1
= gen_new_label();
1056 l2
= gen_new_label();
1058 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1060 tcg_gen_movi_tl(cpu_npc
, npc1
);
1064 tcg_gen_movi_tl(cpu_npc
, npc2
);
1068 /* call this function before using the condition register as it may
1069 have been set for a jump */
1070 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1072 if (dc
->npc
== JUMP_PC
) {
1073 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1074 dc
->npc
= DYNAMIC_PC
;
1078 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1080 if (dc
->npc
== JUMP_PC
) {
1081 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1082 dc
->npc
= DYNAMIC_PC
;
1083 } else if (dc
->npc
!= DYNAMIC_PC
) {
1084 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1088 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1090 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1091 /* flush pending conditional evaluations before exposing cpu state */
1092 if (dc
->cc_op
!= CC_OP_FLAGS
) {
1093 dc
->cc_op
= CC_OP_FLAGS
;
1094 gen_helper_compute_psr();
1099 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1101 if (dc
->npc
== JUMP_PC
) {
1102 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1103 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1104 dc
->pc
= DYNAMIC_PC
;
1105 } else if (dc
->npc
== DYNAMIC_PC
) {
1106 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1107 dc
->pc
= DYNAMIC_PC
;
1113 static inline void gen_op_next_insn(void)
1115 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1116 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1119 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
1124 #ifdef TARGET_SPARC64
1132 switch (dc
->cc_op
) {
1136 gen_helper_compute_psr();
1137 dc
->cc_op
= CC_OP_FLAGS
;
1142 gen_op_eval_bn(r_dst
);
1145 gen_op_eval_be(r_dst
, r_src
);
1148 gen_op_eval_ble(r_dst
, r_src
);
1151 gen_op_eval_bl(r_dst
, r_src
);
1154 gen_op_eval_bleu(r_dst
, r_src
);
1157 gen_op_eval_bcs(r_dst
, r_src
);
1160 gen_op_eval_bneg(r_dst
, r_src
);
1163 gen_op_eval_bvs(r_dst
, r_src
);
1166 gen_op_eval_ba(r_dst
);
1169 gen_op_eval_bne(r_dst
, r_src
);
1172 gen_op_eval_bg(r_dst
, r_src
);
1175 gen_op_eval_bge(r_dst
, r_src
);
1178 gen_op_eval_bgu(r_dst
, r_src
);
1181 gen_op_eval_bcc(r_dst
, r_src
);
1184 gen_op_eval_bpos(r_dst
, r_src
);
1187 gen_op_eval_bvc(r_dst
, r_src
);
1192 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1194 unsigned int offset
;
1214 gen_op_eval_bn(r_dst
);
1217 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1220 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1223 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1226 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1229 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1232 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1235 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1238 gen_op_eval_ba(r_dst
);
1241 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1244 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1247 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1250 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1253 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1256 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1259 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1264 #ifdef TARGET_SPARC64
1266 static const int gen_tcg_cond_reg
[8] = {
1277 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1281 l1
= gen_new_label();
1282 tcg_gen_movi_tl(r_dst
, 0);
1283 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1284 tcg_gen_movi_tl(r_dst
, 1);
1289 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1292 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1293 target_ulong target
= dc
->pc
+ offset
;
1296 /* unconditional not taken */
1298 dc
->pc
= dc
->npc
+ 4;
1299 dc
->npc
= dc
->pc
+ 4;
1302 dc
->npc
= dc
->pc
+ 4;
1304 } else if (cond
== 0x8) {
1305 /* unconditional taken */
1308 dc
->npc
= dc
->pc
+ 4;
1312 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1315 flush_cond(dc
, r_cond
);
1316 gen_cond(r_cond
, cc
, cond
, dc
);
1318 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1322 dc
->jump_pc
[0] = target
;
1323 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1324 dc
->jump_pc
[1] = DYNAMIC_PC
;
1325 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1327 dc
->jump_pc
[1] = dc
->npc
+ 4;
1334 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1337 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1338 target_ulong target
= dc
->pc
+ offset
;
1341 /* unconditional not taken */
1343 dc
->pc
= dc
->npc
+ 4;
1344 dc
->npc
= dc
->pc
+ 4;
1347 dc
->npc
= dc
->pc
+ 4;
1349 } else if (cond
== 0x8) {
1350 /* unconditional taken */
1353 dc
->npc
= dc
->pc
+ 4;
1357 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1360 flush_cond(dc
, r_cond
);
1361 gen_fcond(r_cond
, cc
, cond
);
1363 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1367 dc
->jump_pc
[0] = target
;
1368 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1369 dc
->jump_pc
[1] = DYNAMIC_PC
;
1370 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1372 dc
->jump_pc
[1] = dc
->npc
+ 4;
1379 #ifdef TARGET_SPARC64
1380 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1381 TCGv r_cond
, TCGv r_reg
)
1383 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1384 target_ulong target
= dc
->pc
+ offset
;
1386 flush_cond(dc
, r_cond
);
1387 gen_cond_reg(r_cond
, cond
, r_reg
);
1389 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1393 dc
->jump_pc
[0] = target
;
1394 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1395 dc
->jump_pc
[1] = DYNAMIC_PC
;
1396 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1398 dc
->jump_pc
[1] = dc
->npc
+ 4;
1404 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1408 gen_helper_fcmps(r_rs1
, r_rs2
);
1411 gen_helper_fcmps_fcc1(r_rs1
, r_rs2
);
1414 gen_helper_fcmps_fcc2(r_rs1
, r_rs2
);
1417 gen_helper_fcmps_fcc3(r_rs1
, r_rs2
);
1422 static inline void gen_op_fcmpd(int fccno
)
1429 gen_helper_fcmpd_fcc1();
1432 gen_helper_fcmpd_fcc2();
1435 gen_helper_fcmpd_fcc3();
/* Quad-precision FP compare on the selected %fcc field.
   Operands are implicit (presumably the QT0/QT1 helper slots — confirm). */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}
1458 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1462 gen_helper_fcmpes(r_rs1
, r_rs2
);
1465 gen_helper_fcmpes_fcc1(r_rs1
, r_rs2
);
1468 gen_helper_fcmpes_fcc2(r_rs1
, r_rs2
);
1471 gen_helper_fcmpes_fcc3(r_rs1
, r_rs2
);
/* Double-precision FP compare-and-except on the selected %fcc field. */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
/* Quad-precision FP compare-and-except on the selected %fcc field. */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1514 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1516 gen_helper_fcmps(r_rs1
, r_rs2
);
/* sparc32 double compare; fccno is ignored (single fcc field). */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* sparc32 quad compare; fccno is ignored (single fcc field). */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
1529 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1531 gen_helper_fcmpes(r_rs1
, r_rs2
);
/* sparc32 compare-and-except (double precision); fccno is ignored. */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* sparc32 compare-and-except (quad precision); fccno is ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1545 static inline void gen_op_fpexception_im(int fsr_flags
)
1549 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1550 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1551 r_const
= tcg_const_i32(TT_FP_EXCP
);
1552 gen_helper_raise_exception(r_const
);
1553 tcg_temp_free_i32(r_const
);
1556 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1558 #if !defined(CONFIG_USER_ONLY)
1559 if (!dc
->fpu_enabled
) {
1562 save_state(dc
, r_cond
);
1563 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1564 gen_helper_raise_exception(r_const
);
1565 tcg_temp_free_i32(r_const
);
/* Mark the FPU register half containing rd as dirty in %fprs
   (bit 0 for the lower 32 registers, bit 1 for the upper).
   No-op on sparc32, which has no %fprs. */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
1580 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1582 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
/* Reset the softfloat accrued-exception state before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1591 #ifdef TARGET_SPARC64
1592 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1598 r_asi
= tcg_temp_new_i32();
1599 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1601 asi
= GET_FIELD(insn
, 19, 26);
1602 r_asi
= tcg_const_i32(asi
);
1607 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1610 TCGv_i32 r_asi
, r_size
, r_sign
;
1612 r_asi
= gen_get_asi(insn
, addr
);
1613 r_size
= tcg_const_i32(size
);
1614 r_sign
= tcg_const_i32(sign
);
1615 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1616 tcg_temp_free_i32(r_sign
);
1617 tcg_temp_free_i32(r_size
);
1618 tcg_temp_free_i32(r_asi
);
1621 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1623 TCGv_i32 r_asi
, r_size
;
1625 r_asi
= gen_get_asi(insn
, addr
);
1626 r_size
= tcg_const_i32(size
);
1627 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1628 tcg_temp_free_i32(r_size
);
1629 tcg_temp_free_i32(r_asi
);
1632 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1634 TCGv_i32 r_asi
, r_size
, r_rd
;
1636 r_asi
= gen_get_asi(insn
, addr
);
1637 r_size
= tcg_const_i32(size
);
1638 r_rd
= tcg_const_i32(rd
);
1639 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1640 tcg_temp_free_i32(r_rd
);
1641 tcg_temp_free_i32(r_size
);
1642 tcg_temp_free_i32(r_asi
);
1645 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1647 TCGv_i32 r_asi
, r_size
, r_rd
;
1649 r_asi
= gen_get_asi(insn
, addr
);
1650 r_size
= tcg_const_i32(size
);
1651 r_rd
= tcg_const_i32(rd
);
1652 gen_helper_stf_asi(addr
, r_asi
, r_size
, r_rd
);
1653 tcg_temp_free_i32(r_rd
);
1654 tcg_temp_free_i32(r_size
);
1655 tcg_temp_free_i32(r_asi
);
1658 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1660 TCGv_i32 r_asi
, r_size
, r_sign
;
1662 r_asi
= gen_get_asi(insn
, addr
);
1663 r_size
= tcg_const_i32(4);
1664 r_sign
= tcg_const_i32(0);
1665 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1666 tcg_temp_free_i32(r_sign
);
1667 gen_helper_st_asi(addr
, dst
, r_asi
, r_size
);
1668 tcg_temp_free_i32(r_size
);
1669 tcg_temp_free_i32(r_asi
);
1670 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1673 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1675 TCGv_i32 r_asi
, r_rd
;
1677 r_asi
= gen_get_asi(insn
, addr
);
1678 r_rd
= tcg_const_i32(rd
);
1679 gen_helper_ldda_asi(addr
, r_asi
, r_rd
);
1680 tcg_temp_free_i32(r_rd
);
1681 tcg_temp_free_i32(r_asi
);
1684 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1686 TCGv_i32 r_asi
, r_size
;
1688 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1689 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1690 r_asi
= gen_get_asi(insn
, addr
);
1691 r_size
= tcg_const_i32(8);
1692 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1693 tcg_temp_free_i32(r_size
);
1694 tcg_temp_free_i32(r_asi
);
1697 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1703 r_val1
= tcg_temp_new();
1704 gen_movl_reg_TN(rd
, r_val1
);
1705 r_asi
= gen_get_asi(insn
, addr
);
1706 gen_helper_cas_asi(dst
, addr
, r_val1
, val2
, r_asi
);
1707 tcg_temp_free_i32(r_asi
);
1708 tcg_temp_free(r_val1
);
1711 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1716 gen_movl_reg_TN(rd
, cpu_tmp64
);
1717 r_asi
= gen_get_asi(insn
, addr
);
1718 gen_helper_casx_asi(dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1719 tcg_temp_free_i32(r_asi
);
1722 #elif !defined(CONFIG_USER_ONLY)
1724 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1727 TCGv_i32 r_asi
, r_size
, r_sign
;
1729 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1730 r_size
= tcg_const_i32(size
);
1731 r_sign
= tcg_const_i32(sign
);
1732 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1733 tcg_temp_free(r_sign
);
1734 tcg_temp_free(r_size
);
1735 tcg_temp_free(r_asi
);
1736 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1739 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1741 TCGv_i32 r_asi
, r_size
;
1743 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1744 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1745 r_size
= tcg_const_i32(size
);
1746 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1747 tcg_temp_free(r_size
);
1748 tcg_temp_free(r_asi
);
1751 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1753 TCGv_i32 r_asi
, r_size
, r_sign
;
1756 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1757 r_size
= tcg_const_i32(4);
1758 r_sign
= tcg_const_i32(0);
1759 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1760 tcg_temp_free(r_sign
);
1761 r_val
= tcg_temp_new_i64();
1762 tcg_gen_extu_tl_i64(r_val
, dst
);
1763 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1764 tcg_temp_free_i64(r_val
);
1765 tcg_temp_free(r_size
);
1766 tcg_temp_free(r_asi
);
1767 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1770 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1772 TCGv_i32 r_asi
, r_size
, r_sign
;
1774 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1775 r_size
= tcg_const_i32(8);
1776 r_sign
= tcg_const_i32(0);
1777 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1778 tcg_temp_free(r_sign
);
1779 tcg_temp_free(r_size
);
1780 tcg_temp_free(r_asi
);
1781 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1782 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1783 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1784 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1785 gen_movl_TN_reg(rd
, hi
);
1788 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1790 TCGv_i32 r_asi
, r_size
;
1792 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1793 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1794 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1795 r_size
= tcg_const_i32(8);
1796 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1797 tcg_temp_free(r_size
);
1798 tcg_temp_free(r_asi
);
1802 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1803 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1806 TCGv_i32 r_asi
, r_size
;
1808 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1810 r_val
= tcg_const_i64(0xffULL
);
1811 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1812 r_size
= tcg_const_i32(1);
1813 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1814 tcg_temp_free_i32(r_size
);
1815 tcg_temp_free_i32(r_asi
);
1816 tcg_temp_free_i64(r_val
);
1820 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1825 rs1
= GET_FIELD(insn
, 13, 17);
1827 tcg_gen_movi_tl(def
, 0);
1828 } else if (rs1
< 8) {
1829 r_rs1
= cpu_gregs
[rs1
];
1831 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1836 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1840 if (IS_IMM
) { /* immediate */
1841 target_long simm
= GET_FIELDs(insn
, 19, 31);
1842 tcg_gen_movi_tl(def
, simm
);
1843 } else { /* register */
1844 unsigned int rs2
= GET_FIELD(insn
, 27, 31);
1846 tcg_gen_movi_tl(def
, 0);
1847 } else if (rs2
< 8) {
1848 r_rs2
= cpu_gregs
[rs2
];
1850 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
1856 #ifdef TARGET_SPARC64
1857 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr
, TCGv_ptr cpu_env
)
1859 TCGv_i32 r_tl
= tcg_temp_new_i32();
1861 /* load env->tl into r_tl */
1862 tcg_gen_ld_i32(r_tl
, cpu_env
, offsetof(CPUSPARCState
, tl
));
1864 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1865 tcg_gen_andi_i32(r_tl
, r_tl
, MAXTL_MASK
);
1867 /* calculate offset to current trap state from env->ts, reuse r_tl */
1868 tcg_gen_muli_i32(r_tl
, r_tl
, sizeof (trap_state
));
1869 tcg_gen_addi_ptr(r_tsptr
, cpu_env
, offsetof(CPUState
, ts
));
1871 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1873 TCGv_ptr r_tl_tmp
= tcg_temp_new_ptr();
1874 tcg_gen_ext_i32_ptr(r_tl_tmp
, r_tl
);
1875 tcg_gen_add_ptr(r_tsptr
, r_tsptr
, r_tl_tmp
);
1876 tcg_temp_free_ptr(r_tl_tmp
);
1879 tcg_temp_free_i32(r_tl
);
/* Feature gates used inside disas_sparc_insn: bail out of decoding when
   the CPU model lacks the required feature.  NOTE(review): the jump
   targets were elided in this extraction — presumably the illegal_insn /
   nfpu_insn labels of disas_sparc_insn; confirm against that function. */
#define CHECK_IU_FEATURE(dc, FEATURE)                         \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))     \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                        \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))     \
        goto nfpu_insn;
1890 /* before an instruction, dc->pc must be static */
1891 static void disas_sparc_insn(DisasContext
* dc
)
1893 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1894 TCGv cpu_src1
, cpu_src2
, cpu_tmp1
, cpu_tmp2
;
1897 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
)))
1898 tcg_gen_debug_insn_start(dc
->pc
);
1899 insn
= ldl_code(dc
->pc
);
1900 opc
= GET_FIELD(insn
, 0, 1);
1902 rd
= GET_FIELD(insn
, 2, 6);
1904 cpu_tmp1
= cpu_src1
= tcg_temp_new();
1905 cpu_tmp2
= cpu_src2
= tcg_temp_new();
1908 case 0: /* branches/sethi */
1910 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1913 #ifdef TARGET_SPARC64
1914 case 0x1: /* V9 BPcc */
1918 target
= GET_FIELD_SP(insn
, 0, 18);
1919 target
= sign_extend(target
, 19);
1921 cc
= GET_FIELD_SP(insn
, 20, 21);
1923 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1925 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1930 case 0x3: /* V9 BPr */
1932 target
= GET_FIELD_SP(insn
, 0, 13) |
1933 (GET_FIELD_SP(insn
, 20, 21) << 14);
1934 target
= sign_extend(target
, 16);
1936 cpu_src1
= get_src1(insn
, cpu_src1
);
1937 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1940 case 0x5: /* V9 FBPcc */
1942 int cc
= GET_FIELD_SP(insn
, 20, 21);
1943 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1945 target
= GET_FIELD_SP(insn
, 0, 18);
1946 target
= sign_extend(target
, 19);
1948 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1952 case 0x7: /* CBN+x */
1957 case 0x2: /* BN+x */
1959 target
= GET_FIELD(insn
, 10, 31);
1960 target
= sign_extend(target
, 22);
1962 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1965 case 0x6: /* FBN+x */
1967 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1969 target
= GET_FIELD(insn
, 10, 31);
1970 target
= sign_extend(target
, 22);
1972 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1975 case 0x4: /* SETHI */
1977 uint32_t value
= GET_FIELD(insn
, 10, 31);
1980 r_const
= tcg_const_tl(value
<< 10);
1981 gen_movl_TN_reg(rd
, r_const
);
1982 tcg_temp_free(r_const
);
1985 case 0x0: /* UNIMPL */
1994 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1997 r_const
= tcg_const_tl(dc
->pc
);
1998 gen_movl_TN_reg(15, r_const
);
1999 tcg_temp_free(r_const
);
2001 gen_mov_pc_npc(dc
, cpu_cond
);
2005 case 2: /* FPU & Logical Operations */
2007 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2008 if (xop
== 0x3a) { /* generate trap */
2011 cpu_src1
= get_src1(insn
, cpu_src1
);
2013 rs2
= GET_FIELD(insn
, 25, 31);
2014 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2016 rs2
= GET_FIELD(insn
, 27, 31);
2018 gen_movl_reg_TN(rs2
, cpu_src2
);
2019 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2021 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2024 cond
= GET_FIELD(insn
, 3, 6);
2025 if (cond
== 0x8) { /* Trap Always */
2026 save_state(dc
, cpu_cond
);
2027 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2029 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2031 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2032 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2033 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2036 dc
->def
->features
& CPU_FEATURE_TA0_SHUTDOWN
) {
2038 gen_helper_shutdown();
2041 gen_helper_raise_exception(cpu_tmp32
);
2043 } else if (cond
!= 0) {
2044 TCGv r_cond
= tcg_temp_new();
2046 #ifdef TARGET_SPARC64
2048 int cc
= GET_FIELD_SP(insn
, 11, 12);
2050 save_state(dc
, cpu_cond
);
2052 gen_cond(r_cond
, 0, cond
, dc
);
2054 gen_cond(r_cond
, 1, cond
, dc
);
2058 save_state(dc
, cpu_cond
);
2059 gen_cond(r_cond
, 0, cond
, dc
);
2061 l1
= gen_new_label();
2062 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2064 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2066 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2068 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2069 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2070 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2071 gen_helper_raise_exception(cpu_tmp32
);
2074 tcg_temp_free(r_cond
);
2080 } else if (xop
== 0x28) {
2081 rs1
= GET_FIELD(insn
, 13, 17);
2084 #ifndef TARGET_SPARC64
2085 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2086 manual, rdy on the microSPARC
2088 case 0x0f: /* stbar in the SPARCv8 manual,
2089 rdy on the microSPARC II */
2090 case 0x10 ... 0x1f: /* implementation-dependent in the
2091 SPARCv8 manual, rdy on the
2094 if (rs1
== 0x11 && dc
->def
->features
& CPU_FEATURE_ASR17
) {
2097 /* Read Asr17 for a Leon3 monoprocessor */
2098 r_const
= tcg_const_tl((1 << 8)
2099 | (dc
->def
->nwindows
- 1));
2100 gen_movl_TN_reg(rd
, r_const
);
2101 tcg_temp_free(r_const
);
2105 gen_movl_TN_reg(rd
, cpu_y
);
2107 #ifdef TARGET_SPARC64
2108 case 0x2: /* V9 rdccr */
2109 gen_helper_compute_psr();
2110 gen_helper_rdccr(cpu_dst
);
2111 gen_movl_TN_reg(rd
, cpu_dst
);
2113 case 0x3: /* V9 rdasi */
2114 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2115 gen_movl_TN_reg(rd
, cpu_dst
);
2117 case 0x4: /* V9 rdtick */
2121 r_tickptr
= tcg_temp_new_ptr();
2122 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2123 offsetof(CPUState
, tick
));
2124 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2125 tcg_temp_free_ptr(r_tickptr
);
2126 gen_movl_TN_reg(rd
, cpu_dst
);
2129 case 0x5: /* V9 rdpc */
2133 r_const
= tcg_const_tl(dc
->pc
);
2134 gen_movl_TN_reg(rd
, r_const
);
2135 tcg_temp_free(r_const
);
2138 case 0x6: /* V9 rdfprs */
2139 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2140 gen_movl_TN_reg(rd
, cpu_dst
);
2142 case 0xf: /* V9 membar */
2143 break; /* no effect */
2144 case 0x13: /* Graphics Status */
2145 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2147 gen_movl_TN_reg(rd
, cpu_gsr
);
2149 case 0x16: /* Softint */
2150 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2151 gen_movl_TN_reg(rd
, cpu_dst
);
2153 case 0x17: /* Tick compare */
2154 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2156 case 0x18: /* System tick */
2160 r_tickptr
= tcg_temp_new_ptr();
2161 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2162 offsetof(CPUState
, stick
));
2163 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2164 tcg_temp_free_ptr(r_tickptr
);
2165 gen_movl_TN_reg(rd
, cpu_dst
);
2168 case 0x19: /* System tick compare */
2169 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2171 case 0x10: /* Performance Control */
2172 case 0x11: /* Performance Instrumentation Counter */
2173 case 0x12: /* Dispatch Control */
2174 case 0x14: /* Softint set, WO */
2175 case 0x15: /* Softint clear, WO */
2180 #if !defined(CONFIG_USER_ONLY)
2181 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2182 #ifndef TARGET_SPARC64
2183 if (!supervisor(dc
))
2185 gen_helper_compute_psr();
2186 dc
->cc_op
= CC_OP_FLAGS
;
2187 gen_helper_rdpsr(cpu_dst
);
2189 CHECK_IU_FEATURE(dc
, HYPV
);
2190 if (!hypervisor(dc
))
2192 rs1
= GET_FIELD(insn
, 13, 17);
2195 // gen_op_rdhpstate();
2198 // gen_op_rdhtstate();
2201 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2204 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2207 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2209 case 31: // hstick_cmpr
2210 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2216 gen_movl_TN_reg(rd
, cpu_dst
);
2218 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2219 if (!supervisor(dc
))
2221 #ifdef TARGET_SPARC64
2222 rs1
= GET_FIELD(insn
, 13, 17);
2228 r_tsptr
= tcg_temp_new_ptr();
2229 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2230 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2231 offsetof(trap_state
, tpc
));
2232 tcg_temp_free_ptr(r_tsptr
);
2239 r_tsptr
= tcg_temp_new_ptr();
2240 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2241 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2242 offsetof(trap_state
, tnpc
));
2243 tcg_temp_free_ptr(r_tsptr
);
2250 r_tsptr
= tcg_temp_new_ptr();
2251 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2252 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2253 offsetof(trap_state
, tstate
));
2254 tcg_temp_free_ptr(r_tsptr
);
2261 r_tsptr
= tcg_temp_new_ptr();
2262 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2263 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2264 offsetof(trap_state
, tt
));
2265 tcg_temp_free_ptr(r_tsptr
);
2266 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2273 r_tickptr
= tcg_temp_new_ptr();
2274 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2275 offsetof(CPUState
, tick
));
2276 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2277 gen_movl_TN_reg(rd
, cpu_tmp0
);
2278 tcg_temp_free_ptr(r_tickptr
);
2282 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2285 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2286 offsetof(CPUSPARCState
, pstate
));
2287 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2290 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2291 offsetof(CPUSPARCState
, tl
));
2292 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2295 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2296 offsetof(CPUSPARCState
, psrpil
));
2297 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2300 gen_helper_rdcwp(cpu_tmp0
);
2303 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2304 offsetof(CPUSPARCState
, cansave
));
2305 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2307 case 11: // canrestore
2308 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2309 offsetof(CPUSPARCState
, canrestore
));
2310 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2312 case 12: // cleanwin
2313 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2314 offsetof(CPUSPARCState
, cleanwin
));
2315 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2317 case 13: // otherwin
2318 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2319 offsetof(CPUSPARCState
, otherwin
));
2320 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2323 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2324 offsetof(CPUSPARCState
, wstate
));
2325 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2327 case 16: // UA2005 gl
2328 CHECK_IU_FEATURE(dc
, GL
);
2329 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2330 offsetof(CPUSPARCState
, gl
));
2331 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2333 case 26: // UA2005 strand status
2334 CHECK_IU_FEATURE(dc
, HYPV
);
2335 if (!hypervisor(dc
))
2337 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2340 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2347 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2349 gen_movl_TN_reg(rd
, cpu_tmp0
);
2351 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2352 #ifdef TARGET_SPARC64
2353 save_state(dc
, cpu_cond
);
2354 gen_helper_flushw();
2356 if (!supervisor(dc
))
2358 gen_movl_TN_reg(rd
, cpu_tbr
);
2362 } else if (xop
== 0x34) { /* FPU Operations */
2363 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2365 gen_op_clear_ieee_excp_and_FTT();
2366 rs1
= GET_FIELD(insn
, 13, 17);
2367 rs2
= GET_FIELD(insn
, 27, 31);
2368 xop
= GET_FIELD(insn
, 18, 26);
2369 save_state(dc
, cpu_cond
);
2371 case 0x1: /* fmovs */
2372 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2373 gen_update_fprs_dirty(rd
);
2375 case 0x5: /* fnegs */
2376 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2377 gen_update_fprs_dirty(rd
);
2379 case 0x9: /* fabss */
2380 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2381 gen_update_fprs_dirty(rd
);
2383 case 0x29: /* fsqrts */
2384 CHECK_FPU_FEATURE(dc
, FSQRT
);
2385 gen_clear_float_exceptions();
2386 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2387 gen_helper_check_ieee_exceptions();
2388 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2389 gen_update_fprs_dirty(rd
);
2391 case 0x2a: /* fsqrtd */
2392 CHECK_FPU_FEATURE(dc
, FSQRT
);
2393 gen_op_load_fpr_DT1(DFPREG(rs2
));
2394 gen_clear_float_exceptions();
2395 gen_helper_fsqrtd();
2396 gen_helper_check_ieee_exceptions();
2397 gen_op_store_DT0_fpr(DFPREG(rd
));
2398 gen_update_fprs_dirty(DFPREG(rd
));
2400 case 0x2b: /* fsqrtq */
2401 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2402 gen_op_load_fpr_QT1(QFPREG(rs2
));
2403 gen_clear_float_exceptions();
2404 gen_helper_fsqrtq();
2405 gen_helper_check_ieee_exceptions();
2406 gen_op_store_QT0_fpr(QFPREG(rd
));
2407 gen_update_fprs_dirty(QFPREG(rd
));
2409 case 0x41: /* fadds */
2410 gen_clear_float_exceptions();
2411 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2412 gen_helper_check_ieee_exceptions();
2413 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2414 gen_update_fprs_dirty(rd
);
2416 case 0x42: /* faddd */
2417 gen_op_load_fpr_DT0(DFPREG(rs1
));
2418 gen_op_load_fpr_DT1(DFPREG(rs2
));
2419 gen_clear_float_exceptions();
2421 gen_helper_check_ieee_exceptions();
2422 gen_op_store_DT0_fpr(DFPREG(rd
));
2423 gen_update_fprs_dirty(DFPREG(rd
));
2425 case 0x43: /* faddq */
2426 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2427 gen_op_load_fpr_QT0(QFPREG(rs1
));
2428 gen_op_load_fpr_QT1(QFPREG(rs2
));
2429 gen_clear_float_exceptions();
2431 gen_helper_check_ieee_exceptions();
2432 gen_op_store_QT0_fpr(QFPREG(rd
));
2433 gen_update_fprs_dirty(QFPREG(rd
));
2435 case 0x45: /* fsubs */
2436 gen_clear_float_exceptions();
2437 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2438 gen_helper_check_ieee_exceptions();
2439 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2440 gen_update_fprs_dirty(rd
);
2442 case 0x46: /* fsubd */
2443 gen_op_load_fpr_DT0(DFPREG(rs1
));
2444 gen_op_load_fpr_DT1(DFPREG(rs2
));
2445 gen_clear_float_exceptions();
2447 gen_helper_check_ieee_exceptions();
2448 gen_op_store_DT0_fpr(DFPREG(rd
));
2449 gen_update_fprs_dirty(DFPREG(rd
));
2451 case 0x47: /* fsubq */
2452 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2453 gen_op_load_fpr_QT0(QFPREG(rs1
));
2454 gen_op_load_fpr_QT1(QFPREG(rs2
));
2455 gen_clear_float_exceptions();
2457 gen_helper_check_ieee_exceptions();
2458 gen_op_store_QT0_fpr(QFPREG(rd
));
2459 gen_update_fprs_dirty(QFPREG(rd
));
2461 case 0x49: /* fmuls */
2462 CHECK_FPU_FEATURE(dc
, FMUL
);
2463 gen_clear_float_exceptions();
2464 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2465 gen_helper_check_ieee_exceptions();
2466 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2467 gen_update_fprs_dirty(rd
);
2469 case 0x4a: /* fmuld */
2470 CHECK_FPU_FEATURE(dc
, FMUL
);
2471 gen_op_load_fpr_DT0(DFPREG(rs1
));
2472 gen_op_load_fpr_DT1(DFPREG(rs2
));
2473 gen_clear_float_exceptions();
2475 gen_helper_check_ieee_exceptions();
2476 gen_op_store_DT0_fpr(DFPREG(rd
));
2477 gen_update_fprs_dirty(DFPREG(rd
));
2479 case 0x4b: /* fmulq */
2480 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2481 CHECK_FPU_FEATURE(dc
, FMUL
);
2482 gen_op_load_fpr_QT0(QFPREG(rs1
));
2483 gen_op_load_fpr_QT1(QFPREG(rs2
));
2484 gen_clear_float_exceptions();
2486 gen_helper_check_ieee_exceptions();
2487 gen_op_store_QT0_fpr(QFPREG(rd
));
2488 gen_update_fprs_dirty(QFPREG(rd
));
2490 case 0x4d: /* fdivs */
2491 gen_clear_float_exceptions();
2492 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2493 gen_helper_check_ieee_exceptions();
2494 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2495 gen_update_fprs_dirty(rd
);
2497 case 0x4e: /* fdivd */
2498 gen_op_load_fpr_DT0(DFPREG(rs1
));
2499 gen_op_load_fpr_DT1(DFPREG(rs2
));
2500 gen_clear_float_exceptions();
2502 gen_helper_check_ieee_exceptions();
2503 gen_op_store_DT0_fpr(DFPREG(rd
));
2504 gen_update_fprs_dirty(DFPREG(rd
));
2506 case 0x4f: /* fdivq */
2507 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2508 gen_op_load_fpr_QT0(QFPREG(rs1
));
2509 gen_op_load_fpr_QT1(QFPREG(rs2
));
2510 gen_clear_float_exceptions();
2512 gen_helper_check_ieee_exceptions();
2513 gen_op_store_QT0_fpr(QFPREG(rd
));
2514 gen_update_fprs_dirty(QFPREG(rd
));
2516 case 0x69: /* fsmuld */
2517 CHECK_FPU_FEATURE(dc
, FSMULD
);
2518 gen_clear_float_exceptions();
2519 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2520 gen_helper_check_ieee_exceptions();
2521 gen_op_store_DT0_fpr(DFPREG(rd
));
2522 gen_update_fprs_dirty(DFPREG(rd
));
2524 case 0x6e: /* fdmulq */
2525 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2526 gen_op_load_fpr_DT0(DFPREG(rs1
));
2527 gen_op_load_fpr_DT1(DFPREG(rs2
));
2528 gen_clear_float_exceptions();
2529 gen_helper_fdmulq();
2530 gen_helper_check_ieee_exceptions();
2531 gen_op_store_QT0_fpr(QFPREG(rd
));
2532 gen_update_fprs_dirty(QFPREG(rd
));
2534 case 0xc4: /* fitos */
2535 gen_clear_float_exceptions();
2536 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2537 gen_helper_check_ieee_exceptions();
2538 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2539 gen_update_fprs_dirty(rd
);
2541 case 0xc6: /* fdtos */
2542 gen_op_load_fpr_DT1(DFPREG(rs2
));
2543 gen_clear_float_exceptions();
2544 gen_helper_fdtos(cpu_tmp32
);
2545 gen_helper_check_ieee_exceptions();
2546 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2547 gen_update_fprs_dirty(rd
);
2549 case 0xc7: /* fqtos */
2550 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2551 gen_op_load_fpr_QT1(QFPREG(rs2
));
2552 gen_clear_float_exceptions();
2553 gen_helper_fqtos(cpu_tmp32
);
2554 gen_helper_check_ieee_exceptions();
2555 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2556 gen_update_fprs_dirty(rd
);
2558 case 0xc8: /* fitod */
2559 gen_helper_fitod(cpu_fpr
[rs2
]);
2560 gen_op_store_DT0_fpr(DFPREG(rd
));
2561 gen_update_fprs_dirty(DFPREG(rd
));
2563 case 0xc9: /* fstod */
2564 gen_helper_fstod(cpu_fpr
[rs2
]);
2565 gen_op_store_DT0_fpr(DFPREG(rd
));
2566 gen_update_fprs_dirty(DFPREG(rd
));
2568 case 0xcb: /* fqtod */
2569 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2570 gen_op_load_fpr_QT1(QFPREG(rs2
));
2571 gen_clear_float_exceptions();
2573 gen_helper_check_ieee_exceptions();
2574 gen_op_store_DT0_fpr(DFPREG(rd
));
2575 gen_update_fprs_dirty(DFPREG(rd
));
2577 case 0xcc: /* fitoq */
2578 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2579 gen_helper_fitoq(cpu_fpr
[rs2
]);
2580 gen_op_store_QT0_fpr(QFPREG(rd
));
2581 gen_update_fprs_dirty(QFPREG(rd
));
2583 case 0xcd: /* fstoq */
2584 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2585 gen_helper_fstoq(cpu_fpr
[rs2
]);
2586 gen_op_store_QT0_fpr(QFPREG(rd
));
2587 gen_update_fprs_dirty(QFPREG(rd
));
2589 case 0xce: /* fdtoq */
2590 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2591 gen_op_load_fpr_DT1(DFPREG(rs2
));
2593 gen_op_store_QT0_fpr(QFPREG(rd
));
2594 gen_update_fprs_dirty(QFPREG(rd
));
2596 case 0xd1: /* fstoi */
2597 gen_clear_float_exceptions();
2598 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2599 gen_helper_check_ieee_exceptions();
2600 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2601 gen_update_fprs_dirty(rd
);
2603 case 0xd2: /* fdtoi */
2604 gen_op_load_fpr_DT1(DFPREG(rs2
));
2605 gen_clear_float_exceptions();
2606 gen_helper_fdtoi(cpu_tmp32
);
2607 gen_helper_check_ieee_exceptions();
2608 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2609 gen_update_fprs_dirty(rd
);
2611 case 0xd3: /* fqtoi */
2612 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2613 gen_op_load_fpr_QT1(QFPREG(rs2
));
2614 gen_clear_float_exceptions();
2615 gen_helper_fqtoi(cpu_tmp32
);
2616 gen_helper_check_ieee_exceptions();
2617 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2618 gen_update_fprs_dirty(rd
);
2620 #ifdef TARGET_SPARC64
2621 case 0x2: /* V9 fmovd */
2622 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2623 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2624 cpu_fpr
[DFPREG(rs2
) + 1]);
2625 gen_update_fprs_dirty(DFPREG(rd
));
2627 case 0x3: /* V9 fmovq */
2628 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2629 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2630 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2631 cpu_fpr
[QFPREG(rs2
) + 1]);
2632 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2633 cpu_fpr
[QFPREG(rs2
) + 2]);
2634 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2635 cpu_fpr
[QFPREG(rs2
) + 3]);
2636 gen_update_fprs_dirty(QFPREG(rd
));
2638 case 0x6: /* V9 fnegd */
2639 gen_op_load_fpr_DT1(DFPREG(rs2
));
2641 gen_op_store_DT0_fpr(DFPREG(rd
));
2642 gen_update_fprs_dirty(DFPREG(rd
));
2644 case 0x7: /* V9 fnegq */
2645 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2646 gen_op_load_fpr_QT1(QFPREG(rs2
));
2648 gen_op_store_QT0_fpr(QFPREG(rd
));
2649 gen_update_fprs_dirty(QFPREG(rd
));
2651 case 0xa: /* V9 fabsd */
2652 gen_op_load_fpr_DT1(DFPREG(rs2
));
2654 gen_op_store_DT0_fpr(DFPREG(rd
));
2655 gen_update_fprs_dirty(DFPREG(rd
));
2657 case 0xb: /* V9 fabsq */
2658 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2659 gen_op_load_fpr_QT1(QFPREG(rs2
));
2661 gen_op_store_QT0_fpr(QFPREG(rd
));
2662 gen_update_fprs_dirty(QFPREG(rd
));
2664 case 0x81: /* V9 fstox */
2665 gen_clear_float_exceptions();
2666 gen_helper_fstox(cpu_fpr
[rs2
]);
2667 gen_helper_check_ieee_exceptions();
2668 gen_op_store_DT0_fpr(DFPREG(rd
));
2669 gen_update_fprs_dirty(DFPREG(rd
));
2671 case 0x82: /* V9 fdtox */
2672 gen_op_load_fpr_DT1(DFPREG(rs2
));
2673 gen_clear_float_exceptions();
2675 gen_helper_check_ieee_exceptions();
2676 gen_op_store_DT0_fpr(DFPREG(rd
));
2677 gen_update_fprs_dirty(DFPREG(rd
));
2679 case 0x83: /* V9 fqtox */
2680 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2681 gen_op_load_fpr_QT1(QFPREG(rs2
));
2682 gen_clear_float_exceptions();
2684 gen_helper_check_ieee_exceptions();
2685 gen_op_store_DT0_fpr(DFPREG(rd
));
2686 gen_update_fprs_dirty(DFPREG(rd
));
2688 case 0x84: /* V9 fxtos */
2689 gen_op_load_fpr_DT1(DFPREG(rs2
));
2690 gen_clear_float_exceptions();
2691 gen_helper_fxtos(cpu_tmp32
);
2692 gen_helper_check_ieee_exceptions();
2693 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2694 gen_update_fprs_dirty(rd
);
2696 case 0x88: /* V9 fxtod */
2697 gen_op_load_fpr_DT1(DFPREG(rs2
));
2698 gen_clear_float_exceptions();
2700 gen_helper_check_ieee_exceptions();
2701 gen_op_store_DT0_fpr(DFPREG(rd
));
2702 gen_update_fprs_dirty(DFPREG(rd
));
2704 case 0x8c: /* V9 fxtoq */
2705 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2706 gen_op_load_fpr_DT1(DFPREG(rs2
));
2707 gen_clear_float_exceptions();
2709 gen_helper_check_ieee_exceptions();
2710 gen_op_store_QT0_fpr(QFPREG(rd
));
2711 gen_update_fprs_dirty(QFPREG(rd
));
2717 } else if (xop
== 0x35) { /* FPU Operations */
2718 #ifdef TARGET_SPARC64
2721 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2723 gen_op_clear_ieee_excp_and_FTT();
2724 rs1
= GET_FIELD(insn
, 13, 17);
2725 rs2
= GET_FIELD(insn
, 27, 31);
2726 xop
= GET_FIELD(insn
, 18, 26);
2727 save_state(dc
, cpu_cond
);
2728 #ifdef TARGET_SPARC64
2729 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2732 l1
= gen_new_label();
2733 cond
= GET_FIELD_SP(insn
, 14, 17);
2734 cpu_src1
= get_src1(insn
, cpu_src1
);
2735 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2737 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2738 gen_update_fprs_dirty(rd
);
2741 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2744 l1
= gen_new_label();
2745 cond
= GET_FIELD_SP(insn
, 14, 17);
2746 cpu_src1
= get_src1(insn
, cpu_src1
);
2747 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2749 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2750 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2751 gen_update_fprs_dirty(DFPREG(rd
));
2754 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2757 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2758 l1
= gen_new_label();
2759 cond
= GET_FIELD_SP(insn
, 14, 17);
2760 cpu_src1
= get_src1(insn
, cpu_src1
);
2761 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2763 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2764 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2765 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2766 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2767 gen_update_fprs_dirty(QFPREG(rd
));
2773 #ifdef TARGET_SPARC64
2774 #define FMOVSCC(fcc) \
2779 l1 = gen_new_label(); \
2780 r_cond = tcg_temp_new(); \
2781 cond = GET_FIELD_SP(insn, 14, 17); \
2782 gen_fcond(r_cond, fcc, cond); \
2783 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2785 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2786 gen_update_fprs_dirty(rd); \
2787 gen_set_label(l1); \
2788 tcg_temp_free(r_cond); \
2790 #define FMOVDCC(fcc) \
2795 l1 = gen_new_label(); \
2796 r_cond = tcg_temp_new(); \
2797 cond = GET_FIELD_SP(insn, 14, 17); \
2798 gen_fcond(r_cond, fcc, cond); \
2799 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2801 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2802 cpu_fpr[DFPREG(rs2)]); \
2803 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2804 cpu_fpr[DFPREG(rs2) + 1]); \
2805 gen_update_fprs_dirty(DFPREG(rd)); \
2806 gen_set_label(l1); \
2807 tcg_temp_free(r_cond); \
2809 #define FMOVQCC(fcc) \
2814 l1 = gen_new_label(); \
2815 r_cond = tcg_temp_new(); \
2816 cond = GET_FIELD_SP(insn, 14, 17); \
2817 gen_fcond(r_cond, fcc, cond); \
2818 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2820 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2821 cpu_fpr[QFPREG(rs2)]); \
2822 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2823 cpu_fpr[QFPREG(rs2) + 1]); \
2824 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2825 cpu_fpr[QFPREG(rs2) + 2]); \
2826 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2827 cpu_fpr[QFPREG(rs2) + 3]); \
2828 gen_update_fprs_dirty(QFPREG(rd)); \
2829 gen_set_label(l1); \
2830 tcg_temp_free(r_cond); \
2832 case 0x001: /* V9 fmovscc %fcc0 */
2835 case 0x002: /* V9 fmovdcc %fcc0 */
2838 case 0x003: /* V9 fmovqcc %fcc0 */
2839 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2842 case 0x041: /* V9 fmovscc %fcc1 */
2845 case 0x042: /* V9 fmovdcc %fcc1 */
2848 case 0x043: /* V9 fmovqcc %fcc1 */
2849 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2852 case 0x081: /* V9 fmovscc %fcc2 */
2855 case 0x082: /* V9 fmovdcc %fcc2 */
2858 case 0x083: /* V9 fmovqcc %fcc2 */
2859 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2862 case 0x0c1: /* V9 fmovscc %fcc3 */
2865 case 0x0c2: /* V9 fmovdcc %fcc3 */
2868 case 0x0c3: /* V9 fmovqcc %fcc3 */
2869 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2875 #define FMOVSCC(icc) \
2880 l1 = gen_new_label(); \
2881 r_cond = tcg_temp_new(); \
2882 cond = GET_FIELD_SP(insn, 14, 17); \
2883 gen_cond(r_cond, icc, cond, dc); \
2884 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2886 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2887 gen_update_fprs_dirty(rd); \
2888 gen_set_label(l1); \
2889 tcg_temp_free(r_cond); \
2891 #define FMOVDCC(icc) \
2896 l1 = gen_new_label(); \
2897 r_cond = tcg_temp_new(); \
2898 cond = GET_FIELD_SP(insn, 14, 17); \
2899 gen_cond(r_cond, icc, cond, dc); \
2900 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2902 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2903 cpu_fpr[DFPREG(rs2)]); \
2904 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2905 cpu_fpr[DFPREG(rs2) + 1]); \
2906 gen_update_fprs_dirty(DFPREG(rd)); \
2907 gen_set_label(l1); \
2908 tcg_temp_free(r_cond); \
2910 #define FMOVQCC(icc) \
2915 l1 = gen_new_label(); \
2916 r_cond = tcg_temp_new(); \
2917 cond = GET_FIELD_SP(insn, 14, 17); \
2918 gen_cond(r_cond, icc, cond, dc); \
2919 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2921 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2922 cpu_fpr[QFPREG(rs2)]); \
2923 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2924 cpu_fpr[QFPREG(rs2) + 1]); \
2925 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2926 cpu_fpr[QFPREG(rs2) + 2]); \
2927 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2928 cpu_fpr[QFPREG(rs2) + 3]); \
2929 gen_update_fprs_dirty(QFPREG(rd)); \
2930 gen_set_label(l1); \
2931 tcg_temp_free(r_cond); \
2934 case 0x101: /* V9 fmovscc %icc */
2937 case 0x102: /* V9 fmovdcc %icc */
2940 case 0x103: /* V9 fmovqcc %icc */
2941 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2944 case 0x181: /* V9 fmovscc %xcc */
2947 case 0x182: /* V9 fmovdcc %xcc */
2950 case 0x183: /* V9 fmovqcc %xcc */
2951 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2958 case 0x51: /* fcmps, V9 %fcc */
2959 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2961 case 0x52: /* fcmpd, V9 %fcc */
2962 gen_op_load_fpr_DT0(DFPREG(rs1
));
2963 gen_op_load_fpr_DT1(DFPREG(rs2
));
2964 gen_op_fcmpd(rd
& 3);
2966 case 0x53: /* fcmpq, V9 %fcc */
2967 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2968 gen_op_load_fpr_QT0(QFPREG(rs1
));
2969 gen_op_load_fpr_QT1(QFPREG(rs2
));
2970 gen_op_fcmpq(rd
& 3);
2972 case 0x55: /* fcmpes, V9 %fcc */
2973 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2975 case 0x56: /* fcmped, V9 %fcc */
2976 gen_op_load_fpr_DT0(DFPREG(rs1
));
2977 gen_op_load_fpr_DT1(DFPREG(rs2
));
2978 gen_op_fcmped(rd
& 3);
2980 case 0x57: /* fcmpeq, V9 %fcc */
2981 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2982 gen_op_load_fpr_QT0(QFPREG(rs1
));
2983 gen_op_load_fpr_QT1(QFPREG(rs2
));
2984 gen_op_fcmpeq(rd
& 3);
2989 } else if (xop
== 0x2) {
2992 rs1
= GET_FIELD(insn
, 13, 17);
2994 // or %g0, x, y -> mov T0, x; mov y, T0
2995 if (IS_IMM
) { /* immediate */
2998 simm
= GET_FIELDs(insn
, 19, 31);
2999 r_const
= tcg_const_tl(simm
);
3000 gen_movl_TN_reg(rd
, r_const
);
3001 tcg_temp_free(r_const
);
3002 } else { /* register */
3003 rs2
= GET_FIELD(insn
, 27, 31);
3004 gen_movl_reg_TN(rs2
, cpu_dst
);
3005 gen_movl_TN_reg(rd
, cpu_dst
);
3008 cpu_src1
= get_src1(insn
, cpu_src1
);
3009 if (IS_IMM
) { /* immediate */
3010 simm
= GET_FIELDs(insn
, 19, 31);
3011 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3012 gen_movl_TN_reg(rd
, cpu_dst
);
3013 } else { /* register */
3014 // or x, %g0, y -> mov T1, x; mov y, T1
3015 rs2
= GET_FIELD(insn
, 27, 31);
3017 gen_movl_reg_TN(rs2
, cpu_src2
);
3018 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3019 gen_movl_TN_reg(rd
, cpu_dst
);
3021 gen_movl_TN_reg(rd
, cpu_src1
);
3024 #ifdef TARGET_SPARC64
3025 } else if (xop
== 0x25) { /* sll, V9 sllx */
3026 cpu_src1
= get_src1(insn
, cpu_src1
);
3027 if (IS_IMM
) { /* immediate */
3028 simm
= GET_FIELDs(insn
, 20, 31);
3029 if (insn
& (1 << 12)) {
3030 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3032 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
3034 } else { /* register */
3035 rs2
= GET_FIELD(insn
, 27, 31);
3036 gen_movl_reg_TN(rs2
, cpu_src2
);
3037 if (insn
& (1 << 12)) {
3038 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3040 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3042 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3044 gen_movl_TN_reg(rd
, cpu_dst
);
3045 } else if (xop
== 0x26) { /* srl, V9 srlx */
3046 cpu_src1
= get_src1(insn
, cpu_src1
);
3047 if (IS_IMM
) { /* immediate */
3048 simm
= GET_FIELDs(insn
, 20, 31);
3049 if (insn
& (1 << 12)) {
3050 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3052 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3053 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3055 } else { /* register */
3056 rs2
= GET_FIELD(insn
, 27, 31);
3057 gen_movl_reg_TN(rs2
, cpu_src2
);
3058 if (insn
& (1 << 12)) {
3059 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3060 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3062 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3063 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3064 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3067 gen_movl_TN_reg(rd
, cpu_dst
);
3068 } else if (xop
== 0x27) { /* sra, V9 srax */
3069 cpu_src1
= get_src1(insn
, cpu_src1
);
3070 if (IS_IMM
) { /* immediate */
3071 simm
= GET_FIELDs(insn
, 20, 31);
3072 if (insn
& (1 << 12)) {
3073 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3075 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3076 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3077 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3079 } else { /* register */
3080 rs2
= GET_FIELD(insn
, 27, 31);
3081 gen_movl_reg_TN(rs2
, cpu_src2
);
3082 if (insn
& (1 << 12)) {
3083 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3084 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3086 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3087 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3088 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3089 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3092 gen_movl_TN_reg(rd
, cpu_dst
);
3094 } else if (xop
< 0x36) {
3096 cpu_src1
= get_src1(insn
, cpu_src1
);
3097 cpu_src2
= get_src2(insn
, cpu_src2
);
3098 switch (xop
& ~0x10) {
3101 simm
= GET_FIELDs(insn
, 19, 31);
3103 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
3104 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3105 dc
->cc_op
= CC_OP_ADD
;
3107 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3111 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3112 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3113 dc
->cc_op
= CC_OP_ADD
;
3115 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3121 simm
= GET_FIELDs(insn
, 19, 31);
3122 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
3124 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3127 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3128 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3129 dc
->cc_op
= CC_OP_LOGIC
;
3134 simm
= GET_FIELDs(insn
, 19, 31);
3135 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3137 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3140 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3141 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3142 dc
->cc_op
= CC_OP_LOGIC
;
3147 simm
= GET_FIELDs(insn
, 19, 31);
3148 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
3150 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3153 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3154 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3155 dc
->cc_op
= CC_OP_LOGIC
;
3160 simm
= GET_FIELDs(insn
, 19, 31);
3162 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
3164 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
3168 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3169 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
3170 dc
->cc_op
= CC_OP_SUB
;
3172 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3176 case 0x5: /* andn */
3178 simm
= GET_FIELDs(insn
, 19, 31);
3179 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3181 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3184 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3185 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3186 dc
->cc_op
= CC_OP_LOGIC
;
3191 simm
= GET_FIELDs(insn
, 19, 31);
3192 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3194 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3197 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3198 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3199 dc
->cc_op
= CC_OP_LOGIC
;
3202 case 0x7: /* xorn */
3204 simm
= GET_FIELDs(insn
, 19, 31);
3205 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3207 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3208 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3211 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3212 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3213 dc
->cc_op
= CC_OP_LOGIC
;
3216 case 0x8: /* addx, V9 addc */
3217 gen_op_addx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3220 #ifdef TARGET_SPARC64
3221 case 0x9: /* V9 mulx */
3223 simm
= GET_FIELDs(insn
, 19, 31);
3224 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3226 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3230 case 0xa: /* umul */
3231 CHECK_IU_FEATURE(dc
, MUL
);
3232 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3234 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3235 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3236 dc
->cc_op
= CC_OP_LOGIC
;
3239 case 0xb: /* smul */
3240 CHECK_IU_FEATURE(dc
, MUL
);
3241 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3243 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3244 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3245 dc
->cc_op
= CC_OP_LOGIC
;
3248 case 0xc: /* subx, V9 subc */
3249 gen_op_subx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3252 #ifdef TARGET_SPARC64
3253 case 0xd: /* V9 udivx */
3255 TCGv r_temp1
, r_temp2
;
3256 r_temp1
= tcg_temp_local_new();
3257 r_temp2
= tcg_temp_local_new();
3258 tcg_gen_mov_tl(r_temp1
, cpu_src1
);
3259 tcg_gen_mov_tl(r_temp2
, cpu_src2
);
3260 gen_trap_ifdivzero_tl(r_temp2
);
3261 tcg_gen_divu_i64(cpu_dst
, r_temp1
, r_temp2
);
3262 tcg_temp_free(r_temp1
);
3263 tcg_temp_free(r_temp2
);
3267 case 0xe: /* udiv */
3268 CHECK_IU_FEATURE(dc
, DIV
);
3270 gen_helper_udiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3271 dc
->cc_op
= CC_OP_DIV
;
3273 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3276 case 0xf: /* sdiv */
3277 CHECK_IU_FEATURE(dc
, DIV
);
3279 gen_helper_sdiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3280 dc
->cc_op
= CC_OP_DIV
;
3282 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3288 gen_movl_TN_reg(rd
, cpu_dst
);
3290 cpu_src1
= get_src1(insn
, cpu_src1
);
3291 cpu_src2
= get_src2(insn
, cpu_src2
);
3293 case 0x20: /* taddcc */
3294 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3295 gen_movl_TN_reg(rd
, cpu_dst
);
3296 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3297 dc
->cc_op
= CC_OP_TADD
;
3299 case 0x21: /* tsubcc */
3300 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3301 gen_movl_TN_reg(rd
, cpu_dst
);
3302 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3303 dc
->cc_op
= CC_OP_TSUB
;
3305 case 0x22: /* taddcctv */
3306 save_state(dc
, cpu_cond
);
3307 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3308 gen_movl_TN_reg(rd
, cpu_dst
);
3309 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3310 dc
->cc_op
= CC_OP_TADDTV
;
3312 case 0x23: /* tsubcctv */
3313 save_state(dc
, cpu_cond
);
3314 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3315 gen_movl_TN_reg(rd
, cpu_dst
);
3316 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3317 dc
->cc_op
= CC_OP_TSUBTV
;
3319 case 0x24: /* mulscc */
3320 gen_helper_compute_psr();
3321 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3322 gen_movl_TN_reg(rd
, cpu_dst
);
3323 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3324 dc
->cc_op
= CC_OP_ADD
;
3326 #ifndef TARGET_SPARC64
3327 case 0x25: /* sll */
3328 if (IS_IMM
) { /* immediate */
3329 simm
= GET_FIELDs(insn
, 20, 31);
3330 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3331 } else { /* register */
3332 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3333 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3335 gen_movl_TN_reg(rd
, cpu_dst
);
3337 case 0x26: /* srl */
3338 if (IS_IMM
) { /* immediate */
3339 simm
= GET_FIELDs(insn
, 20, 31);
3340 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3341 } else { /* register */
3342 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3343 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3345 gen_movl_TN_reg(rd
, cpu_dst
);
3347 case 0x27: /* sra */
3348 if (IS_IMM
) { /* immediate */
3349 simm
= GET_FIELDs(insn
, 20, 31);
3350 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3351 } else { /* register */
3352 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3353 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3355 gen_movl_TN_reg(rd
, cpu_dst
);
3362 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3363 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3365 #ifndef TARGET_SPARC64
3366 case 0x01 ... 0x0f: /* undefined in the
3370 case 0x10 ... 0x1f: /* implementation-dependent
3376 case 0x2: /* V9 wrccr */
3377 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3378 gen_helper_wrccr(cpu_dst
);
3379 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3380 dc
->cc_op
= CC_OP_FLAGS
;
3382 case 0x3: /* V9 wrasi */
3383 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3384 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3385 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3387 case 0x6: /* V9 wrfprs */
3388 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3389 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3390 save_state(dc
, cpu_cond
);
3395 case 0xf: /* V9 sir, nop if user */
3396 #if !defined(CONFIG_USER_ONLY)
3397 if (supervisor(dc
)) {
3402 case 0x13: /* Graphics Status */
3403 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3405 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3407 case 0x14: /* Softint set */
3408 if (!supervisor(dc
))
3410 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3411 gen_helper_set_softint(cpu_tmp64
);
3413 case 0x15: /* Softint clear */
3414 if (!supervisor(dc
))
3416 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3417 gen_helper_clear_softint(cpu_tmp64
);
3419 case 0x16: /* Softint write */
3420 if (!supervisor(dc
))
3422 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3423 gen_helper_write_softint(cpu_tmp64
);
3425 case 0x17: /* Tick compare */
3426 #if !defined(CONFIG_USER_ONLY)
3427 if (!supervisor(dc
))
3433 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3435 r_tickptr
= tcg_temp_new_ptr();
3436 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3437 offsetof(CPUState
, tick
));
3438 gen_helper_tick_set_limit(r_tickptr
,
3440 tcg_temp_free_ptr(r_tickptr
);
3443 case 0x18: /* System tick */
3444 #if !defined(CONFIG_USER_ONLY)
3445 if (!supervisor(dc
))
3451 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3453 r_tickptr
= tcg_temp_new_ptr();
3454 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3455 offsetof(CPUState
, stick
));
3456 gen_helper_tick_set_count(r_tickptr
,
3458 tcg_temp_free_ptr(r_tickptr
);
3461 case 0x19: /* System tick compare */
3462 #if !defined(CONFIG_USER_ONLY)
3463 if (!supervisor(dc
))
3469 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3471 r_tickptr
= tcg_temp_new_ptr();
3472 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3473 offsetof(CPUState
, stick
));
3474 gen_helper_tick_set_limit(r_tickptr
,
3476 tcg_temp_free_ptr(r_tickptr
);
3480 case 0x10: /* Performance Control */
3481 case 0x11: /* Performance Instrumentation
3483 case 0x12: /* Dispatch Control */
3490 #if !defined(CONFIG_USER_ONLY)
3491 case 0x31: /* wrpsr, V9 saved, restored */
3493 if (!supervisor(dc
))
3495 #ifdef TARGET_SPARC64
3501 gen_helper_restored();
3503 case 2: /* UA2005 allclean */
3504 case 3: /* UA2005 otherw */
3505 case 4: /* UA2005 normalw */
3506 case 5: /* UA2005 invalw */
3512 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3513 gen_helper_wrpsr(cpu_dst
);
3514 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3515 dc
->cc_op
= CC_OP_FLAGS
;
3516 save_state(dc
, cpu_cond
);
3523 case 0x32: /* wrwim, V9 wrpr */
3525 if (!supervisor(dc
))
3527 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3528 #ifdef TARGET_SPARC64
3534 r_tsptr
= tcg_temp_new_ptr();
3535 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3536 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3537 offsetof(trap_state
, tpc
));
3538 tcg_temp_free_ptr(r_tsptr
);
3545 r_tsptr
= tcg_temp_new_ptr();
3546 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3547 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3548 offsetof(trap_state
, tnpc
));
3549 tcg_temp_free_ptr(r_tsptr
);
3556 r_tsptr
= tcg_temp_new_ptr();
3557 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3558 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3559 offsetof(trap_state
,
3561 tcg_temp_free_ptr(r_tsptr
);
3568 r_tsptr
= tcg_temp_new_ptr();
3569 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3570 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3571 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3572 offsetof(trap_state
, tt
));
3573 tcg_temp_free_ptr(r_tsptr
);
3580 r_tickptr
= tcg_temp_new_ptr();
3581 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3582 offsetof(CPUState
, tick
));
3583 gen_helper_tick_set_count(r_tickptr
,
3585 tcg_temp_free_ptr(r_tickptr
);
3589 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3593 TCGv r_tmp
= tcg_temp_local_new();
3595 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3596 save_state(dc
, cpu_cond
);
3597 gen_helper_wrpstate(r_tmp
);
3598 tcg_temp_free(r_tmp
);
3599 dc
->npc
= DYNAMIC_PC
;
3604 TCGv r_tmp
= tcg_temp_local_new();
3606 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3607 save_state(dc
, cpu_cond
);
3608 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_tmp
);
3609 tcg_temp_free(r_tmp
);
3610 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3611 offsetof(CPUSPARCState
, tl
));
3612 dc
->npc
= DYNAMIC_PC
;
3616 gen_helper_wrpil(cpu_tmp0
);
3619 gen_helper_wrcwp(cpu_tmp0
);
3622 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3623 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3624 offsetof(CPUSPARCState
,
3627 case 11: // canrestore
3628 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3629 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3630 offsetof(CPUSPARCState
,
3633 case 12: // cleanwin
3634 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3635 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3636 offsetof(CPUSPARCState
,
3639 case 13: // otherwin
3640 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3641 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3642 offsetof(CPUSPARCState
,
3646 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3647 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3648 offsetof(CPUSPARCState
,
3651 case 16: // UA2005 gl
3652 CHECK_IU_FEATURE(dc
, GL
);
3653 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3654 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3655 offsetof(CPUSPARCState
, gl
));
3657 case 26: // UA2005 strand status
3658 CHECK_IU_FEATURE(dc
, HYPV
);
3659 if (!hypervisor(dc
))
3661 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3667 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3668 if (dc
->def
->nwindows
!= 32)
3669 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3670 (1 << dc
->def
->nwindows
) - 1);
3671 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3675 case 0x33: /* wrtbr, UA2005 wrhpr */
3677 #ifndef TARGET_SPARC64
3678 if (!supervisor(dc
))
3680 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3682 CHECK_IU_FEATURE(dc
, HYPV
);
3683 if (!hypervisor(dc
))
3685 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3688 // XXX gen_op_wrhpstate();
3689 save_state(dc
, cpu_cond
);
3695 // XXX gen_op_wrhtstate();
3698 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3701 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3703 case 31: // hstick_cmpr
3707 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3708 r_tickptr
= tcg_temp_new_ptr();
3709 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3710 offsetof(CPUState
, hstick
));
3711 gen_helper_tick_set_limit(r_tickptr
,
3713 tcg_temp_free_ptr(r_tickptr
);
3716 case 6: // hver readonly
3724 #ifdef TARGET_SPARC64
3725 case 0x2c: /* V9 movcc */
3727 int cc
= GET_FIELD_SP(insn
, 11, 12);
3728 int cond
= GET_FIELD_SP(insn
, 14, 17);
3732 r_cond
= tcg_temp_new();
3733 if (insn
& (1 << 18)) {
3735 gen_cond(r_cond
, 0, cond
, dc
);
3737 gen_cond(r_cond
, 1, cond
, dc
);
3741 gen_fcond(r_cond
, cc
, cond
);
3744 l1
= gen_new_label();
3746 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3747 if (IS_IMM
) { /* immediate */
3750 simm
= GET_FIELD_SPs(insn
, 0, 10);
3751 r_const
= tcg_const_tl(simm
);
3752 gen_movl_TN_reg(rd
, r_const
);
3753 tcg_temp_free(r_const
);
3755 rs2
= GET_FIELD_SP(insn
, 0, 4);
3756 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3757 gen_movl_TN_reg(rd
, cpu_tmp0
);
3760 tcg_temp_free(r_cond
);
3763 case 0x2d: /* V9 sdivx */
3764 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3765 gen_movl_TN_reg(rd
, cpu_dst
);
3767 case 0x2e: /* V9 popc */
3769 cpu_src2
= get_src2(insn
, cpu_src2
);
3770 gen_helper_popc(cpu_dst
, cpu_src2
);
3771 gen_movl_TN_reg(rd
, cpu_dst
);
3773 case 0x2f: /* V9 movr */
3775 int cond
= GET_FIELD_SP(insn
, 10, 12);
3778 cpu_src1
= get_src1(insn
, cpu_src1
);
3780 l1
= gen_new_label();
3782 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3784 if (IS_IMM
) { /* immediate */
3787 simm
= GET_FIELD_SPs(insn
, 0, 9);
3788 r_const
= tcg_const_tl(simm
);
3789 gen_movl_TN_reg(rd
, r_const
);
3790 tcg_temp_free(r_const
);
3792 rs2
= GET_FIELD_SP(insn
, 0, 4);
3793 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3794 gen_movl_TN_reg(rd
, cpu_tmp0
);
3804 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3805 #ifdef TARGET_SPARC64
3806 int opf
= GET_FIELD_SP(insn
, 5, 13);
3807 rs1
= GET_FIELD(insn
, 13, 17);
3808 rs2
= GET_FIELD(insn
, 27, 31);
3809 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3813 case 0x000: /* VIS I edge8cc */
3814 case 0x001: /* VIS II edge8n */
3815 case 0x002: /* VIS I edge8lcc */
3816 case 0x003: /* VIS II edge8ln */
3817 case 0x004: /* VIS I edge16cc */
3818 case 0x005: /* VIS II edge16n */
3819 case 0x006: /* VIS I edge16lcc */
3820 case 0x007: /* VIS II edge16ln */
3821 case 0x008: /* VIS I edge32cc */
3822 case 0x009: /* VIS II edge32n */
3823 case 0x00a: /* VIS I edge32lcc */
3824 case 0x00b: /* VIS II edge32ln */
3827 case 0x010: /* VIS I array8 */
3828 CHECK_FPU_FEATURE(dc
, VIS1
);
3829 cpu_src1
= get_src1(insn
, cpu_src1
);
3830 gen_movl_reg_TN(rs2
, cpu_src2
);
3831 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3832 gen_movl_TN_reg(rd
, cpu_dst
);
3834 case 0x012: /* VIS I array16 */
3835 CHECK_FPU_FEATURE(dc
, VIS1
);
3836 cpu_src1
= get_src1(insn
, cpu_src1
);
3837 gen_movl_reg_TN(rs2
, cpu_src2
);
3838 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3839 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3840 gen_movl_TN_reg(rd
, cpu_dst
);
3842 case 0x014: /* VIS I array32 */
3843 CHECK_FPU_FEATURE(dc
, VIS1
);
3844 cpu_src1
= get_src1(insn
, cpu_src1
);
3845 gen_movl_reg_TN(rs2
, cpu_src2
);
3846 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3847 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3848 gen_movl_TN_reg(rd
, cpu_dst
);
3850 case 0x018: /* VIS I alignaddr */
3851 CHECK_FPU_FEATURE(dc
, VIS1
);
3852 cpu_src1
= get_src1(insn
, cpu_src1
);
3853 gen_movl_reg_TN(rs2
, cpu_src2
);
3854 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3855 gen_movl_TN_reg(rd
, cpu_dst
);
3857 case 0x019: /* VIS II bmask */
3858 case 0x01a: /* VIS I alignaddrl */
3861 case 0x020: /* VIS I fcmple16 */
3862 CHECK_FPU_FEATURE(dc
, VIS1
);
3863 gen_op_load_fpr_DT0(DFPREG(rs1
));
3864 gen_op_load_fpr_DT1(DFPREG(rs2
));
3865 gen_helper_fcmple16(cpu_dst
);
3866 gen_movl_TN_reg(rd
, cpu_dst
);
3868 case 0x022: /* VIS I fcmpne16 */
3869 CHECK_FPU_FEATURE(dc
, VIS1
);
3870 gen_op_load_fpr_DT0(DFPREG(rs1
));
3871 gen_op_load_fpr_DT1(DFPREG(rs2
));
3872 gen_helper_fcmpne16(cpu_dst
);
3873 gen_movl_TN_reg(rd
, cpu_dst
);
3875 case 0x024: /* VIS I fcmple32 */
3876 CHECK_FPU_FEATURE(dc
, VIS1
);
3877 gen_op_load_fpr_DT0(DFPREG(rs1
));
3878 gen_op_load_fpr_DT1(DFPREG(rs2
));
3879 gen_helper_fcmple32(cpu_dst
);
3880 gen_movl_TN_reg(rd
, cpu_dst
);
3882 case 0x026: /* VIS I fcmpne32 */
3883 CHECK_FPU_FEATURE(dc
, VIS1
);
3884 gen_op_load_fpr_DT0(DFPREG(rs1
));
3885 gen_op_load_fpr_DT1(DFPREG(rs2
));
3886 gen_helper_fcmpne32(cpu_dst
);
3887 gen_movl_TN_reg(rd
, cpu_dst
);
3889 case 0x028: /* VIS I fcmpgt16 */
3890 CHECK_FPU_FEATURE(dc
, VIS1
);
3891 gen_op_load_fpr_DT0(DFPREG(rs1
));
3892 gen_op_load_fpr_DT1(DFPREG(rs2
));
3893 gen_helper_fcmpgt16(cpu_dst
);
3894 gen_movl_TN_reg(rd
, cpu_dst
);
3896 case 0x02a: /* VIS I fcmpeq16 */
3897 CHECK_FPU_FEATURE(dc
, VIS1
);
3898 gen_op_load_fpr_DT0(DFPREG(rs1
));
3899 gen_op_load_fpr_DT1(DFPREG(rs2
));
3900 gen_helper_fcmpeq16(cpu_dst
);
3901 gen_movl_TN_reg(rd
, cpu_dst
);
3903 case 0x02c: /* VIS I fcmpgt32 */
3904 CHECK_FPU_FEATURE(dc
, VIS1
);
3905 gen_op_load_fpr_DT0(DFPREG(rs1
));
3906 gen_op_load_fpr_DT1(DFPREG(rs2
));
3907 gen_helper_fcmpgt32(cpu_dst
);
3908 gen_movl_TN_reg(rd
, cpu_dst
);
3910 case 0x02e: /* VIS I fcmpeq32 */
3911 CHECK_FPU_FEATURE(dc
, VIS1
);
3912 gen_op_load_fpr_DT0(DFPREG(rs1
));
3913 gen_op_load_fpr_DT1(DFPREG(rs2
));
3914 gen_helper_fcmpeq32(cpu_dst
);
3915 gen_movl_TN_reg(rd
, cpu_dst
);
3917 case 0x031: /* VIS I fmul8x16 */
3918 CHECK_FPU_FEATURE(dc
, VIS1
);
3919 gen_op_load_fpr_DT0(DFPREG(rs1
));
3920 gen_op_load_fpr_DT1(DFPREG(rs2
));
3921 gen_helper_fmul8x16();
3922 gen_op_store_DT0_fpr(DFPREG(rd
));
3923 gen_update_fprs_dirty(DFPREG(rd
));
3925 case 0x033: /* VIS I fmul8x16au */
3926 CHECK_FPU_FEATURE(dc
, VIS1
);
3927 gen_op_load_fpr_DT0(DFPREG(rs1
));
3928 gen_op_load_fpr_DT1(DFPREG(rs2
));
3929 gen_helper_fmul8x16au();
3930 gen_op_store_DT0_fpr(DFPREG(rd
));
3931 gen_update_fprs_dirty(DFPREG(rd
));
3933 case 0x035: /* VIS I fmul8x16al */
3934 CHECK_FPU_FEATURE(dc
, VIS1
);
3935 gen_op_load_fpr_DT0(DFPREG(rs1
));
3936 gen_op_load_fpr_DT1(DFPREG(rs2
));
3937 gen_helper_fmul8x16al();
3938 gen_op_store_DT0_fpr(DFPREG(rd
));
3939 gen_update_fprs_dirty(DFPREG(rd
));
3941 case 0x036: /* VIS I fmul8sux16 */
3942 CHECK_FPU_FEATURE(dc
, VIS1
);
3943 gen_op_load_fpr_DT0(DFPREG(rs1
));
3944 gen_op_load_fpr_DT1(DFPREG(rs2
));
3945 gen_helper_fmul8sux16();
3946 gen_op_store_DT0_fpr(DFPREG(rd
));
3947 gen_update_fprs_dirty(DFPREG(rd
));
3949 case 0x037: /* VIS I fmul8ulx16 */
3950 CHECK_FPU_FEATURE(dc
, VIS1
);
3951 gen_op_load_fpr_DT0(DFPREG(rs1
));
3952 gen_op_load_fpr_DT1(DFPREG(rs2
));
3953 gen_helper_fmul8ulx16();
3954 gen_op_store_DT0_fpr(DFPREG(rd
));
3955 gen_update_fprs_dirty(DFPREG(rd
));
3957 case 0x038: /* VIS I fmuld8sux16 */
3958 CHECK_FPU_FEATURE(dc
, VIS1
);
3959 gen_op_load_fpr_DT0(DFPREG(rs1
));
3960 gen_op_load_fpr_DT1(DFPREG(rs2
));
3961 gen_helper_fmuld8sux16();
3962 gen_op_store_DT0_fpr(DFPREG(rd
));
3963 gen_update_fprs_dirty(DFPREG(rd
));
3965 case 0x039: /* VIS I fmuld8ulx16 */
3966 CHECK_FPU_FEATURE(dc
, VIS1
);
3967 gen_op_load_fpr_DT0(DFPREG(rs1
));
3968 gen_op_load_fpr_DT1(DFPREG(rs2
));
3969 gen_helper_fmuld8ulx16();
3970 gen_op_store_DT0_fpr(DFPREG(rd
));
3971 gen_update_fprs_dirty(DFPREG(rd
));
3973 case 0x03a: /* VIS I fpack32 */
3974 case 0x03b: /* VIS I fpack16 */
3975 case 0x03d: /* VIS I fpackfix */
3976 case 0x03e: /* VIS I pdist */
3979 case 0x048: /* VIS I faligndata */
3980 CHECK_FPU_FEATURE(dc
, VIS1
);
3981 gen_op_load_fpr_DT0(DFPREG(rs1
));
3982 gen_op_load_fpr_DT1(DFPREG(rs2
));
3983 gen_helper_faligndata();
3984 gen_op_store_DT0_fpr(DFPREG(rd
));
3985 gen_update_fprs_dirty(DFPREG(rd
));
3987 case 0x04b: /* VIS I fpmerge */
3988 CHECK_FPU_FEATURE(dc
, VIS1
);
3989 gen_op_load_fpr_DT0(DFPREG(rs1
));
3990 gen_op_load_fpr_DT1(DFPREG(rs2
));
3991 gen_helper_fpmerge();
3992 gen_op_store_DT0_fpr(DFPREG(rd
));
3993 gen_update_fprs_dirty(DFPREG(rd
));
3995 case 0x04c: /* VIS II bshuffle */
3998 case 0x04d: /* VIS I fexpand */
3999 CHECK_FPU_FEATURE(dc
, VIS1
);
4000 gen_op_load_fpr_DT0(DFPREG(rs1
));
4001 gen_op_load_fpr_DT1(DFPREG(rs2
));
4002 gen_helper_fexpand();
4003 gen_op_store_DT0_fpr(DFPREG(rd
));
4004 gen_update_fprs_dirty(DFPREG(rd
));
4006 case 0x050: /* VIS I fpadd16 */
4007 CHECK_FPU_FEATURE(dc
, VIS1
);
4008 gen_op_load_fpr_DT0(DFPREG(rs1
));
4009 gen_op_load_fpr_DT1(DFPREG(rs2
));
4010 gen_helper_fpadd16();
4011 gen_op_store_DT0_fpr(DFPREG(rd
));
4012 gen_update_fprs_dirty(DFPREG(rd
));
4014 case 0x051: /* VIS I fpadd16s */
4015 CHECK_FPU_FEATURE(dc
, VIS1
);
4016 gen_helper_fpadd16s(cpu_fpr
[rd
],
4017 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4018 gen_update_fprs_dirty(rd
);
4020 case 0x052: /* VIS I fpadd32 */
4021 CHECK_FPU_FEATURE(dc
, VIS1
);
4022 gen_op_load_fpr_DT0(DFPREG(rs1
));
4023 gen_op_load_fpr_DT1(DFPREG(rs2
));
4024 gen_helper_fpadd32();
4025 gen_op_store_DT0_fpr(DFPREG(rd
));
4026 gen_update_fprs_dirty(DFPREG(rd
));
4028 case 0x053: /* VIS I fpadd32s */
4029 CHECK_FPU_FEATURE(dc
, VIS1
);
4030 gen_helper_fpadd32s(cpu_fpr
[rd
],
4031 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4032 gen_update_fprs_dirty(rd
);
4034 case 0x054: /* VIS I fpsub16 */
4035 CHECK_FPU_FEATURE(dc
, VIS1
);
4036 gen_op_load_fpr_DT0(DFPREG(rs1
));
4037 gen_op_load_fpr_DT1(DFPREG(rs2
));
4038 gen_helper_fpsub16();
4039 gen_op_store_DT0_fpr(DFPREG(rd
));
4040 gen_update_fprs_dirty(DFPREG(rd
));
4042 case 0x055: /* VIS I fpsub16s */
4043 CHECK_FPU_FEATURE(dc
, VIS1
);
4044 gen_helper_fpsub16s(cpu_fpr
[rd
],
4045 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4046 gen_update_fprs_dirty(rd
);
4048 case 0x056: /* VIS I fpsub32 */
4049 CHECK_FPU_FEATURE(dc
, VIS1
);
4050 gen_op_load_fpr_DT0(DFPREG(rs1
));
4051 gen_op_load_fpr_DT1(DFPREG(rs2
));
4052 gen_helper_fpsub32();
4053 gen_op_store_DT0_fpr(DFPREG(rd
));
4054 gen_update_fprs_dirty(DFPREG(rd
));
4056 case 0x057: /* VIS I fpsub32s */
4057 CHECK_FPU_FEATURE(dc
, VIS1
);
4058 gen_helper_fpsub32s(cpu_fpr
[rd
],
4059 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4060 gen_update_fprs_dirty(rd
);
4062 case 0x060: /* VIS I fzero */
4063 CHECK_FPU_FEATURE(dc
, VIS1
);
4064 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
4065 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
4066 gen_update_fprs_dirty(DFPREG(rd
));
4068 case 0x061: /* VIS I fzeros */
4069 CHECK_FPU_FEATURE(dc
, VIS1
);
4070 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
4071 gen_update_fprs_dirty(rd
);
4073 case 0x062: /* VIS I fnor */
4074 CHECK_FPU_FEATURE(dc
, VIS1
);
4075 tcg_gen_nor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4076 cpu_fpr
[DFPREG(rs2
)]);
4077 tcg_gen_nor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4078 cpu_fpr
[DFPREG(rs1
) + 1],
4079 cpu_fpr
[DFPREG(rs2
) + 1]);
4080 gen_update_fprs_dirty(DFPREG(rd
));
4082 case 0x063: /* VIS I fnors */
4083 CHECK_FPU_FEATURE(dc
, VIS1
);
4084 tcg_gen_nor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4085 gen_update_fprs_dirty(rd
);
4087 case 0x064: /* VIS I fandnot2 */
4088 CHECK_FPU_FEATURE(dc
, VIS1
);
4089 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4090 cpu_fpr
[DFPREG(rs2
)]);
4091 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4092 cpu_fpr
[DFPREG(rs1
) + 1],
4093 cpu_fpr
[DFPREG(rs2
) + 1]);
4094 gen_update_fprs_dirty(DFPREG(rd
));
4096 case 0x065: /* VIS I fandnot2s */
4097 CHECK_FPU_FEATURE(dc
, VIS1
);
4098 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4099 gen_update_fprs_dirty(rd
);
4101 case 0x066: /* VIS I fnot2 */
4102 CHECK_FPU_FEATURE(dc
, VIS1
);
4103 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
4104 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4105 cpu_fpr
[DFPREG(rs2
) + 1]);
4106 gen_update_fprs_dirty(DFPREG(rd
));
4108 case 0x067: /* VIS I fnot2s */
4109 CHECK_FPU_FEATURE(dc
, VIS1
);
4110 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4111 gen_update_fprs_dirty(rd
);
4113 case 0x068: /* VIS I fandnot1 */
4114 CHECK_FPU_FEATURE(dc
, VIS1
);
4115 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4116 cpu_fpr
[DFPREG(rs1
)]);
4117 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4118 cpu_fpr
[DFPREG(rs2
) + 1],
4119 cpu_fpr
[DFPREG(rs1
) + 1]);
4120 gen_update_fprs_dirty(DFPREG(rd
));
4122 case 0x069: /* VIS I fandnot1s */
4123 CHECK_FPU_FEATURE(dc
, VIS1
);
4124 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4125 gen_update_fprs_dirty(rd
);
4127 case 0x06a: /* VIS I fnot1 */
4128 CHECK_FPU_FEATURE(dc
, VIS1
);
4129 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4130 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4131 cpu_fpr
[DFPREG(rs1
) + 1]);
4132 gen_update_fprs_dirty(DFPREG(rd
));
4134 case 0x06b: /* VIS I fnot1s */
4135 CHECK_FPU_FEATURE(dc
, VIS1
);
4136 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4137 gen_update_fprs_dirty(rd
);
4139 case 0x06c: /* VIS I fxor */
4140 CHECK_FPU_FEATURE(dc
, VIS1
);
4141 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4142 cpu_fpr
[DFPREG(rs2
)]);
4143 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4144 cpu_fpr
[DFPREG(rs1
) + 1],
4145 cpu_fpr
[DFPREG(rs2
) + 1]);
4146 gen_update_fprs_dirty(DFPREG(rd
));
4148 case 0x06d: /* VIS I fxors */
4149 CHECK_FPU_FEATURE(dc
, VIS1
);
4150 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4151 gen_update_fprs_dirty(rd
);
4153 case 0x06e: /* VIS I fnand */
4154 CHECK_FPU_FEATURE(dc
, VIS1
);
4155 tcg_gen_nand_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4156 cpu_fpr
[DFPREG(rs2
)]);
4157 tcg_gen_nand_i32(cpu_fpr
[DFPREG(rd
) + 1],
4158 cpu_fpr
[DFPREG(rs1
) + 1],
4159 cpu_fpr
[DFPREG(rs2
) + 1]);
4160 gen_update_fprs_dirty(DFPREG(rd
));
4162 case 0x06f: /* VIS I fnands */
4163 CHECK_FPU_FEATURE(dc
, VIS1
);
4164 tcg_gen_nand_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4165 gen_update_fprs_dirty(rd
);
4167 case 0x070: /* VIS I fand */
4168 CHECK_FPU_FEATURE(dc
, VIS1
);
4169 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4170 cpu_fpr
[DFPREG(rs2
)]);
4171 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
4172 cpu_fpr
[DFPREG(rs1
) + 1],
4173 cpu_fpr
[DFPREG(rs2
) + 1]);
4174 gen_update_fprs_dirty(DFPREG(rd
));
4176 case 0x071: /* VIS I fands */
4177 CHECK_FPU_FEATURE(dc
, VIS1
);
4178 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4179 gen_update_fprs_dirty(rd
);
4181 case 0x072: /* VIS I fxnor */
4182 CHECK_FPU_FEATURE(dc
, VIS1
);
4183 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4184 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4185 cpu_fpr
[DFPREG(rs1
)]);
4186 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4187 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4188 cpu_fpr
[DFPREG(rs1
) + 1]);
4189 gen_update_fprs_dirty(DFPREG(rd
));
4191 case 0x073: /* VIS I fxnors */
4192 CHECK_FPU_FEATURE(dc
, VIS1
);
4193 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4194 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4195 gen_update_fprs_dirty(rd
);
4197 case 0x074: /* VIS I fsrc1 */
4198 CHECK_FPU_FEATURE(dc
, VIS1
);
4199 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4200 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4201 cpu_fpr
[DFPREG(rs1
) + 1]);
4202 gen_update_fprs_dirty(DFPREG(rd
));
4204 case 0x075: /* VIS I fsrc1s */
4205 CHECK_FPU_FEATURE(dc
, VIS1
);
4206 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4207 gen_update_fprs_dirty(rd
);
4209 case 0x076: /* VIS I fornot2 */
4210 CHECK_FPU_FEATURE(dc
, VIS1
);
4211 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4212 cpu_fpr
[DFPREG(rs2
)]);
4213 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4214 cpu_fpr
[DFPREG(rs1
) + 1],
4215 cpu_fpr
[DFPREG(rs2
) + 1]);
4216 gen_update_fprs_dirty(DFPREG(rd
));
4218 case 0x077: /* VIS I fornot2s */
4219 CHECK_FPU_FEATURE(dc
, VIS1
);
4220 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4221 gen_update_fprs_dirty(rd
);
4223 case 0x078: /* VIS I fsrc2 */
4224 CHECK_FPU_FEATURE(dc
, VIS1
);
4225 gen_op_load_fpr_DT0(DFPREG(rs2
));
4226 gen_op_store_DT0_fpr(DFPREG(rd
));
4227 gen_update_fprs_dirty(DFPREG(rd
));
4229 case 0x079: /* VIS I fsrc2s */
4230 CHECK_FPU_FEATURE(dc
, VIS1
);
4231 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4232 gen_update_fprs_dirty(rd
);
4234 case 0x07a: /* VIS I fornot1 */
4235 CHECK_FPU_FEATURE(dc
, VIS1
);
4236 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4237 cpu_fpr
[DFPREG(rs1
)]);
4238 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4239 cpu_fpr
[DFPREG(rs2
) + 1],
4240 cpu_fpr
[DFPREG(rs1
) + 1]);
4241 gen_update_fprs_dirty(DFPREG(rd
));
4243 case 0x07b: /* VIS I fornot1s */
4244 CHECK_FPU_FEATURE(dc
, VIS1
);
4245 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4246 gen_update_fprs_dirty(rd
);
4248 case 0x07c: /* VIS I for */
4249 CHECK_FPU_FEATURE(dc
, VIS1
);
4250 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4251 cpu_fpr
[DFPREG(rs2
)]);
4252 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4253 cpu_fpr
[DFPREG(rs1
) + 1],
4254 cpu_fpr
[DFPREG(rs2
) + 1]);
4255 gen_update_fprs_dirty(DFPREG(rd
));
4257 case 0x07d: /* VIS I fors */
4258 CHECK_FPU_FEATURE(dc
, VIS1
);
4259 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4260 gen_update_fprs_dirty(rd
);
4262 case 0x07e: /* VIS I fone */
4263 CHECK_FPU_FEATURE(dc
, VIS1
);
4264 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4265 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4266 gen_update_fprs_dirty(DFPREG(rd
));
4268 case 0x07f: /* VIS I fones */
4269 CHECK_FPU_FEATURE(dc
, VIS1
);
4270 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4271 gen_update_fprs_dirty(rd
);
4273 case 0x080: /* VIS I shutdown */
4274 case 0x081: /* VIS II siam */
4283 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4284 #ifdef TARGET_SPARC64
4289 #ifdef TARGET_SPARC64
4290 } else if (xop
== 0x39) { /* V9 return */
4293 save_state(dc
, cpu_cond
);
4294 cpu_src1
= get_src1(insn
, cpu_src1
);
4295 if (IS_IMM
) { /* immediate */
4296 simm
= GET_FIELDs(insn
, 19, 31);
4297 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4298 } else { /* register */
4299 rs2
= GET_FIELD(insn
, 27, 31);
4301 gen_movl_reg_TN(rs2
, cpu_src2
);
4302 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4304 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4306 gen_helper_restore();
4307 gen_mov_pc_npc(dc
, cpu_cond
);
4308 r_const
= tcg_const_i32(3);
4309 gen_helper_check_align(cpu_dst
, r_const
);
4310 tcg_temp_free_i32(r_const
);
4311 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4312 dc
->npc
= DYNAMIC_PC
;
4316 cpu_src1
= get_src1(insn
, cpu_src1
);
4317 if (IS_IMM
) { /* immediate */
4318 simm
= GET_FIELDs(insn
, 19, 31);
4319 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4320 } else { /* register */
4321 rs2
= GET_FIELD(insn
, 27, 31);
4323 gen_movl_reg_TN(rs2
, cpu_src2
);
4324 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4326 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4329 case 0x38: /* jmpl */
4334 r_pc
= tcg_const_tl(dc
->pc
);
4335 gen_movl_TN_reg(rd
, r_pc
);
4336 tcg_temp_free(r_pc
);
4337 gen_mov_pc_npc(dc
, cpu_cond
);
4338 r_const
= tcg_const_i32(3);
4339 gen_helper_check_align(cpu_dst
, r_const
);
4340 tcg_temp_free_i32(r_const
);
4341 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4342 dc
->npc
= DYNAMIC_PC
;
4345 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4346 case 0x39: /* rett, V9 return */
4350 if (!supervisor(dc
))
4352 gen_mov_pc_npc(dc
, cpu_cond
);
4353 r_const
= tcg_const_i32(3);
4354 gen_helper_check_align(cpu_dst
, r_const
);
4355 tcg_temp_free_i32(r_const
);
4356 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4357 dc
->npc
= DYNAMIC_PC
;
4362 case 0x3b: /* flush */
4363 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4367 case 0x3c: /* save */
4368 save_state(dc
, cpu_cond
);
4370 gen_movl_TN_reg(rd
, cpu_dst
);
4372 case 0x3d: /* restore */
4373 save_state(dc
, cpu_cond
);
4374 gen_helper_restore();
4375 gen_movl_TN_reg(rd
, cpu_dst
);
4377 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4378 case 0x3e: /* V9 done/retry */
4382 if (!supervisor(dc
))
4384 dc
->npc
= DYNAMIC_PC
;
4385 dc
->pc
= DYNAMIC_PC
;
4389 if (!supervisor(dc
))
4391 dc
->npc
= DYNAMIC_PC
;
4392 dc
->pc
= DYNAMIC_PC
;
4408 case 3: /* load/store instructions */
4410 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4412 /* flush pending conditional evaluations before exposing
4414 if (dc
->cc_op
!= CC_OP_FLAGS
) {
4415 dc
->cc_op
= CC_OP_FLAGS
;
4416 gen_helper_compute_psr();
4418 cpu_src1
= get_src1(insn
, cpu_src1
);
4419 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4420 rs2
= GET_FIELD(insn
, 27, 31);
4421 gen_movl_reg_TN(rs2
, cpu_src2
);
4422 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4423 } else if (IS_IMM
) { /* immediate */
4424 simm
= GET_FIELDs(insn
, 19, 31);
4425 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4426 } else { /* register */
4427 rs2
= GET_FIELD(insn
, 27, 31);
4429 gen_movl_reg_TN(rs2
, cpu_src2
);
4430 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4432 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4434 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4435 (xop
> 0x17 && xop
<= 0x1d ) ||
4436 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4438 case 0x0: /* ld, V9 lduw, load unsigned word */
4439 gen_address_mask(dc
, cpu_addr
);
4440 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4442 case 0x1: /* ldub, load unsigned byte */
4443 gen_address_mask(dc
, cpu_addr
);
4444 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4446 case 0x2: /* lduh, load unsigned halfword */
4447 gen_address_mask(dc
, cpu_addr
);
4448 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4450 case 0x3: /* ldd, load double word */
4456 save_state(dc
, cpu_cond
);
4457 r_const
= tcg_const_i32(7);
4458 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4459 tcg_temp_free_i32(r_const
);
4460 gen_address_mask(dc
, cpu_addr
);
4461 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4462 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4463 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4464 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4465 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4466 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4467 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4470 case 0x9: /* ldsb, load signed byte */
4471 gen_address_mask(dc
, cpu_addr
);
4472 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4474 case 0xa: /* ldsh, load signed halfword */
4475 gen_address_mask(dc
, cpu_addr
);
4476 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4478 case 0xd: /* ldstub -- XXX: should be atomically */
4482 gen_address_mask(dc
, cpu_addr
);
4483 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4484 r_const
= tcg_const_tl(0xff);
4485 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4486 tcg_temp_free(r_const
);
4489 case 0x0f: /* swap, swap register with memory. Also
4491 CHECK_IU_FEATURE(dc
, SWAP
);
4492 gen_movl_reg_TN(rd
, cpu_val
);
4493 gen_address_mask(dc
, cpu_addr
);
4494 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4495 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4496 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4498 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4499 case 0x10: /* lda, V9 lduwa, load word alternate */
4500 #ifndef TARGET_SPARC64
4503 if (!supervisor(dc
))
4506 save_state(dc
, cpu_cond
);
4507 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4509 case 0x11: /* lduba, load unsigned byte alternate */
4510 #ifndef TARGET_SPARC64
4513 if (!supervisor(dc
))
4516 save_state(dc
, cpu_cond
);
4517 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4519 case 0x12: /* lduha, load unsigned halfword alternate */
4520 #ifndef TARGET_SPARC64
4523 if (!supervisor(dc
))
4526 save_state(dc
, cpu_cond
);
4527 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4529 case 0x13: /* ldda, load double word alternate */
4530 #ifndef TARGET_SPARC64
4533 if (!supervisor(dc
))
4538 save_state(dc
, cpu_cond
);
4539 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4541 case 0x19: /* ldsba, load signed byte alternate */
4542 #ifndef TARGET_SPARC64
4545 if (!supervisor(dc
))
4548 save_state(dc
, cpu_cond
);
4549 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4551 case 0x1a: /* ldsha, load signed halfword alternate */
4552 #ifndef TARGET_SPARC64
4555 if (!supervisor(dc
))
4558 save_state(dc
, cpu_cond
);
4559 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4561 case 0x1d: /* ldstuba -- XXX: should be atomically */
4562 #ifndef TARGET_SPARC64
4565 if (!supervisor(dc
))
4568 save_state(dc
, cpu_cond
);
4569 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4571 case 0x1f: /* swapa, swap reg with alt. memory. Also
4573 CHECK_IU_FEATURE(dc
, SWAP
);
4574 #ifndef TARGET_SPARC64
4577 if (!supervisor(dc
))
4580 save_state(dc
, cpu_cond
);
4581 gen_movl_reg_TN(rd
, cpu_val
);
4582 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4585 #ifndef TARGET_SPARC64
4586 case 0x30: /* ldc */
4587 case 0x31: /* ldcsr */
4588 case 0x33: /* lddc */
4592 #ifdef TARGET_SPARC64
4593 case 0x08: /* V9 ldsw */
4594 gen_address_mask(dc
, cpu_addr
);
4595 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4597 case 0x0b: /* V9 ldx */
4598 gen_address_mask(dc
, cpu_addr
);
4599 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4601 case 0x18: /* V9 ldswa */
4602 save_state(dc
, cpu_cond
);
4603 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4605 case 0x1b: /* V9 ldxa */
4606 save_state(dc
, cpu_cond
);
4607 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4609 case 0x2d: /* V9 prefetch, no effect */
4611 case 0x30: /* V9 ldfa */
4612 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4615 save_state(dc
, cpu_cond
);
4616 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4617 gen_update_fprs_dirty(rd
);
4619 case 0x33: /* V9 lddfa */
4620 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4623 save_state(dc
, cpu_cond
);
4624 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4625 gen_update_fprs_dirty(DFPREG(rd
));
4627 case 0x3d: /* V9 prefetcha, no effect */
4629 case 0x32: /* V9 ldqfa */
4630 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4631 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4634 save_state(dc
, cpu_cond
);
4635 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4636 gen_update_fprs_dirty(QFPREG(rd
));
4642 gen_movl_TN_reg(rd
, cpu_val
);
4643 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4646 } else if (xop
>= 0x20 && xop
< 0x24) {
4647 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4649 save_state(dc
, cpu_cond
);
4651 case 0x20: /* ldf, load fpreg */
4652 gen_address_mask(dc
, cpu_addr
);
4653 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4654 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4655 gen_update_fprs_dirty(rd
);
4657 case 0x21: /* ldfsr, V9 ldxfsr */
4658 #ifdef TARGET_SPARC64
4659 gen_address_mask(dc
, cpu_addr
);
4661 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4662 gen_helper_ldxfsr(cpu_tmp64
);
4664 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4665 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
4666 gen_helper_ldfsr(cpu_tmp32
);
4670 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4671 gen_helper_ldfsr(cpu_tmp32
);
4675 case 0x22: /* ldqf, load quad fpreg */
4679 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4680 r_const
= tcg_const_i32(dc
->mem_idx
);
4681 gen_address_mask(dc
, cpu_addr
);
4682 gen_helper_ldqf(cpu_addr
, r_const
);
4683 tcg_temp_free_i32(r_const
);
4684 gen_op_store_QT0_fpr(QFPREG(rd
));
4685 gen_update_fprs_dirty(QFPREG(rd
));
4688 case 0x23: /* lddf, load double fpreg */
4692 r_const
= tcg_const_i32(dc
->mem_idx
);
4693 gen_address_mask(dc
, cpu_addr
);
4694 gen_helper_lddf(cpu_addr
, r_const
);
4695 tcg_temp_free_i32(r_const
);
4696 gen_op_store_DT0_fpr(DFPREG(rd
));
4697 gen_update_fprs_dirty(DFPREG(rd
));
4703 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4704 xop
== 0xe || xop
== 0x1e) {
4705 gen_movl_reg_TN(rd
, cpu_val
);
4707 case 0x4: /* st, store word */
4708 gen_address_mask(dc
, cpu_addr
);
4709 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4711 case 0x5: /* stb, store byte */
4712 gen_address_mask(dc
, cpu_addr
);
4713 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4715 case 0x6: /* sth, store halfword */
4716 gen_address_mask(dc
, cpu_addr
);
4717 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4719 case 0x7: /* std, store double word */
4725 save_state(dc
, cpu_cond
);
4726 gen_address_mask(dc
, cpu_addr
);
4727 r_const
= tcg_const_i32(7);
4728 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4729 tcg_temp_free_i32(r_const
);
4730 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4731 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4732 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4735 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4736 case 0x14: /* sta, V9 stwa, store word alternate */
4737 #ifndef TARGET_SPARC64
4740 if (!supervisor(dc
))
4743 save_state(dc
, cpu_cond
);
4744 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4745 dc
->npc
= DYNAMIC_PC
;
4747 case 0x15: /* stba, store byte alternate */
4748 #ifndef TARGET_SPARC64
4751 if (!supervisor(dc
))
4754 save_state(dc
, cpu_cond
);
4755 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4756 dc
->npc
= DYNAMIC_PC
;
4758 case 0x16: /* stha, store halfword alternate */
4759 #ifndef TARGET_SPARC64
4762 if (!supervisor(dc
))
4765 save_state(dc
, cpu_cond
);
4766 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4767 dc
->npc
= DYNAMIC_PC
;
4769 case 0x17: /* stda, store double word alternate */
4770 #ifndef TARGET_SPARC64
4773 if (!supervisor(dc
))
4779 save_state(dc
, cpu_cond
);
4780 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4784 #ifdef TARGET_SPARC64
4785 case 0x0e: /* V9 stx */
4786 gen_address_mask(dc
, cpu_addr
);
4787 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4789 case 0x1e: /* V9 stxa */
4790 save_state(dc
, cpu_cond
);
4791 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4792 dc
->npc
= DYNAMIC_PC
;
4798 } else if (xop
> 0x23 && xop
< 0x28) {
4799 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4801 save_state(dc
, cpu_cond
);
4803 case 0x24: /* stf, store fpreg */
4804 gen_address_mask(dc
, cpu_addr
);
4805 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4806 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4808 case 0x25: /* stfsr, V9 stxfsr */
4809 #ifdef TARGET_SPARC64
4810 gen_address_mask(dc
, cpu_addr
);
4811 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4813 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4815 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4817 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4818 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4822 #ifdef TARGET_SPARC64
4823 /* V9 stqf, store quad fpreg */
4827 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4828 gen_op_load_fpr_QT0(QFPREG(rd
));
4829 r_const
= tcg_const_i32(dc
->mem_idx
);
4830 gen_address_mask(dc
, cpu_addr
);
4831 gen_helper_stqf(cpu_addr
, r_const
);
4832 tcg_temp_free_i32(r_const
);
4835 #else /* !TARGET_SPARC64 */
4836 /* stdfq, store floating point queue */
4837 #if defined(CONFIG_USER_ONLY)
4840 if (!supervisor(dc
))
4842 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4847 case 0x27: /* stdf, store double fpreg */
4851 gen_op_load_fpr_DT0(DFPREG(rd
));
4852 r_const
= tcg_const_i32(dc
->mem_idx
);
4853 gen_address_mask(dc
, cpu_addr
);
4854 gen_helper_stdf(cpu_addr
, r_const
);
4855 tcg_temp_free_i32(r_const
);
4861 } else if (xop
> 0x33 && xop
< 0x3f) {
4862 save_state(dc
, cpu_cond
);
4864 #ifdef TARGET_SPARC64
4865 case 0x34: /* V9 stfa */
4866 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4869 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4871 case 0x36: /* V9 stqfa */
4875 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4876 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4879 r_const
= tcg_const_i32(7);
4880 gen_helper_check_align(cpu_addr
, r_const
);
4881 tcg_temp_free_i32(r_const
);
4882 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4885 case 0x37: /* V9 stdfa */
4886 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4889 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4891 case 0x3c: /* V9 casa */
4892 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4893 gen_movl_TN_reg(rd
, cpu_val
);
4895 case 0x3e: /* V9 casxa */
4896 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4897 gen_movl_TN_reg(rd
, cpu_val
);
4900 case 0x34: /* stc */
4901 case 0x35: /* stcsr */
4902 case 0x36: /* stdcq */
4903 case 0x37: /* stdc */
4914 /* default case for non jump instructions */
4915 if (dc
->npc
== DYNAMIC_PC
) {
4916 dc
->pc
= DYNAMIC_PC
;
4918 } else if (dc
->npc
== JUMP_PC
) {
4919 /* we can do a static jump */
4920 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4924 dc
->npc
= dc
->npc
+ 4;
4932 save_state(dc
, cpu_cond
);
4933 r_const
= tcg_const_i32(TT_ILL_INSN
);
4934 gen_helper_raise_exception(r_const
);
4935 tcg_temp_free_i32(r_const
);
4943 save_state(dc
, cpu_cond
);
4944 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4945 gen_helper_raise_exception(r_const
);
4946 tcg_temp_free_i32(r_const
);
4950 #if !defined(CONFIG_USER_ONLY)
4955 save_state(dc
, cpu_cond
);
4956 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4957 gen_helper_raise_exception(r_const
);
4958 tcg_temp_free_i32(r_const
);
4964 save_state(dc
, cpu_cond
);
4965 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4968 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4970 save_state(dc
, cpu_cond
);
4971 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4975 #ifndef TARGET_SPARC64
4980 save_state(dc
, cpu_cond
);
4981 r_const
= tcg_const_i32(TT_NCP_INSN
);
4982 gen_helper_raise_exception(r_const
);
4983 tcg_temp_free(r_const
);
4989 tcg_temp_free(cpu_tmp1
);
4990 tcg_temp_free(cpu_tmp2
);
4993 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4994 int spc
, CPUSPARCState
*env
)
4996 target_ulong pc_start
, last_pc
;
4997 uint16_t *gen_opc_end
;
4998 DisasContext dc1
, *dc
= &dc1
;
5004 memset(dc
, 0, sizeof(DisasContext
));
5009 dc
->npc
= (target_ulong
) tb
->cs_base
;
5010 dc
->cc_op
= CC_OP_DYNAMIC
;
5011 dc
->mem_idx
= cpu_mmu_index(env
);
5013 dc
->fpu_enabled
= tb_fpu_enabled(tb
->flags
);
5014 dc
->address_mask_32bit
= tb_am_enabled(tb
->flags
);
5015 dc
->singlestep
= (env
->singlestep_enabled
|| singlestep
);
5016 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
5018 cpu_tmp0
= tcg_temp_new();
5019 cpu_tmp32
= tcg_temp_new_i32();
5020 cpu_tmp64
= tcg_temp_new_i64();
5022 cpu_dst
= tcg_temp_local_new();
5025 cpu_val
= tcg_temp_local_new();
5026 cpu_addr
= tcg_temp_local_new();
5029 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5031 max_insns
= CF_COUNT_MASK
;
5034 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5035 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5036 if (bp
->pc
== dc
->pc
) {
5037 if (dc
->pc
!= pc_start
)
5038 save_state(dc
, cpu_cond
);
5047 qemu_log("Search PC...\n");
5048 j
= gen_opc_ptr
- gen_opc_buf
;
5052 gen_opc_instr_start
[lj
++] = 0;
5053 gen_opc_pc
[lj
] = dc
->pc
;
5054 gen_opc_npc
[lj
] = dc
->npc
;
5055 gen_opc_instr_start
[lj
] = 1;
5056 gen_opc_icount
[lj
] = num_insns
;
5059 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
5062 disas_sparc_insn(dc
);
5067 /* if the next PC is different, we abort now */
5068 if (dc
->pc
!= (last_pc
+ 4))
5070 /* if we reach a page boundary, we stop generation so that the
5071 PC of a TT_TFAULT exception is always in the right page */
5072 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
5074 /* if single step mode, we generate only one instruction and
5075 generate an exception */
5076 if (dc
->singlestep
) {
5079 } while ((gen_opc_ptr
< gen_opc_end
) &&
5080 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
5081 num_insns
< max_insns
);
5084 tcg_temp_free(cpu_addr
);
5085 tcg_temp_free(cpu_val
);
5086 tcg_temp_free(cpu_dst
);
5087 tcg_temp_free_i64(cpu_tmp64
);
5088 tcg_temp_free_i32(cpu_tmp32
);
5089 tcg_temp_free(cpu_tmp0
);
5090 if (tb
->cflags
& CF_LAST_IO
)
5093 if (dc
->pc
!= DYNAMIC_PC
&&
5094 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
5095 /* static PC and NPC: we can use direct chaining */
5096 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
5098 if (dc
->pc
!= DYNAMIC_PC
)
5099 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
5100 save_npc(dc
, cpu_cond
);
5104 gen_icount_end(tb
, num_insns
);
5105 *gen_opc_ptr
= INDEX_op_end
;
5107 j
= gen_opc_ptr
- gen_opc_buf
;
5110 gen_opc_instr_start
[lj
++] = 0;
5114 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
5115 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
5117 tb
->size
= last_pc
+ 4 - pc_start
;
5118 tb
->icount
= num_insns
;
5121 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5122 qemu_log("--------------\n");
5123 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5124 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
5130 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
5132 gen_intermediate_code_internal(tb
, 0, env
);
5135 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
5137 gen_intermediate_code_internal(tb
, 1, env
);
5140 void gen_intermediate_code_init(CPUSPARCState
*env
)
5144 static const char * const gregnames
[8] = {
5145 NULL
, // g0 not used
5154 static const char * const fregnames
[64] = {
5155 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5156 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5157 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5158 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5159 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5160 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5161 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5162 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5165 /* init various static tables */
5169 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
5170 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
5171 offsetof(CPUState
, regwptr
),
5173 #ifdef TARGET_SPARC64
5174 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
5176 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
5178 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
5180 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
5182 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5183 offsetof(CPUState
, tick_cmpr
),
5185 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5186 offsetof(CPUState
, stick_cmpr
),
5188 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5189 offsetof(CPUState
, hstick_cmpr
),
5191 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
5193 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
5195 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
5197 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
5198 offsetof(CPUState
, ssr
), "ssr");
5199 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
5200 offsetof(CPUState
, version
), "ver");
5201 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
5202 offsetof(CPUState
, softint
),
5205 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
5208 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
5210 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
5212 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
5213 offsetof(CPUState
, cc_src2
),
5215 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
5217 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
5219 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
5221 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
5223 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
5225 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
5227 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
5228 #ifndef CONFIG_USER_ONLY
5229 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
5232 for (i
= 1; i
< 8; i
++)
5233 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5234 offsetof(CPUState
, gregs
[i
]),
5236 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5237 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
5238 offsetof(CPUState
, fpr
[i
]),
5241 /* register helpers */
5243 #define GEN_HELPER 2
5248 void restore_state_to_opc(CPUState
*env
, TranslationBlock
*tb
, int pc_pos
)
5251 env
->pc
= gen_opc_pc
[pc_pos
];
5252 npc
= gen_opc_npc
[pc_pos
];
5254 /* dynamic NPC: already stored */
5255 } else if (npc
== 2) {
5256 /* jump PC: use 'cond' and the jump targets of the translation */
5258 env
->npc
= gen_opc_jump_pc
[0];
5260 env
->npc
= gen_opc_jump_pc
[1];
5266 /* flush pending conditional evaluations before exposing cpu state */
5267 if (CC_OP
!= CC_OP_FLAGS
) {
5268 helper_compute_psr();