/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
/* v9 specific */
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
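/* Illustration (not in the original file): for a straight-line instruction
   at 0x1000, pc == 0x1000 and npc == 0x1004.  After a taken delayed branch,
   pc points at the delay-slot instruction and npc at the branch target, so
   the (pc, npc) pair fully describes SPARC's delayed-control-transfer state.
   When the next PC is only known at run time, npc holds DYNAMIC_PC, or
   JUMP_PC when it is one of the two jump_pc[] candidates. */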
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
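/* Worked example (added for illustration): GET_FIELD(insn, 0, 1) is
   (insn >> 30) & 3, i.e. insn<31:30>, the SPARC "op" field, because FROM/TO
   count from the MSB here.  GET_FIELD_SP uses the manuals' numbering, so
   GET_FIELD_SP(insn, 20, 21) extracts insn<21:20>. */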
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
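/* Example (added for illustration): on sparc64 the 5-bit register field
   reuses bit 0 as the high bit of the double/quad register number, so
   DFPREG(1) == 32 (%d32) and DFPREG(3) == 34, while on sparc32 DFPREG()
   simply masks down to an even single-precision register index. */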
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
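/* Example (added for illustration): a 19-bit branch displacement with
   bit 18 set is negative; sign_extend(0x40000, 19) shifts left by
   32 - 19 = 13 giving 0x80000000, then arithmetic-shifts back, giving
   0xfffc0000. */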
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
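/* Example (added for illustration): gen_movl_TN_reg(0, t) is deliberately a
   no-op, preserving the SPARC rule that %g0 always reads as zero.  %g1..%g7
   live in TCG globals, while the windowed registers %o/%l/%i are reached
   indirectly through cpu_regwptr, which tracks the current register
   window. */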
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
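/* Example (added for illustration): the tagged instructions (TADDccTV etc.)
   treat the low two bits of each operand as a type tag; gen_tag_tv raises
   TT_TOVF as soon as (src1 | src2) & 3 is non-zero, i.e. when either operand
   carries a non-zero tag. */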
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
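/* Example (added for illustration): after cpu_cc_dst = cpu_cc_src +
   cpu_cc_src2 in 32 bits, the addition wrapped iff the result is
   unsigned-less-than an operand; e.g. 0xffffffff + 2 yields 1, and
   1 <u 0xffffffff, so the LTU setcond above recovers carry == 1 without
   needing the host's flags register. */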
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
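/* Note (added for illustration): ADDX computes dst = src1 + src2 + C.  When
   the flags were last set by an add or subtract, the carry is re-derived
   cheaply as above; otherwise gen_helper_compute_C_icc() materialises C from
   the lazily evaluated condition codes. */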
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
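/* Example of one MULScc step (added for illustration): the low bit of %y
   selects whether src2 or zero is added to the partial product; the low bit
   of the partial product shifts into the top of %y, and N ^ V from the
   previous step shifts into bit 31 of the partial product.  Executing 32
   such steps, as the classic software-multiply loop does, implements a
   32x32 signed multiply one bit at a time. */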
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
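/* Example (added for illustration): UMUL 0xffffffff * 2 = 0x1fffffffe; the
   high word (1) is copied into %y, and the destination receives the product
   truncated to the target word size (0xfffffffe on sparc32), matching the
   SPARC convention that the 64-bit product is split across rd and %y. */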
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
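/* Worked example (added for illustration): for SUBcc 1 - 2 the flags are
   N=1, Z=0, V=0, C=1, so gen_op_eval_bl computes N ^ V = 1 (signed
   less-than) and gen_op_eval_bcs returns C = 1 (unsigned borrow), matching
   the Bicc condition table in the architecture manual. */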
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
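/* Note (added for illustration): gen_branch_a() implements an annulling
   branch (",a" suffix).  When the condition holds it chains to
   (npc, target), so the delay-slot instruction at npc executes before the
   target; when it fails it chains to (npc + 4, npc + 8), skipping
   (annulling) the delay slot entirely. */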
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
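/* Example (added for illustration): BRZ has cond == 1 and
   gen_tcg_cond_reg[1] == TCG_COND_NE.  r_dst starts at 0 and the branch
   over the "movi 1" is taken when r_src != 0, so r_dst ends up 1 exactly
   when r_src == 0; the table is inverted because it encodes the condition
   for skipping the assignment. */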
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
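/* Example (added for illustration): with env->tl == 2 the code above
   computes r_tsptr = env + offsetof(CPUState, ts) + 2 * sizeof(trap_state),
   i.e. a pointer to env->ts[2]; the MAXTL_MASK clamp guarantees a corrupt
   tl value can never index past the trap-state array. */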
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                {
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
2338 } else if (xop
== 0x34) { /* FPU Operations */
2339 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2341 gen_op_clear_ieee_excp_and_FTT();
2342 rs1
= GET_FIELD(insn
, 13, 17);
2343 rs2
= GET_FIELD(insn
, 27, 31);
2344 xop
= GET_FIELD(insn
, 18, 26);
2345 save_state(dc
, cpu_cond
);
2347 case 0x1: /* fmovs */
2348 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2350 case 0x5: /* fnegs */
2351 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2353 case 0x9: /* fabss */
2354 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2356 case 0x29: /* fsqrts */
2357 CHECK_FPU_FEATURE(dc
, FSQRT
);
2358 gen_clear_float_exceptions();
2359 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2360 gen_helper_check_ieee_exceptions();
2361 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2363 case 0x2a: /* fsqrtd */
2364 CHECK_FPU_FEATURE(dc
, FSQRT
);
2365 gen_op_load_fpr_DT1(DFPREG(rs2
));
2366 gen_clear_float_exceptions();
2367 gen_helper_fsqrtd();
2368 gen_helper_check_ieee_exceptions();
2369 gen_op_store_DT0_fpr(DFPREG(rd
));
2371 case 0x2b: /* fsqrtq */
2372 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2373 gen_op_load_fpr_QT1(QFPREG(rs2
));
2374 gen_clear_float_exceptions();
2375 gen_helper_fsqrtq();
2376 gen_helper_check_ieee_exceptions();
2377 gen_op_store_QT0_fpr(QFPREG(rd
));
2379 case 0x41: /* fadds */
2380 gen_clear_float_exceptions();
2381 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2382 gen_helper_check_ieee_exceptions();
2383 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2385 case 0x42: /* faddd */
2386 gen_op_load_fpr_DT0(DFPREG(rs1
));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_faddd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x43: /* faddq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_faddq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x45: /* fsubs */
                gen_clear_float_exceptions();
                gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x46: /* fsubd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x47: /* fsubq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x49: /* fmuls */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_clear_float_exceptions();
                gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4a: /* fmuld */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmuld();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4b: /* fmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x4d: /* fdivs */
                gen_clear_float_exceptions();
                gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4e: /* fdivd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4f: /* fdivq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x69: /* fsmuld */
                CHECK_FPU_FEATURE(dc, FSMULD);
                gen_clear_float_exceptions();
                gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x6e: /* fdmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xc4: /* fitos */
                gen_clear_float_exceptions();
                gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc6: /* fdtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc7: /* fqtos */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc8: /* fitod */
                gen_helper_fitod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xc9: /* fstod */
                gen_helper_fstod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcb: /* fqtod */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcc: /* fitoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fitoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xcd: /* fstoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fstoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xce: /* fdtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fdtoq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xd1: /* fstoi */
                gen_clear_float_exceptions();
                gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd2: /* fdtoi */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd3: /* fqtoi */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
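            /* The V9-only cases below move or convert FP registers
               directly. fmovd/fmovq are pure bit copies of the 32-bit
               halves, so they need neither the DT0/DT1 staging nor the
               IEEE exception bracketing used by the arithmetic cases
               above. */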
#ifdef TARGET_SPARC64
            case 0x2: /* V9 fmovd */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x3: /* V9 fmovq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                break;
            case 0x6: /* V9 fnegd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fnegd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x7: /* V9 fnegq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fnegq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xa: /* V9 fabsd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fabsd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xb: /* V9 fabsq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fabsq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x81: /* V9 fstox */
                gen_clear_float_exceptions();
                gen_helper_fstox(cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x82: /* V9 fdtox */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x83: /* V9 fqtox */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x84: /* V9 fxtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x88: /* V9 fxtod */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x8c: /* V9 fxtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtoq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#endif
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x35) { /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
            save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
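            /* Conditional FP moves: each FMOV*CC expansion evaluates the
               condition into a temporary, branches over plain register
               moves when it is false, and lands on a shared label. The
               macros below only differ in how many 32-bit halves they
               copy (1, 2 or 4). */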
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc) \
                {                                                \
                    TCGv r_cond;                                 \
                    int l1;                                      \
                                                                 \
                    l1 = gen_new_label();                        \
                    r_cond = tcg_temp_new();                     \
                    cond = GET_FIELD_SP(insn, 14, 17);           \
                    gen_fcond(r_cond, fcc, cond);                \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,      \
                                       0, l1);                   \
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);  \
                    gen_set_label(l1);                           \
                    tcg_temp_free(r_cond);                       \
                }
#define FMOVDCC(fcc) \
                {                                                \
                    TCGv r_cond;                                 \
                    int l1;                                      \
                                                                 \
                    l1 = gen_new_label();                        \
                    r_cond = tcg_temp_new();                     \
                    cond = GET_FIELD_SP(insn, 14, 17);           \
                    gen_fcond(r_cond, fcc, cond);                \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,      \
                                       0, l1);                   \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],         \
                                    cpu_fpr[DFPREG(rs2)]);       \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],     \
                                    cpu_fpr[DFPREG(rs2) + 1]);   \
                    gen_set_label(l1);                           \
                    tcg_temp_free(r_cond);                       \
                }
#define FMOVQCC(fcc) \
                {                                                \
                    TCGv r_cond;                                 \
                    int l1;                                      \
                                                                 \
                    l1 = gen_new_label();                        \
                    r_cond = tcg_temp_new();                     \
                    cond = GET_FIELD_SP(insn, 14, 17);           \
                    gen_fcond(r_cond, fcc, cond);                \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,      \
                                       0, l1);                   \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],         \
                                    cpu_fpr[QFPREG(rs2)]);       \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],     \
                                    cpu_fpr[QFPREG(rs2) + 1]);   \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],     \
                                    cpu_fpr[QFPREG(rs2) + 2]);   \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],     \
                                    cpu_fpr[QFPREG(rs2) + 3]);   \
                    gen_set_label(l1);                           \
                    tcg_temp_free(r_cond);                       \
                }
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVSCC(0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVDCC(0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVSCC(1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVDCC(1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVSCC(2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVDCC(2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(2);
                break;
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVSCC(3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVDCC(3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(3);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc) \
                {                                                \
                    TCGv r_cond;                                 \
                    int l1;                                      \
                                                                 \
                    l1 = gen_new_label();                        \
                    r_cond = tcg_temp_new();                     \
                    cond = GET_FIELD_SP(insn, 14, 17);           \
                    gen_cond(r_cond, icc, cond, dc);             \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,      \
                                       0, l1);                   \
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);  \
                    gen_set_label(l1);                           \
                    tcg_temp_free(r_cond);                       \
                }
#define FMOVDCC(icc) \
                {                                                \
                    TCGv r_cond;                                 \
                    int l1;                                      \
                                                                 \
                    l1 = gen_new_label();                        \
                    r_cond = tcg_temp_new();                     \
                    cond = GET_FIELD_SP(insn, 14, 17);           \
                    gen_cond(r_cond, icc, cond, dc);             \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,      \
                                       0, l1);                   \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],         \
                                    cpu_fpr[DFPREG(rs2)]);       \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],     \
                                    cpu_fpr[DFPREG(rs2) + 1]);   \
                    gen_set_label(l1);                           \
                    tcg_temp_free(r_cond);                       \
                }
#define FMOVQCC(icc) \
                {                                                \
                    TCGv r_cond;                                 \
                    int l1;                                      \
                                                                 \
                    l1 = gen_new_label();                        \
                    r_cond = tcg_temp_new();                     \
                    cond = GET_FIELD_SP(insn, 14, 17);           \
                    gen_cond(r_cond, icc, cond, dc);             \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,      \
                                       0, l1);                   \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],         \
                                    cpu_fpr[QFPREG(rs2)]);       \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],     \
                                    cpu_fpr[QFPREG(rs2) + 1]);   \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],     \
                                    cpu_fpr[QFPREG(rs2) + 2]);   \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],     \
                                    cpu_fpr[QFPREG(rs2) + 3]);   \
                    gen_set_label(l1);                           \
                    tcg_temp_free(r_cond);                       \
                }
            case 0x101: /* V9 fmovscc %icc */
                FMOVSCC(0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVDCC(0);
                break;
            case 0x103: /* V9 fmovqcc %icc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x181: /* V9 fmovscc %xcc */
                FMOVSCC(1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVDCC(1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
            default:
                goto illegal_insn;
            }
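            /* fcmp{s,d,q} and the signaling fcmpe variants only update the
               condition field selected by rd & 3 (%fcc0..%fcc3 on V9); no
               FP register is written. */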
        } else if (xop == 0x2) {
            // clr/mov shortcut

            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {       /* immediate */
                    TCGv r_const;

                    simm = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl(simm);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {            /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {       /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {            /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
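        /* For the V9 shifts above, bit 12 of the instruction selects the
           64-bit form (sllx/srlx/srax, 6-bit shift count) versus the
           legacy 32-bit form (5-bit count operating on the low word). */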
        } else if (xop < 0x36) {
            if (xop < 0x20) {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop & ~0x10) {
                case 0x0: /* add */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x1: /* and */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x2: /* or */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x3: /* xor */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x4: /* sub */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                        } else {
                            tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
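                /* Note the lazy condition-code scheme used throughout this
                   switch: the 'cc' forms record their result in cpu_cc_dst
                   (and operands in cpu_cc_src/cpu_cc_src2) and tag the
                   pending operation in cpu_cc_op; the PSR flags are only
                   materialized when a later consumer calls
                   gen_helper_compute_psr(). */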
                case 0x5: /* andn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x6: /* orn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x7: /* xorn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x8: /* addx, V9 addc */
                    gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
#endif
                case 0xa: /* umul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xb: /* smul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xc: /* subx, V9 subc */
                    gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                    (xop & 0x10));
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                    tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                    gen_trap_ifdivzero_tl(cpu_cc_src2);
                    tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    if (xop & 0x10) {
                        gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
                        dc->cc_op = CC_OP_DIV;
                    } else {
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
                case 0xf: /* sdiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    if (xop & 0x10) {
                        gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
                        dc->cc_op = CC_OP_DIV;
                    } else {
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                    dc->cc_op = CC_OP_TADD;
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                    dc->cc_op = CC_OP_TSUB;
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                    dc->cc_op = CC_OP_TADDTV;
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                    dc->cc_op = CC_OP_TSUBTV;
                    break;
                case 0x24: /* mulscc */
                    gen_helper_compute_psr();
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                    dc->cc_op = CC_OP_ADD;
                    break;
#ifndef TARGET_SPARC64
                case 0x25: /* sll */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26: /* srl */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27: /* sra */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
                case 0x30:
                    {
                        switch (rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8 manual,
                                               nop on the microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                            tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc)) {
                                ; // XXX
                            }
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc, cpu_cond))
                                goto jmp_insn;
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* Softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_tmp64);
                            break;
                        case 0x15: /* Softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_tmp64);
                            break;
                        case 0x16: /* Softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_tmp64);
                            break;
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_dst);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;

                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation
                                      Counter */
                        case 0x12: /* Dispatch Control */
#endif
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
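                /* Writes to privileged state (wrpsr/wrpr/wrhpr below) can
                   invalidate translation-time assumptions, so they either
                   end the TB explicitly (save_state + exit_tb) or force
                   dc->npc to DYNAMIC_PC. */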
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0:
                            gen_helper_saved();
                            break;
                        case 1:
                            gen_helper_restored();
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            // XXX
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_helper_wrpsr(cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                        dc->cc_op = CC_OP_FLAGS;
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: // tpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 1: // tnpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tnpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 2: // tstate
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state,
                                                       tstate));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 3: // tt
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                               offsetof(trap_state, tt));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 4: // tick
                            {
                                TCGv_ptr r_tickptr;

                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 5: // tba
                            tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                            break;
                        case 6: // pstate
                            {
                                TCGv r_tmp = tcg_temp_local_new();

                                tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                save_state(dc, cpu_cond);
                                gen_helper_wrpstate(r_tmp);
                                tcg_temp_free(r_tmp);
                                dc->npc = DYNAMIC_PC;
                            }
                            break;
                        case 7: // tl
                            {
                                TCGv r_tmp = tcg_temp_local_new();

                                tcg_gen_mov_tl(r_tmp, cpu_tmp0);
                                save_state(dc, cpu_cond);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
                                tcg_temp_free(r_tmp);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                            }
                            break;
                        case 8: // pil
                            gen_helper_wrpil(cpu_tmp0);
                            break;
                        case 9: // cwp
                            gen_helper_wrcwp(cpu_tmp0);
                            break;
                        case 10: // cansave
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cansave));
                            break;
                        case 11: // canrestore
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    canrestore));
                            break;
                        case 12: // cleanwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cleanwin));
                            break;
                        case 13: // otherwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    otherwin));
                            break;
                        case 14: // wstate
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    wstate));
                            break;
                        case 16: // UA2005 gl
                            CHECK_IU_FEATURE(dc, GL);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, gl));
                            break;
                        case 26: // UA2005 strand status
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        if (dc->def->nwindows != 32)
                            tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                            (1 << dc->def->nwindows) - 1);
                        tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                    }
                    break;
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                        CHECK_IU_FEATURE(dc, HYPV);
                        if (!hypervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        switch (rd) {
                        case 0: // hpstate
                            // XXX gen_op_wrhpstate();
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 1: // htstate
                            // XXX gen_op_wrhtstate();
                            break;
                        case 3: // hintp
                            tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                            break;
                        case 5: // htba
                            tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                            break;
                        case 31: // hstick_cmpr
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, hstick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_hstick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 6: // hver readonly
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new();
                        if (insn & (1 << 18)) {
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond, dc);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond, dc);
                            else
                                goto illegal_insn;
                        } else {
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                        if (IS_IMM) { /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 10);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        tcg_temp_free(r_cond);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    {
                        cpu_src2 = get_src2(insn, cpu_src2);
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x2f: /* V9 movr */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                           cpu_src1, 0, l1);
                        if (IS_IMM) { /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 9);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
            }
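        /* xop 0x36: UltraSPARC VIS operations (V8 CPop1 otherwise). VIS
           works on the FP register file; the 64-bit forms stage their
           operands through DT0/DT1 just like the FPop cases above. */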
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;

            switch (opf) {
            case 0x000: /* VIS I edge8cc */
            case 0x001: /* VIS II edge8n */
            case 0x002: /* VIS I edge8lcc */
            case 0x003: /* VIS II edge8ln */
            case 0x004: /* VIS I edge16cc */
            case 0x005: /* VIS II edge16n */
            case 0x006: /* VIS I edge16lcc */
            case 0x007: /* VIS II edge16ln */
            case 0x008: /* VIS I edge32cc */
            case 0x009: /* VIS II edge32n */
            case 0x00a: /* VIS I edge32lcc */
            case 0x00b: /* VIS II edge32ln */
                // XXX
                goto illegal_insn;
            case 0x010: /* VIS I array8 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
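            /* array16 and array32 reuse the array8 helper and simply scale
               the resulting blocked address by the element size (<< 1 or
               << 2). */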
            case 0x018: /* VIS I alignaddr */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x019: /* VIS II bmask */
            case 0x01a: /* VIS I alignaddrl */
                // XXX
                goto illegal_insn;
            case 0x020: /* VIS I fcmple16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x022: /* VIS I fcmpne16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x024: /* VIS I fcmple32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x026: /* VIS I fcmpne32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x028: /* VIS I fcmpgt16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02a: /* VIS I fcmpeq16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02c: /* VIS I fcmpgt32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02e: /* VIS I fcmpeq32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x031: /* VIS I fmul8x16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x033: /* VIS I fmul8x16au */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16au();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x035: /* VIS I fmul8x16al */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16al();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x036: /* VIS I fmul8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x037: /* VIS I fmul8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x038: /* VIS I fmuld8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x039: /* VIS I fmuld8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x03a: /* VIS I fpack32 */
            case 0x03b: /* VIS I fpack16 */
            case 0x03d: /* VIS I fpackfix */
            case 0x03e: /* VIS I pdist */
                // XXX
                goto illegal_insn;
            case 0x048: /* VIS I faligndata */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_faligndata();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04b: /* VIS I fpmerge */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpmerge();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04c: /* VIS II bshuffle */
                // XXX
                goto illegal_insn;
            case 0x04d: /* VIS I fexpand */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fexpand();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x050: /* VIS I fpadd16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x051: /* VIS I fpadd16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x052: /* VIS I fpadd32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x053: /* VIS I fpadd32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x054: /* VIS I fpsub16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x055: /* VIS I fpsub16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x056: /* VIS I fpsub32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x057: /* VIS I fpsub32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
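            /* 0x060-0x07f are the VIS logical ops. The 's' suffix forms
               operate on a single 32-bit register; the others apply the
               same operation to an even/odd register pair addressed
               through DFPREG(). */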
            case 0x060: /* VIS I fzero */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                break;
            case 0x061: /* VIS I fzeros */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], 0);
                break;
            case 0x062: /* VIS I fnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                /* write the result back; leaving it in cpu_tmp32 would
                   lose it */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x063: /* VIS I fnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x064: /* VIS I fandnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x065: /* VIS I fandnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x066: /* VIS I fnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x067: /* VIS I fnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x068: /* VIS I fandnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                 cpu_fpr[DFPREG(rs1)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x069: /* VIS I fandnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x06a: /* VIS I fnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x06b: /* VIS I fnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x06c: /* VIS I fxor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x06d: /* VIS I fxors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x06e: /* VIS I fnand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                /* write the result back; leaving it in cpu_tmp32 would
                   lose it */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x06f: /* VIS I fnands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x070: /* VIS I fand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x071: /* VIS I fands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x072: /* VIS I fxnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x073: /* VIS I fxnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                break;
            case 0x074: /* VIS I fsrc1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x075: /* VIS I fsrc1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x076: /* VIS I fornot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x077: /* VIS I fornot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x078: /* VIS I fsrc2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x079: /* VIS I fsrc2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x07a: /* VIS I fornot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x07b: /* VIS I fornot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x07c: /* VIS I for */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                               cpu_fpr[DFPREG(rs2)]);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                               cpu_fpr[DFPREG(rs1) + 1],
                               cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x07d: /* VIS I fors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x07e: /* VIS I fone */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                break;
            case 0x07f: /* VIS I fones */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], -1);
                break;
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
                // XXX
                goto illegal_insn;
            default:
                goto illegal_insn;
            }
#else
            goto ncp_insn;
#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            TCGv_i32 r_const;

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) { /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else { /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) { /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else { /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38: /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39: /* rett, V9 return */
                {
                    TCGv_i32 r_const;

                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett();
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                gen_helper_flush(cpu_dst);
                break;
            case 0x3c: /* save */
                save_state(dc, cpu_cond);
                gen_helper_save();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d: /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e: /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_done();
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_retry();
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        }
        break;
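    /* Loads and stores: the effective address (rs1 + simm13 or rs1 + rs2)
       is computed once into cpu_addr; each access then applies
       gen_address_mask() (32-bit truncation when V9 PSTATE.AM is set) and
       uses dc->mem_idx to select the MMU context. */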
    case 3: /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) { /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else { /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0: /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1: /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2: /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3: /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9: /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa: /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd: /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f: /* swap, swap register with memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
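                /* Alternate-space (ASI) accesses follow; on pre-V9 targets
                   they are privileged and the immediate forms are illegal,
                   hence the checks under #ifndef TARGET_SPARC64. */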
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10: /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11: /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12: /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13: /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19: /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a: /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d: /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f: /* swapa, swap reg with alt. memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20: /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21: /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
                case 0x22: /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23: /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
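                /* std mirrors ldd: the odd register supplies the low word
                   and the even register the high word, concatenated into a
                   single 64-bit store. */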
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
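    /* casa/casxa are handled entirely in helpers, which perform the
       compare-and-swap against memory and leave the old value in cpu_val
       for the write-back to rd. */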
4755 /* default case for non jump instructions */
4756 if (dc
->npc
== DYNAMIC_PC
) {
4757 dc
->pc
= DYNAMIC_PC
;
4759 } else if (dc
->npc
== JUMP_PC
) {
4760 /* we can do a static jump */
4761 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4765 dc
->npc
= dc
->npc
+ 4;
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
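/* Translator main loop: decode one guest instruction at a time with
   disas_sparc_insn() until the block has to end.  When spc is non-zero
   ("search pc" mode) it also records per-opcode pc/npc metadata so the
   guest PC can be recovered after a fault in generated code. */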
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
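    /* The loop above ends on: a breakpoint (goto exit_gen_loop), a
       branch or exception (dc->is_br), a non-sequential pc, a guest
       page crossing, single-stepping, a nearly full opcode buffer, an
       oversized block, or an exhausted icount budget (max_insns). */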
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
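    /* When both pc and npc are compile-time constants, gen_goto_tb()
       emits a patchable direct jump so this TB can be chained to its
       successor without returning to the execution loop; otherwise the
       known values are written back to cpu_pc/cpu_npc and the TB exits
       through tcg_gen_exit_tb(). */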
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
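/* Same translator, but run in "search pc" mode (spc = 1): used when
   cpu_restore_state() needs the per-opcode pc/npc tables to map a host
   fault back to a guest PC. */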
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                           offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
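        /* Re-including "helper.h" with GEN_HELPER set to 2 below makes
           the DEF_HELPER_* macros expand to registration calls for each
           helper rather than the prototypes/wrappers they normally
           generate (the usual QEMU X-macro idiom). */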
        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
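/* Called after a fault to rebuild env->pc/env->npc from the tables
   recorded by a search-pc translation.  An npc of 1 encodes DYNAMIC_PC
   (env->npc already holds the real value); 2 encodes JUMP_PC, where
   env->cond selects between the two recorded jump targets. */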
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}