/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
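/* A note on PC tracking during translation: dc->pc and dc->npc normally
   hold known (static) guest addresses.  The sentinel DYNAMIC_PC means the
   value is only known at run time and lives in cpu_pc/cpu_npc, while
   JUMP_PC means npc is one of the two jump_pc[] values, selected by a
   condition that has been computed into a TCG register but not yet
   resolved; see flush_cond(), save_npc() and gen_mov_pc_npc() below. */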
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
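/* Condition codes are evaluated lazily: cc_op records which operation last
   set them, and gen_helper_compute_psr() is only invoked when the real
   PSR/CCR bits are actually needed (see save_state() and gen_cond()). */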
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
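/* Worked example: GET_FIELD(insn, 2, 6) expands to
   (insn >> (31 - 6)) & ((1 << 5) - 1) == (insn >> 25) & 0x1f,
   i.e. instruction bits 29..25 (the rd field), using the "bit 0 is the
   MSB" numbering.  GET_FIELD_SP(insn, 25, 29) extracts the same field
   using the manual's numbering.  The *s variants additionally
   sign-extend the extracted value. */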
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
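/* On SPARC64 there are 32 double-precision registers, and the 5-bit
   register fields encode bit 5 of the register number in their least
   significant bit; DFPREG/QFPREG fold that bit back in to form an index
   into cpu_fpr[].  Pre-V9 targets only have f0..f31, so the macros just
   align the number down to an even (double) or quad-aligned value. */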
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
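/* Doubles and quads are not held as separate TCG values here: a double
   occupies an even/odd pair of 32-bit cpu_fpr[] entries and a quad four
   consecutive entries.  The helpers below stage operands through the
   CPU_DoubleU/CPU_QuadU scratch slots dt0/dt1 and qt0/qt1 in
   CPUSPARCState, which is where the no-argument FP helpers expect to
   find their inputs and leave their results. */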
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
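/* Register access convention used above: %g0 always reads as zero and
   stores to it are discarded, %g1..%g7 are TCG globals (cpu_gregs[]),
   and the current window's %o/%l/%i registers are loaded and stored
   through cpu_regwptr, which points at the active register window. */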
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
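/* Direct block chaining: when both the branch target and the following
   npc stay on the same guest page as the current TB, tcg_gen_goto_tb()
   plus tcg_gen_exit_tb((tcg_target_long)tb + tb_num) allows the generated
   code to be patched later so it jumps straight into the next TB.
   Cross-page jumps simply store pc/npc and return to the main loop via
   exit_tb(0). */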
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
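/* The overflow trap check above uses the usual two's-complement rule:
   an addition overflows iff the operands have the same sign and the
   result's sign differs, i.e. bit 31 of ~(src1 ^ src2) & (src1 ^ dst)
   is set.  For example 0x7fffffff + 1 gives dst = 0x80000000; the
   operands agree in sign, the result does not, and TT_TOVF is raised. */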
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
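/* Both helpers recover the carry or borrow without consulting the PSR:
   for an addition the carry out is equivalent to the 32-bit result being
   unsigned-less-than either operand (dst < src), and for a subtraction
   the borrow is (src1 < src2), which is exactly what the TCG_COND_LTU
   setcond above computes.  On 64-bit targets the inputs are truncated
   to 32 bits first so the comparison only sees the icc-relevant word. */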
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
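/* MULScc implements one step of the classic SPARC multiply-step
   algorithm: the multiplicand (rs2) is added to the shifted partial
   product only if the low bit of %y is set, %y is shifted right with
   the low bit of the partial product inserted at the top, and the
   partial product itself is shifted right with (N xor V) inserted at
   the top.  Thirty-two such steps, plus a final correction, give a full
   32x32 multiply on CPUs without a hardware multiplier. */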
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
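/* SPARC branches execute one delay-slot instruction before reaching the
   target, which is why the translator carries a (pc, npc) pair instead
   of a single program counter: gen_mov_pc_npc() advances pc into the
   delay slot, while the branch itself only decides what npc becomes. */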
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }

    switch (cond) {
    case 0x0: gen_op_eval_bn(r_dst); break;
    case 0x1: gen_op_eval_be(r_dst, r_src); break;
    case 0x2: gen_op_eval_ble(r_dst, r_src); break;
    case 0x3: gen_op_eval_bl(r_dst, r_src); break;
    case 0x4: gen_op_eval_bleu(r_dst, r_src); break;
    case 0x5: gen_op_eval_bcs(r_dst, r_src); break;
    case 0x6: gen_op_eval_bneg(r_dst, r_src); break;
    case 0x7: gen_op_eval_bvs(r_dst, r_src); break;
    case 0x8: gen_op_eval_ba(r_dst); break;
    case 0x9: gen_op_eval_bne(r_dst, r_src); break;
    case 0xa: gen_op_eval_bg(r_dst, r_src); break;
    case 0xb: gen_op_eval_bge(r_dst, r_src); break;
    case 0xc: gen_op_eval_bgu(r_dst, r_src); break;
    case 0xd: gen_op_eval_bcc(r_dst, r_src); break;
    case 0xe: gen_op_eval_bpos(r_dst, r_src); break;
    case 0xf: gen_op_eval_bvc(r_dst, r_src); break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0: gen_op_eval_bn(r_dst); break;
    case 0x1: gen_op_eval_fbne(r_dst, cpu_fsr, offset); break;
    case 0x2: gen_op_eval_fblg(r_dst, cpu_fsr, offset); break;
    case 0x3: gen_op_eval_fbul(r_dst, cpu_fsr, offset); break;
    case 0x4: gen_op_eval_fbl(r_dst, cpu_fsr, offset); break;
    case 0x5: gen_op_eval_fbug(r_dst, cpu_fsr, offset); break;
    case 0x6: gen_op_eval_fbg(r_dst, cpu_fsr, offset); break;
    case 0x7: gen_op_eval_fbu(r_dst, cpu_fsr, offset); break;
    case 0x8: gen_op_eval_ba(r_dst); break;
    case 0x9: gen_op_eval_fbe(r_dst, cpu_fsr, offset); break;
    case 0xa: gen_op_eval_fbue(r_dst, cpu_fsr, offset); break;
    case 0xb: gen_op_eval_fbge(r_dst, cpu_fsr, offset); break;
    case 0xc: gen_op_eval_fbuge(r_dst, cpu_fsr, offset); break;
    case 0xd: gen_op_eval_fble(r_dst, cpu_fsr, offset); break;
    case 0xe: gen_op_eval_fbule(r_dst, cpu_fsr, offset); break;
    case 0xf: gen_op_eval_fbo(r_dst, cpu_fsr, offset); break;
    }
}
#ifdef TARGET_SPARC64
/* inverted conditions for the register branches (BPr) */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0: gen_helper_fcmps(r_rs1, r_rs2); break;
    case 1: gen_helper_fcmps_fcc1(r_rs1, r_rs2); break;
    case 2: gen_helper_fcmps_fcc2(r_rs1, r_rs2); break;
    case 3: gen_helper_fcmps_fcc3(r_rs1, r_rs2); break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmpd(); break;
    case 1: gen_helper_fcmpd_fcc1(); break;
    case 2: gen_helper_fcmpd_fcc2(); break;
    case 3: gen_helper_fcmpd_fcc3(); break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmpq(); break;
    case 1: gen_helper_fcmpq_fcc1(); break;
    case 2: gen_helper_fcmpq_fcc2(); break;
    case 3: gen_helper_fcmpq_fcc3(); break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0: gen_helper_fcmpes(r_rs1, r_rs2); break;
    case 1: gen_helper_fcmpes_fcc1(r_rs1, r_rs2); break;
    case 2: gen_helper_fcmpes_fcc2(r_rs1, r_rs2); break;
    case 3: gen_helper_fcmpes_fcc3(r_rs1, r_rs2); break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmped(); break;
    case 1: gen_helper_fcmped_fcc1(); break;
    case 2: gen_helper_fcmped_fcc2(); break;
    case 3: gen_helper_fcmped_fcc3(); break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmpeq(); break;
    case 1: gen_helper_fcmpeq_fcc1(); break;
    case 2: gen_helper_fcmpeq_fcc2(); break;
    case 3: gen_helper_fcmpeq_fcc3(); break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
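/* Alternate-space (ASI) accesses are routed through helpers rather than
   generated inline, because an ASI can select MMU-bypass, little-endian
   or device behaviour that is only resolved at run time.  On SPARC64 the
   ASI comes either from the immediate field or from the %asi register
   (gen_get_asi() above); pre-V9 code always takes it from the
   instruction and is only reachable in privileged mode. */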
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
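/* get_src1()/get_src2() return the TCGv to use for an operand: a global
   register is returned directly (cpu_gregs[rs]), %g0 and immediates are
   materialised into the caller-provided temporary, and window registers
   are loaded from cpu_regwptr into it.  The 13-bit immediate of format-3
   instructions is sign-extended by GET_FIELDs(insn, 19, 31). */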
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
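/* The current trap level indexes the env->ts[] array of trap_state
   records: masking with MAXTL_MASK (a power of two) keeps the index in
   range, and the scaled index is added to the base offset of ts[] to
   form a host pointer to the active trap state. */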
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { /* nop if rd == 0 */
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /* CALL */
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
2343 } else if (xop
== 0x34) { /* FPU Operations */
2344 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2346 gen_op_clear_ieee_excp_and_FTT();
2347 rs1
= GET_FIELD(insn
, 13, 17);
2348 rs2
= GET_FIELD(insn
, 27, 31);
2349 xop
= GET_FIELD(insn
, 18, 26);
2350 save_state(dc
, cpu_cond
);
2352 case 0x1: /* fmovs */
2353 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2355 case 0x5: /* fnegs */
2356 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2358 case 0x9: /* fabss */
2359 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2361 case 0x29: /* fsqrts */
2362 CHECK_FPU_FEATURE(dc
, FSQRT
);
2363 gen_clear_float_exceptions();
2364 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2365 gen_helper_check_ieee_exceptions();
2366 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2368 case 0x2a: /* fsqrtd */
2369 CHECK_FPU_FEATURE(dc
, FSQRT
);
2370 gen_op_load_fpr_DT1(DFPREG(rs2
));
2371 gen_clear_float_exceptions();
2372 gen_helper_fsqrtd();
2373 gen_helper_check_ieee_exceptions();
2374 gen_op_store_DT0_fpr(DFPREG(rd
));
2376 case 0x2b: /* fsqrtq */
2377 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2378 gen_op_load_fpr_QT1(QFPREG(rs2
));
2379 gen_clear_float_exceptions();
2380 gen_helper_fsqrtq();
2381 gen_helper_check_ieee_exceptions();
2382 gen_op_store_QT0_fpr(QFPREG(rd
));
2384 case 0x41: /* fadds */
2385 gen_clear_float_exceptions();
2386 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2387 gen_helper_check_ieee_exceptions();
2388 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2390 case 0x42: /* faddd */
2391 gen_op_load_fpr_DT0(DFPREG(rs1
));
2392 gen_op_load_fpr_DT1(DFPREG(rs2
));
2393 gen_clear_float_exceptions();
2395 gen_helper_check_ieee_exceptions();
2396 gen_op_store_DT0_fpr(DFPREG(rd
));
2398 case 0x43: /* faddq */
2399 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2400 gen_op_load_fpr_QT0(QFPREG(rs1
));
2401 gen_op_load_fpr_QT1(QFPREG(rs2
));
2402 gen_clear_float_exceptions();
2404 gen_helper_check_ieee_exceptions();
2405 gen_op_store_QT0_fpr(QFPREG(rd
));
2407 case 0x45: /* fsubs */
2408 gen_clear_float_exceptions();
2409 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2410 gen_helper_check_ieee_exceptions();
2411 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2413 case 0x46: /* fsubd */
2414 gen_op_load_fpr_DT0(DFPREG(rs1
));
2415 gen_op_load_fpr_DT1(DFPREG(rs2
));
2416 gen_clear_float_exceptions();
2418 gen_helper_check_ieee_exceptions();
2419 gen_op_store_DT0_fpr(DFPREG(rd
));
2421 case 0x47: /* fsubq */
2422 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2423 gen_op_load_fpr_QT0(QFPREG(rs1
));
2424 gen_op_load_fpr_QT1(QFPREG(rs2
));
2425 gen_clear_float_exceptions();
2427 gen_helper_check_ieee_exceptions();
2428 gen_op_store_QT0_fpr(QFPREG(rd
));
2430 case 0x49: /* fmuls */
2431 CHECK_FPU_FEATURE(dc
, FMUL
);
2432 gen_clear_float_exceptions();
2433 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2434 gen_helper_check_ieee_exceptions();
2435 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2437 case 0x4a: /* fmuld */
2438 CHECK_FPU_FEATURE(dc
, FMUL
);
2439 gen_op_load_fpr_DT0(DFPREG(rs1
));
2440 gen_op_load_fpr_DT1(DFPREG(rs2
));
2441 gen_clear_float_exceptions();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_DT0_fpr(DFPREG(rd
));
2446 case 0x4b: /* fmulq */
2447 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2448 CHECK_FPU_FEATURE(dc
, FMUL
);
2449 gen_op_load_fpr_QT0(QFPREG(rs1
));
2450 gen_op_load_fpr_QT1(QFPREG(rs2
));
2451 gen_clear_float_exceptions();
2453 gen_helper_check_ieee_exceptions();
2454 gen_op_store_QT0_fpr(QFPREG(rd
));
2456 case 0x4d: /* fdivs */
2457 gen_clear_float_exceptions();
2458 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2459 gen_helper_check_ieee_exceptions();
2460 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2462 case 0x4e: /* fdivd */
2463 gen_op_load_fpr_DT0(DFPREG(rs1
));
2464 gen_op_load_fpr_DT1(DFPREG(rs2
));
2465 gen_clear_float_exceptions();
2467 gen_helper_check_ieee_exceptions();
2468 gen_op_store_DT0_fpr(DFPREG(rd
));
2470 case 0x4f: /* fdivq */
2471 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2472 gen_op_load_fpr_QT0(QFPREG(rs1
));
2473 gen_op_load_fpr_QT1(QFPREG(rs2
));
2474 gen_clear_float_exceptions();
2476 gen_helper_check_ieee_exceptions();
2477 gen_op_store_QT0_fpr(QFPREG(rd
));
2479 case 0x69: /* fsmuld */
2480 CHECK_FPU_FEATURE(dc
, FSMULD
);
2481 gen_clear_float_exceptions();
2482 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2483 gen_helper_check_ieee_exceptions();
2484 gen_op_store_DT0_fpr(DFPREG(rd
));
2486 case 0x6e: /* fdmulq */
2487 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2488 gen_op_load_fpr_DT0(DFPREG(rs1
));
2489 gen_op_load_fpr_DT1(DFPREG(rs2
));
2490 gen_clear_float_exceptions();
2491 gen_helper_fdmulq();
2492 gen_helper_check_ieee_exceptions();
2493 gen_op_store_QT0_fpr(QFPREG(rd
));
            case 0xc4: /* fitos */
                gen_clear_float_exceptions();
                gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc6: /* fdtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc7: /* fqtos */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
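            /* The conversions above round towards single precision and keep
             * the clear/check IEEE-exception bracketing; the exact, widening
             * conversions below (fitod, fstod, fitoq, fstoq, fdtoq) omit it,
             * while fqtod keeps it because it can round. */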
            case 0xc8: /* fitod */
                gen_helper_fitod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xc9: /* fstod */
                gen_helper_fstod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcb: /* fqtod */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcc: /* fitoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fitoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xcd: /* fstoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fstoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xce: /* fdtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fdtoq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xd1: /* fstoi */
                gen_clear_float_exceptions();
                gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd2: /* fdtoi */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd3: /* fqtoi */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
#ifdef TARGET_SPARC64
            case 0x2: /* V9 fmovd */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x3: /* V9 fmovq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                break;
            case 0x6: /* V9 fnegd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fnegd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x7: /* V9 fnegq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fnegq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xa: /* V9 fabsd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fabsd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xb: /* V9 fabsq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fabsq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x81: /* V9 fstox */
                gen_clear_float_exceptions();
                gen_helper_fstox(cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x82: /* V9 fdtox */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x83: /* V9 fqtox */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x84: /* V9 fxtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x88: /* V9 fxtod */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x8c: /* V9 fxtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtoq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#endif
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
            save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
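            /* The FMOV*CC macros that follow all expand to the same pattern:
             * evaluate the selected condition into r_cond, branch over the
             * register-to-register copy when the condition is false, then set
             * the label and free the temporary. */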
2707 #ifdef TARGET_SPARC64
2708 #define FMOVSCC(fcc) \
2713 l1 = gen_new_label(); \
2714 r_cond = tcg_temp_new(); \
2715 cond = GET_FIELD_SP(insn, 14, 17); \
2716 gen_fcond(r_cond, fcc, cond); \
2717 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2719 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2720 gen_set_label(l1); \
2721 tcg_temp_free(r_cond); \
2723 #define FMOVDCC(fcc) \
2728 l1 = gen_new_label(); \
2729 r_cond = tcg_temp_new(); \
2730 cond = GET_FIELD_SP(insn, 14, 17); \
2731 gen_fcond(r_cond, fcc, cond); \
2732 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2734 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2735 cpu_fpr[DFPREG(rs2)]); \
2736 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2737 cpu_fpr[DFPREG(rs2) + 1]); \
2738 gen_set_label(l1); \
2739 tcg_temp_free(r_cond); \
2741 #define FMOVQCC(fcc) \
2746 l1 = gen_new_label(); \
2747 r_cond = tcg_temp_new(); \
2748 cond = GET_FIELD_SP(insn, 14, 17); \
2749 gen_fcond(r_cond, fcc, cond); \
2750 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2752 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2753 cpu_fpr[QFPREG(rs2)]); \
2754 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2755 cpu_fpr[QFPREG(rs2) + 1]); \
2756 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2757 cpu_fpr[QFPREG(rs2) + 2]); \
2758 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2759 cpu_fpr[QFPREG(rs2) + 3]); \
2760 gen_set_label(l1); \
2761 tcg_temp_free(r_cond); \
2763 case 0x001: /* V9 fmovscc %fcc0 */
2766 case 0x002: /* V9 fmovdcc %fcc0 */
2769 case 0x003: /* V9 fmovqcc %fcc0 */
2770 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2773 case 0x041: /* V9 fmovscc %fcc1 */
2776 case 0x042: /* V9 fmovdcc %fcc1 */
2779 case 0x043: /* V9 fmovqcc %fcc1 */
2780 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2783 case 0x081: /* V9 fmovscc %fcc2 */
2786 case 0x082: /* V9 fmovdcc %fcc2 */
2789 case 0x083: /* V9 fmovqcc %fcc2 */
2790 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2793 case 0x0c1: /* V9 fmovscc %fcc3 */
2796 case 0x0c2: /* V9 fmovdcc %fcc3 */
2799 case 0x0c3: /* V9 fmovqcc %fcc3 */
2800 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2806 #define FMOVSCC(icc) \
2811 l1 = gen_new_label(); \
2812 r_cond = tcg_temp_new(); \
2813 cond = GET_FIELD_SP(insn, 14, 17); \
2814 gen_cond(r_cond, icc, cond, dc); \
2815 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2817 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2818 gen_set_label(l1); \
2819 tcg_temp_free(r_cond); \
2821 #define FMOVDCC(icc) \
2826 l1 = gen_new_label(); \
2827 r_cond = tcg_temp_new(); \
2828 cond = GET_FIELD_SP(insn, 14, 17); \
2829 gen_cond(r_cond, icc, cond, dc); \
2830 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2832 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2833 cpu_fpr[DFPREG(rs2)]); \
2834 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2835 cpu_fpr[DFPREG(rs2) + 1]); \
2836 gen_set_label(l1); \
2837 tcg_temp_free(r_cond); \
2839 #define FMOVQCC(icc) \
2844 l1 = gen_new_label(); \
2845 r_cond = tcg_temp_new(); \
2846 cond = GET_FIELD_SP(insn, 14, 17); \
2847 gen_cond(r_cond, icc, cond, dc); \
2848 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2850 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2851 cpu_fpr[QFPREG(rs2)]); \
2852 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2853 cpu_fpr[QFPREG(rs2) + 1]); \
2854 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2855 cpu_fpr[QFPREG(rs2) + 2]); \
2856 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2857 cpu_fpr[QFPREG(rs2) + 3]); \
2858 gen_set_label(l1); \
2859 tcg_temp_free(r_cond); \
2862 case 0x101: /* V9 fmovscc %icc */
2865 case 0x102: /* V9 fmovdcc %icc */
2867 case 0x103: /* V9 fmovqcc %icc */
2868 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2871 case 0x181: /* V9 fmovscc %xcc */
2874 case 0x182: /* V9 fmovdcc %xcc */
2877 case 0x183: /* V9 fmovqcc %xcc */
2878 CHECK_FPU_FEATURE(dc
, FLOAT128
);
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x2) {
            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {       /* immediate */
                    TCGv r_const;

                    simm = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl(simm);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {            /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {       /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {            /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
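        /* For the V9 shift forms handled below, bit 12 of the instruction
         * selects the 64-bit variant (sllx/srlx/srax, 6-bit shift count);
         * otherwise the 32-bit semantics apply (5-bit count, with the source
         * masked or sign-extended to 32 bits first). */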
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
#endif
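        /* The xop < 0x36 block below handles the integer ALU forms; the
         * cc-setting variants record the pending operation in cpu_cc_op and
         * dc->cc_op so the PSR flags can be computed lazily (see
         * gen_helper_compute_psr) rather than after every instruction. */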
3021 } else if (xop
< 0x36) {
3023 cpu_src1
= get_src1(insn
, cpu_src1
);
3024 cpu_src2
= get_src2(insn
, cpu_src2
);
3025 switch (xop
& ~0x10) {
3028 simm
= GET_FIELDs(insn
, 19, 31);
3030 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
3031 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3032 dc
->cc_op
= CC_OP_ADD
;
3034 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3038 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3039 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3040 dc
->cc_op
= CC_OP_ADD
;
3042 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3048 simm
= GET_FIELDs(insn
, 19, 31);
3049 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
3051 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3054 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3055 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3056 dc
->cc_op
= CC_OP_LOGIC
;
3061 simm
= GET_FIELDs(insn
, 19, 31);
3062 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3064 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3067 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3068 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3069 dc
->cc_op
= CC_OP_LOGIC
;
3074 simm
= GET_FIELDs(insn
, 19, 31);
3075 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
3077 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3080 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3081 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3082 dc
->cc_op
= CC_OP_LOGIC
;
3087 simm
= GET_FIELDs(insn
, 19, 31);
3089 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
3091 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
3095 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3096 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
3097 dc
->cc_op
= CC_OP_SUB
;
3099 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3103 case 0x5: /* andn */
3105 simm
= GET_FIELDs(insn
, 19, 31);
3106 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3108 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3111 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3112 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3113 dc
->cc_op
= CC_OP_LOGIC
;
3118 simm
= GET_FIELDs(insn
, 19, 31);
3119 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3121 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3124 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3125 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3126 dc
->cc_op
= CC_OP_LOGIC
;
3129 case 0x7: /* xorn */
3131 simm
= GET_FIELDs(insn
, 19, 31);
3132 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3134 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3135 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3138 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3139 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3140 dc
->cc_op
= CC_OP_LOGIC
;
3143 case 0x8: /* addx, V9 addc */
3144 gen_op_addx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3147 #ifdef TARGET_SPARC64
3148 case 0x9: /* V9 mulx */
3150 simm
= GET_FIELDs(insn
, 19, 31);
3151 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3153 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3157 case 0xa: /* umul */
3158 CHECK_IU_FEATURE(dc
, MUL
);
3159 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3161 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3162 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3163 dc
->cc_op
= CC_OP_LOGIC
;
3166 case 0xb: /* smul */
3167 CHECK_IU_FEATURE(dc
, MUL
);
3168 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3170 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3171 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3172 dc
->cc_op
= CC_OP_LOGIC
;
3175 case 0xc: /* subx, V9 subc */
3176 gen_op_subx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3179 #ifdef TARGET_SPARC64
3180 case 0xd: /* V9 udivx */
3182 TCGv r_temp1
, r_temp2
;
3183 r_temp1
= tcg_temp_local_new();
3184 r_temp2
= tcg_temp_local_new();
3185 tcg_gen_mov_tl(r_temp1
, cpu_src1
);
3186 tcg_gen_mov_tl(r_temp2
, cpu_src2
);
3187 gen_trap_ifdivzero_tl(r_temp2
);
3188 tcg_gen_divu_i64(cpu_dst
, r_temp1
, r_temp2
);
3189 tcg_temp_free(r_temp1
);
3190 tcg_temp_free(r_temp2
);
3194 case 0xe: /* udiv */
3195 CHECK_IU_FEATURE(dc
, DIV
);
3197 gen_helper_udiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3198 dc
->cc_op
= CC_OP_DIV
;
3200 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3203 case 0xf: /* sdiv */
3204 CHECK_IU_FEATURE(dc
, DIV
);
3206 gen_helper_sdiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3207 dc
->cc_op
= CC_OP_DIV
;
3209 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3215 gen_movl_TN_reg(rd
, cpu_dst
);
3217 cpu_src1
= get_src1(insn
, cpu_src1
);
3218 cpu_src2
= get_src2(insn
, cpu_src2
);
3220 case 0x20: /* taddcc */
3221 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3222 gen_movl_TN_reg(rd
, cpu_dst
);
3223 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3224 dc
->cc_op
= CC_OP_TADD
;
3226 case 0x21: /* tsubcc */
3227 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3228 gen_movl_TN_reg(rd
, cpu_dst
);
3229 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3230 dc
->cc_op
= CC_OP_TSUB
;
3232 case 0x22: /* taddcctv */
3233 save_state(dc
, cpu_cond
);
3234 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3235 gen_movl_TN_reg(rd
, cpu_dst
);
3236 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3237 dc
->cc_op
= CC_OP_TADDTV
;
3239 case 0x23: /* tsubcctv */
3240 save_state(dc
, cpu_cond
);
3241 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3242 gen_movl_TN_reg(rd
, cpu_dst
);
3243 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3244 dc
->cc_op
= CC_OP_TSUBTV
;
3246 case 0x24: /* mulscc */
3247 gen_helper_compute_psr();
3248 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3249 gen_movl_TN_reg(rd
, cpu_dst
);
3250 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3251 dc
->cc_op
= CC_OP_ADD
;
3253 #ifndef TARGET_SPARC64
3254 case 0x25: /* sll */
3255 if (IS_IMM
) { /* immediate */
3256 simm
= GET_FIELDs(insn
, 20, 31);
3257 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3258 } else { /* register */
3259 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3260 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3262 gen_movl_TN_reg(rd
, cpu_dst
);
3264 case 0x26: /* srl */
3265 if (IS_IMM
) { /* immediate */
3266 simm
= GET_FIELDs(insn
, 20, 31);
3267 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3268 } else { /* register */
3269 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3270 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3272 gen_movl_TN_reg(rd
, cpu_dst
);
3274 case 0x27: /* sra */
3275 if (IS_IMM
) { /* immediate */
3276 simm
= GET_FIELDs(insn
, 20, 31);
3277 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3278 } else { /* register */
3279 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3280 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3282 gen_movl_TN_reg(rd
, cpu_dst
);
3289 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3290 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3292 #ifndef TARGET_SPARC64
3293 case 0x01 ... 0x0f: /* undefined in the
3297 case 0x10 ... 0x1f: /* implementation-dependent
3303 case 0x2: /* V9 wrccr */
3304 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3305 gen_helper_wrccr(cpu_dst
);
3306 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3307 dc
->cc_op
= CC_OP_FLAGS
;
3309 case 0x3: /* V9 wrasi */
3310 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3311 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3312 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3314 case 0x6: /* V9 wrfprs */
3315 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3316 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3317 save_state(dc
, cpu_cond
);
3322 case 0xf: /* V9 sir, nop if user */
3323 #if !defined(CONFIG_USER_ONLY)
3324 if (supervisor(dc
)) {
3329 case 0x13: /* Graphics Status */
3330 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3332 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3334 case 0x14: /* Softint set */
3335 if (!supervisor(dc
))
3337 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3338 gen_helper_set_softint(cpu_tmp64
);
3340 case 0x15: /* Softint clear */
3341 if (!supervisor(dc
))
3343 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3344 gen_helper_clear_softint(cpu_tmp64
);
3346 case 0x16: /* Softint write */
3347 if (!supervisor(dc
))
3349 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3350 gen_helper_write_softint(cpu_tmp64
);
3352 case 0x17: /* Tick compare */
3353 #if !defined(CONFIG_USER_ONLY)
3354 if (!supervisor(dc
))
3360 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3362 r_tickptr
= tcg_temp_new_ptr();
3363 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3364 offsetof(CPUState
, tick
));
3365 gen_helper_tick_set_limit(r_tickptr
,
3367 tcg_temp_free_ptr(r_tickptr
);
3370 case 0x18: /* System tick */
3371 #if !defined(CONFIG_USER_ONLY)
3372 if (!supervisor(dc
))
3378 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3380 r_tickptr
= tcg_temp_new_ptr();
3381 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3382 offsetof(CPUState
, stick
));
3383 gen_helper_tick_set_count(r_tickptr
,
3385 tcg_temp_free_ptr(r_tickptr
);
3388 case 0x19: /* System tick compare */
3389 #if !defined(CONFIG_USER_ONLY)
3390 if (!supervisor(dc
))
3396 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3398 r_tickptr
= tcg_temp_new_ptr();
3399 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3400 offsetof(CPUState
, stick
));
3401 gen_helper_tick_set_limit(r_tickptr
,
3403 tcg_temp_free_ptr(r_tickptr
);
3407 case 0x10: /* Performance Control */
3408 case 0x11: /* Performance Instrumentation
3410 case 0x12: /* Dispatch Control */
3417 #if !defined(CONFIG_USER_ONLY)
3418 case 0x31: /* wrpsr, V9 saved, restored */
3420 if (!supervisor(dc
))
3422 #ifdef TARGET_SPARC64
3428 gen_helper_restored();
3430 case 2: /* UA2005 allclean */
3431 case 3: /* UA2005 otherw */
3432 case 4: /* UA2005 normalw */
3433 case 5: /* UA2005 invalw */
3439 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3440 gen_helper_wrpsr(cpu_dst
);
3441 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3442 dc
->cc_op
= CC_OP_FLAGS
;
3443 save_state(dc
, cpu_cond
);
3450 case 0x32: /* wrwim, V9 wrpr */
3452 if (!supervisor(dc
))
3454 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3455 #ifdef TARGET_SPARC64
3461 r_tsptr
= tcg_temp_new_ptr();
3462 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3463 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3464 offsetof(trap_state
, tpc
));
3465 tcg_temp_free_ptr(r_tsptr
);
3472 r_tsptr
= tcg_temp_new_ptr();
3473 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3474 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3475 offsetof(trap_state
, tnpc
));
3476 tcg_temp_free_ptr(r_tsptr
);
3483 r_tsptr
= tcg_temp_new_ptr();
3484 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3485 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3486 offsetof(trap_state
,
3488 tcg_temp_free_ptr(r_tsptr
);
3495 r_tsptr
= tcg_temp_new_ptr();
3496 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3497 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3498 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3499 offsetof(trap_state
, tt
));
3500 tcg_temp_free_ptr(r_tsptr
);
3507 r_tickptr
= tcg_temp_new_ptr();
3508 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3509 offsetof(CPUState
, tick
));
3510 gen_helper_tick_set_count(r_tickptr
,
3512 tcg_temp_free_ptr(r_tickptr
);
3516 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3520 TCGv r_tmp
= tcg_temp_local_new();
3522 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3523 save_state(dc
, cpu_cond
);
3524 gen_helper_wrpstate(r_tmp
);
3525 tcg_temp_free(r_tmp
);
3526 dc
->npc
= DYNAMIC_PC
;
3531 TCGv r_tmp
= tcg_temp_local_new();
3533 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3534 save_state(dc
, cpu_cond
);
3535 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_tmp
);
3536 tcg_temp_free(r_tmp
);
3537 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3538 offsetof(CPUSPARCState
, tl
));
3539 dc
->npc
= DYNAMIC_PC
;
3543 gen_helper_wrpil(cpu_tmp0
);
3546 gen_helper_wrcwp(cpu_tmp0
);
3549 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3550 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3551 offsetof(CPUSPARCState
,
3554 case 11: // canrestore
3555 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3556 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3557 offsetof(CPUSPARCState
,
3560 case 12: // cleanwin
3561 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3562 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3563 offsetof(CPUSPARCState
,
3566 case 13: // otherwin
3567 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3568 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3569 offsetof(CPUSPARCState
,
3573 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3574 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3575 offsetof(CPUSPARCState
,
3578 case 16: // UA2005 gl
3579 CHECK_IU_FEATURE(dc
, GL
);
3580 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3581 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3582 offsetof(CPUSPARCState
, gl
));
3584 case 26: // UA2005 strand status
3585 CHECK_IU_FEATURE(dc
, HYPV
);
3586 if (!hypervisor(dc
))
3588 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3594 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3595 if (dc
->def
->nwindows
!= 32)
3596 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3597 (1 << dc
->def
->nwindows
) - 1);
3598 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3602 case 0x33: /* wrtbr, UA2005 wrhpr */
3604 #ifndef TARGET_SPARC64
3605 if (!supervisor(dc
))
3607 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3609 CHECK_IU_FEATURE(dc
, HYPV
);
3610 if (!hypervisor(dc
))
3612 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3615 // XXX gen_op_wrhpstate();
3616 save_state(dc
, cpu_cond
);
3622 // XXX gen_op_wrhtstate();
3625 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3628 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3630 case 31: // hstick_cmpr
3634 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3635 r_tickptr
= tcg_temp_new_ptr();
3636 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3637 offsetof(CPUState
, hstick
));
3638 gen_helper_tick_set_limit(r_tickptr
,
3640 tcg_temp_free_ptr(r_tickptr
);
3643 case 6: // hver readonly
3651 #ifdef TARGET_SPARC64
3652 case 0x2c: /* V9 movcc */
3654 int cc
= GET_FIELD_SP(insn
, 11, 12);
3655 int cond
= GET_FIELD_SP(insn
, 14, 17);
3659 r_cond
= tcg_temp_new();
3660 if (insn
& (1 << 18)) {
3662 gen_cond(r_cond
, 0, cond
, dc
);
3664 gen_cond(r_cond
, 1, cond
, dc
);
3668 gen_fcond(r_cond
, cc
, cond
);
3671 l1
= gen_new_label();
3673 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3674 if (IS_IMM
) { /* immediate */
3677 simm
= GET_FIELD_SPs(insn
, 0, 10);
3678 r_const
= tcg_const_tl(simm
);
3679 gen_movl_TN_reg(rd
, r_const
);
3680 tcg_temp_free(r_const
);
3682 rs2
= GET_FIELD_SP(insn
, 0, 4);
3683 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3684 gen_movl_TN_reg(rd
, cpu_tmp0
);
3687 tcg_temp_free(r_cond
);
3690 case 0x2d: /* V9 sdivx */
3691 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3692 gen_movl_TN_reg(rd
, cpu_dst
);
3694 case 0x2e: /* V9 popc */
3696 cpu_src2
= get_src2(insn
, cpu_src2
);
3697 gen_helper_popc(cpu_dst
, cpu_src2
);
3698 gen_movl_TN_reg(rd
, cpu_dst
);
3700 case 0x2f: /* V9 movr */
3702 int cond
= GET_FIELD_SP(insn
, 10, 12);
3705 cpu_src1
= get_src1(insn
, cpu_src1
);
3707 l1
= gen_new_label();
3709 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3711 if (IS_IMM
) { /* immediate */
3714 simm
= GET_FIELD_SPs(insn
, 0, 9);
3715 r_const
= tcg_const_tl(simm
);
3716 gen_movl_TN_reg(rd
, r_const
);
3717 tcg_temp_free(r_const
);
3719 rs2
= GET_FIELD_SP(insn
, 0, 4);
3720 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3721 gen_movl_TN_reg(rd
, cpu_tmp0
);
3731 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3732 #ifdef TARGET_SPARC64
3733 int opf
= GET_FIELD_SP(insn
, 5, 13);
3734 rs1
= GET_FIELD(insn
, 13, 17);
3735 rs2
= GET_FIELD(insn
, 27, 31);
3736 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3740 case 0x000: /* VIS I edge8cc */
3741 case 0x001: /* VIS II edge8n */
3742 case 0x002: /* VIS I edge8lcc */
3743 case 0x003: /* VIS II edge8ln */
3744 case 0x004: /* VIS I edge16cc */
3745 case 0x005: /* VIS II edge16n */
3746 case 0x006: /* VIS I edge16lcc */
3747 case 0x007: /* VIS II edge16ln */
3748 case 0x008: /* VIS I edge32cc */
3749 case 0x009: /* VIS II edge32n */
3750 case 0x00a: /* VIS I edge32lcc */
3751 case 0x00b: /* VIS II edge32ln */
3754 case 0x010: /* VIS I array8 */
3755 CHECK_FPU_FEATURE(dc
, VIS1
);
3756 cpu_src1
= get_src1(insn
, cpu_src1
);
3757 gen_movl_reg_TN(rs2
, cpu_src2
);
3758 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3759 gen_movl_TN_reg(rd
, cpu_dst
);
3761 case 0x012: /* VIS I array16 */
3762 CHECK_FPU_FEATURE(dc
, VIS1
);
3763 cpu_src1
= get_src1(insn
, cpu_src1
);
3764 gen_movl_reg_TN(rs2
, cpu_src2
);
3765 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3766 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3767 gen_movl_TN_reg(rd
, cpu_dst
);
3769 case 0x014: /* VIS I array32 */
3770 CHECK_FPU_FEATURE(dc
, VIS1
);
3771 cpu_src1
= get_src1(insn
, cpu_src1
);
3772 gen_movl_reg_TN(rs2
, cpu_src2
);
3773 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3774 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3775 gen_movl_TN_reg(rd
, cpu_dst
);
3777 case 0x018: /* VIS I alignaddr */
3778 CHECK_FPU_FEATURE(dc
, VIS1
);
3779 cpu_src1
= get_src1(insn
, cpu_src1
);
3780 gen_movl_reg_TN(rs2
, cpu_src2
);
3781 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3782 gen_movl_TN_reg(rd
, cpu_dst
);
3784 case 0x019: /* VIS II bmask */
3785 case 0x01a: /* VIS I alignaddrl */
3788 case 0x020: /* VIS I fcmple16 */
3789 CHECK_FPU_FEATURE(dc
, VIS1
);
3790 gen_op_load_fpr_DT0(DFPREG(rs1
));
3791 gen_op_load_fpr_DT1(DFPREG(rs2
));
3792 gen_helper_fcmple16();
3793 gen_op_store_DT0_fpr(DFPREG(rd
));
3795 case 0x022: /* VIS I fcmpne16 */
3796 CHECK_FPU_FEATURE(dc
, VIS1
);
3797 gen_op_load_fpr_DT0(DFPREG(rs1
));
3798 gen_op_load_fpr_DT1(DFPREG(rs2
));
3799 gen_helper_fcmpne16();
3800 gen_op_store_DT0_fpr(DFPREG(rd
));
3802 case 0x024: /* VIS I fcmple32 */
3803 CHECK_FPU_FEATURE(dc
, VIS1
);
3804 gen_op_load_fpr_DT0(DFPREG(rs1
));
3805 gen_op_load_fpr_DT1(DFPREG(rs2
));
3806 gen_helper_fcmple32();
3807 gen_op_store_DT0_fpr(DFPREG(rd
));
3809 case 0x026: /* VIS I fcmpne32 */
3810 CHECK_FPU_FEATURE(dc
, VIS1
);
3811 gen_op_load_fpr_DT0(DFPREG(rs1
));
3812 gen_op_load_fpr_DT1(DFPREG(rs2
));
3813 gen_helper_fcmpne32();
3814 gen_op_store_DT0_fpr(DFPREG(rd
));
3816 case 0x028: /* VIS I fcmpgt16 */
3817 CHECK_FPU_FEATURE(dc
, VIS1
);
3818 gen_op_load_fpr_DT0(DFPREG(rs1
));
3819 gen_op_load_fpr_DT1(DFPREG(rs2
));
3820 gen_helper_fcmpgt16();
3821 gen_op_store_DT0_fpr(DFPREG(rd
));
3823 case 0x02a: /* VIS I fcmpeq16 */
3824 CHECK_FPU_FEATURE(dc
, VIS1
);
3825 gen_op_load_fpr_DT0(DFPREG(rs1
));
3826 gen_op_load_fpr_DT1(DFPREG(rs2
));
3827 gen_helper_fcmpeq16();
3828 gen_op_store_DT0_fpr(DFPREG(rd
));
3830 case 0x02c: /* VIS I fcmpgt32 */
3831 CHECK_FPU_FEATURE(dc
, VIS1
);
3832 gen_op_load_fpr_DT0(DFPREG(rs1
));
3833 gen_op_load_fpr_DT1(DFPREG(rs2
));
3834 gen_helper_fcmpgt32();
3835 gen_op_store_DT0_fpr(DFPREG(rd
));
3837 case 0x02e: /* VIS I fcmpeq32 */
3838 CHECK_FPU_FEATURE(dc
, VIS1
);
3839 gen_op_load_fpr_DT0(DFPREG(rs1
));
3840 gen_op_load_fpr_DT1(DFPREG(rs2
));
3841 gen_helper_fcmpeq32();
3842 gen_op_store_DT0_fpr(DFPREG(rd
));
3844 case 0x031: /* VIS I fmul8x16 */
3845 CHECK_FPU_FEATURE(dc
, VIS1
);
3846 gen_op_load_fpr_DT0(DFPREG(rs1
));
3847 gen_op_load_fpr_DT1(DFPREG(rs2
));
3848 gen_helper_fmul8x16();
3849 gen_op_store_DT0_fpr(DFPREG(rd
));
3851 case 0x033: /* VIS I fmul8x16au */
3852 CHECK_FPU_FEATURE(dc
, VIS1
);
3853 gen_op_load_fpr_DT0(DFPREG(rs1
));
3854 gen_op_load_fpr_DT1(DFPREG(rs2
));
3855 gen_helper_fmul8x16au();
3856 gen_op_store_DT0_fpr(DFPREG(rd
));
3858 case 0x035: /* VIS I fmul8x16al */
3859 CHECK_FPU_FEATURE(dc
, VIS1
);
3860 gen_op_load_fpr_DT0(DFPREG(rs1
));
3861 gen_op_load_fpr_DT1(DFPREG(rs2
));
3862 gen_helper_fmul8x16al();
3863 gen_op_store_DT0_fpr(DFPREG(rd
));
3865 case 0x036: /* VIS I fmul8sux16 */
3866 CHECK_FPU_FEATURE(dc
, VIS1
);
3867 gen_op_load_fpr_DT0(DFPREG(rs1
));
3868 gen_op_load_fpr_DT1(DFPREG(rs2
));
3869 gen_helper_fmul8sux16();
3870 gen_op_store_DT0_fpr(DFPREG(rd
));
3872 case 0x037: /* VIS I fmul8ulx16 */
3873 CHECK_FPU_FEATURE(dc
, VIS1
);
3874 gen_op_load_fpr_DT0(DFPREG(rs1
));
3875 gen_op_load_fpr_DT1(DFPREG(rs2
));
3876 gen_helper_fmul8ulx16();
3877 gen_op_store_DT0_fpr(DFPREG(rd
));
3879 case 0x038: /* VIS I fmuld8sux16 */
3880 CHECK_FPU_FEATURE(dc
, VIS1
);
3881 gen_op_load_fpr_DT0(DFPREG(rs1
));
3882 gen_op_load_fpr_DT1(DFPREG(rs2
));
3883 gen_helper_fmuld8sux16();
3884 gen_op_store_DT0_fpr(DFPREG(rd
));
3886 case 0x039: /* VIS I fmuld8ulx16 */
3887 CHECK_FPU_FEATURE(dc
, VIS1
);
3888 gen_op_load_fpr_DT0(DFPREG(rs1
));
3889 gen_op_load_fpr_DT1(DFPREG(rs2
));
3890 gen_helper_fmuld8ulx16();
3891 gen_op_store_DT0_fpr(DFPREG(rd
));
3893 case 0x03a: /* VIS I fpack32 */
3894 case 0x03b: /* VIS I fpack16 */
3895 case 0x03d: /* VIS I fpackfix */
3896 case 0x03e: /* VIS I pdist */
3899 case 0x048: /* VIS I faligndata */
3900 CHECK_FPU_FEATURE(dc
, VIS1
);
3901 gen_op_load_fpr_DT0(DFPREG(rs1
));
3902 gen_op_load_fpr_DT1(DFPREG(rs2
));
3903 gen_helper_faligndata();
3904 gen_op_store_DT0_fpr(DFPREG(rd
));
3906 case 0x04b: /* VIS I fpmerge */
3907 CHECK_FPU_FEATURE(dc
, VIS1
);
3908 gen_op_load_fpr_DT0(DFPREG(rs1
));
3909 gen_op_load_fpr_DT1(DFPREG(rs2
));
3910 gen_helper_fpmerge();
3911 gen_op_store_DT0_fpr(DFPREG(rd
));
3913 case 0x04c: /* VIS II bshuffle */
3916 case 0x04d: /* VIS I fexpand */
3917 CHECK_FPU_FEATURE(dc
, VIS1
);
3918 gen_op_load_fpr_DT0(DFPREG(rs1
));
3919 gen_op_load_fpr_DT1(DFPREG(rs2
));
3920 gen_helper_fexpand();
3921 gen_op_store_DT0_fpr(DFPREG(rd
));
3923 case 0x050: /* VIS I fpadd16 */
3924 CHECK_FPU_FEATURE(dc
, VIS1
);
3925 gen_op_load_fpr_DT0(DFPREG(rs1
));
3926 gen_op_load_fpr_DT1(DFPREG(rs2
));
3927 gen_helper_fpadd16();
3928 gen_op_store_DT0_fpr(DFPREG(rd
));
3930 case 0x051: /* VIS I fpadd16s */
3931 CHECK_FPU_FEATURE(dc
, VIS1
);
3932 gen_helper_fpadd16s(cpu_fpr
[rd
],
3933 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3935 case 0x052: /* VIS I fpadd32 */
3936 CHECK_FPU_FEATURE(dc
, VIS1
);
3937 gen_op_load_fpr_DT0(DFPREG(rs1
));
3938 gen_op_load_fpr_DT1(DFPREG(rs2
));
3939 gen_helper_fpadd32();
3940 gen_op_store_DT0_fpr(DFPREG(rd
));
3942 case 0x053: /* VIS I fpadd32s */
3943 CHECK_FPU_FEATURE(dc
, VIS1
);
3944 gen_helper_fpadd32s(cpu_fpr
[rd
],
3945 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3947 case 0x054: /* VIS I fpsub16 */
3948 CHECK_FPU_FEATURE(dc
, VIS1
);
3949 gen_op_load_fpr_DT0(DFPREG(rs1
));
3950 gen_op_load_fpr_DT1(DFPREG(rs2
));
3951 gen_helper_fpsub16();
3952 gen_op_store_DT0_fpr(DFPREG(rd
));
3954 case 0x055: /* VIS I fpsub16s */
3955 CHECK_FPU_FEATURE(dc
, VIS1
);
3956 gen_helper_fpsub16s(cpu_fpr
[rd
],
3957 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3959 case 0x056: /* VIS I fpsub32 */
3960 CHECK_FPU_FEATURE(dc
, VIS1
);
3961 gen_op_load_fpr_DT0(DFPREG(rs1
));
3962 gen_op_load_fpr_DT1(DFPREG(rs2
));
3963 gen_helper_fpsub32();
3964 gen_op_store_DT0_fpr(DFPREG(rd
));
3966 case 0x057: /* VIS I fpsub32s */
3967 CHECK_FPU_FEATURE(dc
, VIS1
);
3968 gen_helper_fpsub32s(cpu_fpr
[rd
],
3969 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3971 case 0x060: /* VIS I fzero */
3972 CHECK_FPU_FEATURE(dc
, VIS1
);
3973 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3974 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3976 case 0x061: /* VIS I fzeros */
3977 CHECK_FPU_FEATURE(dc
, VIS1
);
3978 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3980 case 0x062: /* VIS I fnor */
3981 CHECK_FPU_FEATURE(dc
, VIS1
);
3982 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3983 cpu_fpr
[DFPREG(rs2
)]);
3984 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3985 cpu_fpr
[DFPREG(rs2
) + 1]);
3987 case 0x063: /* VIS I fnors */
3988 CHECK_FPU_FEATURE(dc
, VIS1
);
3989 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3991 case 0x064: /* VIS I fandnot2 */
3992 CHECK_FPU_FEATURE(dc
, VIS1
);
3993 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3994 cpu_fpr
[DFPREG(rs2
)]);
3995 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3996 cpu_fpr
[DFPREG(rs1
) + 1],
3997 cpu_fpr
[DFPREG(rs2
) + 1]);
3999 case 0x065: /* VIS I fandnot2s */
4000 CHECK_FPU_FEATURE(dc
, VIS1
);
4001 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4003 case 0x066: /* VIS I fnot2 */
4004 CHECK_FPU_FEATURE(dc
, VIS1
);
4005 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
4006 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4007 cpu_fpr
[DFPREG(rs2
) + 1]);
4009 case 0x067: /* VIS I fnot2s */
4010 CHECK_FPU_FEATURE(dc
, VIS1
);
4011 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4013 case 0x068: /* VIS I fandnot1 */
4014 CHECK_FPU_FEATURE(dc
, VIS1
);
4015 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4016 cpu_fpr
[DFPREG(rs1
)]);
4017 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4018 cpu_fpr
[DFPREG(rs2
) + 1],
4019 cpu_fpr
[DFPREG(rs1
) + 1]);
4021 case 0x069: /* VIS I fandnot1s */
4022 CHECK_FPU_FEATURE(dc
, VIS1
);
4023 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4025 case 0x06a: /* VIS I fnot1 */
4026 CHECK_FPU_FEATURE(dc
, VIS1
);
4027 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4028 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4029 cpu_fpr
[DFPREG(rs1
) + 1]);
4031 case 0x06b: /* VIS I fnot1s */
4032 CHECK_FPU_FEATURE(dc
, VIS1
);
4033 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4035 case 0x06c: /* VIS I fxor */
4036 CHECK_FPU_FEATURE(dc
, VIS1
);
4037 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4038 cpu_fpr
[DFPREG(rs2
)]);
4039 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4040 cpu_fpr
[DFPREG(rs1
) + 1],
4041 cpu_fpr
[DFPREG(rs2
) + 1]);
4043 case 0x06d: /* VIS I fxors */
4044 CHECK_FPU_FEATURE(dc
, VIS1
);
4045 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4047 case 0x06e: /* VIS I fnand */
4048 CHECK_FPU_FEATURE(dc
, VIS1
);
4049 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
4050 cpu_fpr
[DFPREG(rs2
)]);
4051 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
4052 cpu_fpr
[DFPREG(rs2
) + 1]);
4054 case 0x06f: /* VIS I fnands */
4055 CHECK_FPU_FEATURE(dc
, VIS1
);
4056 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4058 case 0x070: /* VIS I fand */
4059 CHECK_FPU_FEATURE(dc
, VIS1
);
4060 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4061 cpu_fpr
[DFPREG(rs2
)]);
4062 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
4063 cpu_fpr
[DFPREG(rs1
) + 1],
4064 cpu_fpr
[DFPREG(rs2
) + 1]);
4066 case 0x071: /* VIS I fands */
4067 CHECK_FPU_FEATURE(dc
, VIS1
);
4068 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4070 case 0x072: /* VIS I fxnor */
4071 CHECK_FPU_FEATURE(dc
, VIS1
);
4072 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4073 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4074 cpu_fpr
[DFPREG(rs1
)]);
4075 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4076 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4077 cpu_fpr
[DFPREG(rs1
) + 1]);
4079 case 0x073: /* VIS I fxnors */
4080 CHECK_FPU_FEATURE(dc
, VIS1
);
4081 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4082 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4084 case 0x074: /* VIS I fsrc1 */
4085 CHECK_FPU_FEATURE(dc
, VIS1
);
4086 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4087 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4088 cpu_fpr
[DFPREG(rs1
) + 1]);
4090 case 0x075: /* VIS I fsrc1s */
4091 CHECK_FPU_FEATURE(dc
, VIS1
);
4092 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4094 case 0x076: /* VIS I fornot2 */
4095 CHECK_FPU_FEATURE(dc
, VIS1
);
4096 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4097 cpu_fpr
[DFPREG(rs2
)]);
4098 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4099 cpu_fpr
[DFPREG(rs1
) + 1],
4100 cpu_fpr
[DFPREG(rs2
) + 1]);
4102 case 0x077: /* VIS I fornot2s */
4103 CHECK_FPU_FEATURE(dc
, VIS1
);
4104 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4106 case 0x078: /* VIS I fsrc2 */
4107 CHECK_FPU_FEATURE(dc
, VIS1
);
4108 gen_op_load_fpr_DT0(DFPREG(rs2
));
4109 gen_op_store_DT0_fpr(DFPREG(rd
));
4111 case 0x079: /* VIS I fsrc2s */
4112 CHECK_FPU_FEATURE(dc
, VIS1
);
4113 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4115 case 0x07a: /* VIS I fornot1 */
4116 CHECK_FPU_FEATURE(dc
, VIS1
);
4117 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4118 cpu_fpr
[DFPREG(rs1
)]);
4119 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4120 cpu_fpr
[DFPREG(rs2
) + 1],
4121 cpu_fpr
[DFPREG(rs1
) + 1]);
4123 case 0x07b: /* VIS I fornot1s */
4124 CHECK_FPU_FEATURE(dc
, VIS1
);
4125 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4127 case 0x07c: /* VIS I for */
4128 CHECK_FPU_FEATURE(dc
, VIS1
);
4129 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4130 cpu_fpr
[DFPREG(rs2
)]);
4131 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4132 cpu_fpr
[DFPREG(rs1
) + 1],
4133 cpu_fpr
[DFPREG(rs2
) + 1]);
4135 case 0x07d: /* VIS I fors */
4136 CHECK_FPU_FEATURE(dc
, VIS1
);
4137 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4139 case 0x07e: /* VIS I fone */
4140 CHECK_FPU_FEATURE(dc
, VIS1
);
4141 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4142 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4144 case 0x07f: /* VIS I fones */
4145 CHECK_FPU_FEATURE(dc
, VIS1
);
4146 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4148 case 0x080: /* VIS I shutdown */
4149 case 0x081: /* VIS II siam */
4158 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4159 #ifdef TARGET_SPARC64
4164 #ifdef TARGET_SPARC64
4165 } else if (xop
== 0x39) { /* V9 return */
4168 save_state(dc
, cpu_cond
);
4169 cpu_src1
= get_src1(insn
, cpu_src1
);
4170 if (IS_IMM
) { /* immediate */
4171 simm
= GET_FIELDs(insn
, 19, 31);
4172 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4173 } else { /* register */
4174 rs2
= GET_FIELD(insn
, 27, 31);
4176 gen_movl_reg_TN(rs2
, cpu_src2
);
4177 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4179 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4181 gen_helper_restore();
4182 gen_mov_pc_npc(dc
, cpu_cond
);
4183 r_const
= tcg_const_i32(3);
4184 gen_helper_check_align(cpu_dst
, r_const
);
4185 tcg_temp_free_i32(r_const
);
4186 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4187 dc
->npc
= DYNAMIC_PC
;
4191 cpu_src1
= get_src1(insn
, cpu_src1
);
4192 if (IS_IMM
) { /* immediate */
4193 simm
= GET_FIELDs(insn
, 19, 31);
4194 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4195 } else { /* register */
4196 rs2
= GET_FIELD(insn
, 27, 31);
4198 gen_movl_reg_TN(rs2
, cpu_src2
);
4199 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4201 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
            case 0x38:  /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
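            /* The jmpl case above writes the current PC to rd, moves the
             * alignment-checked target into cpu_npc, and marks dc->npc as
             * DYNAMIC_PC so the translator stops treating the next PC as a
             * compile-time constant. */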
4220 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4221 case 0x39: /* rett, V9 return */
4225 if (!supervisor(dc
))
4227 gen_mov_pc_npc(dc
, cpu_cond
);
4228 r_const
= tcg_const_i32(3);
4229 gen_helper_check_align(cpu_dst
, r_const
);
4230 tcg_temp_free_i32(r_const
);
4231 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4232 dc
->npc
= DYNAMIC_PC
;
4237 case 0x3b: /* flush */
4238 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4242 case 0x3c: /* save */
4243 save_state(dc
, cpu_cond
);
4245 gen_movl_TN_reg(rd
, cpu_dst
);
4247 case 0x3d: /* restore */
4248 save_state(dc
, cpu_cond
);
4249 gen_helper_restore();
4250 gen_movl_TN_reg(rd
, cpu_dst
);
4252 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4253 case 0x3e: /* V9 done/retry */
4257 if (!supervisor(dc
))
4259 dc
->npc
= DYNAMIC_PC
;
4260 dc
->pc
= DYNAMIC_PC
;
4264 if (!supervisor(dc
))
4266 dc
->npc
= DYNAMIC_PC
;
4267 dc
->pc
= DYNAMIC_PC
;
4283 case 3: /* load/store instructions */
4285 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4287 /* flush pending conditional evaluations before exposing
4289 if (dc
->cc_op
!= CC_OP_FLAGS
) {
4290 dc
->cc_op
= CC_OP_FLAGS
;
4291 gen_helper_compute_psr();
4293 cpu_src1
= get_src1(insn
, cpu_src1
);
4294 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4295 rs2
= GET_FIELD(insn
, 27, 31);
4296 gen_movl_reg_TN(rs2
, cpu_src2
);
4297 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4298 } else if (IS_IMM
) { /* immediate */
4299 simm
= GET_FIELDs(insn
, 19, 31);
4300 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4301 } else { /* register */
4302 rs2
= GET_FIELD(insn
, 27, 31);
4304 gen_movl_reg_TN(rs2
, cpu_src2
);
4305 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4307 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4309 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4310 (xop
> 0x17 && xop
<= 0x1d ) ||
4311 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4313 case 0x0: /* ld, V9 lduw, load unsigned word */
4314 gen_address_mask(dc
, cpu_addr
);
4315 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4317 case 0x1: /* ldub, load unsigned byte */
4318 gen_address_mask(dc
, cpu_addr
);
4319 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4321 case 0x2: /* lduh, load unsigned halfword */
4322 gen_address_mask(dc
, cpu_addr
);
4323 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4325 case 0x3: /* ldd, load double word */
4331 save_state(dc
, cpu_cond
);
4332 r_const
= tcg_const_i32(7);
4333 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4334 tcg_temp_free_i32(r_const
);
4335 gen_address_mask(dc
, cpu_addr
);
4336 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4337 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4338 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4339 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4340 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4341 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4342 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4345 case 0x9: /* ldsb, load signed byte */
4346 gen_address_mask(dc
, cpu_addr
);
4347 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4349 case 0xa: /* ldsh, load signed halfword */
4350 gen_address_mask(dc
, cpu_addr
);
4351 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4353 case 0xd: /* ldstub -- XXX: should be atomically */
4357 gen_address_mask(dc
, cpu_addr
);
4358 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4359 r_const
= tcg_const_tl(0xff);
4360 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4361 tcg_temp_free(r_const
);
4364 case 0x0f: /* swap, swap register with memory. Also
4366 CHECK_IU_FEATURE(dc
, SWAP
);
4367 gen_movl_reg_TN(rd
, cpu_val
);
4368 gen_address_mask(dc
, cpu_addr
);
4369 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4370 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4371 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4373 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4374 case 0x10: /* lda, V9 lduwa, load word alternate */
4375 #ifndef TARGET_SPARC64
4378 if (!supervisor(dc
))
4381 save_state(dc
, cpu_cond
);
4382 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4384 case 0x11: /* lduba, load unsigned byte alternate */
4385 #ifndef TARGET_SPARC64
4388 if (!supervisor(dc
))
4391 save_state(dc
, cpu_cond
);
4392 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4394 case 0x12: /* lduha, load unsigned halfword alternate */
4395 #ifndef TARGET_SPARC64
4398 if (!supervisor(dc
))
4401 save_state(dc
, cpu_cond
);
4402 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4404 case 0x13: /* ldda, load double word alternate */
4405 #ifndef TARGET_SPARC64
4408 if (!supervisor(dc
))
4413 save_state(dc
, cpu_cond
);
4414 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4416 case 0x19: /* ldsba, load signed byte alternate */
4417 #ifndef TARGET_SPARC64
4420 if (!supervisor(dc
))
4423 save_state(dc
, cpu_cond
);
4424 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4426 case 0x1a: /* ldsha, load signed halfword alternate */
4427 #ifndef TARGET_SPARC64
4430 if (!supervisor(dc
))
4433 save_state(dc
, cpu_cond
);
4434 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4436 case 0x1d: /* ldstuba -- XXX: should be atomically */
4437 #ifndef TARGET_SPARC64
4440 if (!supervisor(dc
))
4443 save_state(dc
, cpu_cond
);
4444 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4446 case 0x1f: /* swapa, swap reg with alt. memory. Also
4448 CHECK_IU_FEATURE(dc
, SWAP
);
4449 #ifndef TARGET_SPARC64
4452 if (!supervisor(dc
))
4455 save_state(dc
, cpu_cond
);
4456 gen_movl_reg_TN(rd
, cpu_val
);
4457 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4460 #ifndef TARGET_SPARC64
4461 case 0x30: /* ldc */
4462 case 0x31: /* ldcsr */
4463 case 0x33: /* lddc */
4467 #ifdef TARGET_SPARC64
4468 case 0x08: /* V9 ldsw */
4469 gen_address_mask(dc
, cpu_addr
);
4470 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4472 case 0x0b: /* V9 ldx */
4473 gen_address_mask(dc
, cpu_addr
);
4474 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4476 case 0x18: /* V9 ldswa */
4477 save_state(dc
, cpu_cond
);
4478 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4480 case 0x1b: /* V9 ldxa */
4481 save_state(dc
, cpu_cond
);
4482 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4484 case 0x2d: /* V9 prefetch, no effect */
4486 case 0x30: /* V9 ldfa */
4487 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4490 save_state(dc
, cpu_cond
);
4491 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4493 case 0x33: /* V9 lddfa */
4494 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4497 save_state(dc
, cpu_cond
);
4498 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4500 case 0x3d: /* V9 prefetcha, no effect */
4502 case 0x32: /* V9 ldqfa */
4503 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4504 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4507 save_state(dc
, cpu_cond
);
4508 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4514 gen_movl_TN_reg(rd
, cpu_val
);
4515 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4518 } else if (xop
>= 0x20 && xop
< 0x24) {
4519 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4521 save_state(dc
, cpu_cond
);
4523 case 0x20: /* ldf, load fpreg */
4524 gen_address_mask(dc
, cpu_addr
);
4525 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4526 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4528 case 0x21: /* ldfsr, V9 ldxfsr */
4529 #ifdef TARGET_SPARC64
4530 gen_address_mask(dc
, cpu_addr
);
4532 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4533 gen_helper_ldxfsr(cpu_tmp64
);
4535 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4536 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
4537 gen_helper_ldfsr(cpu_tmp32
);
4541 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4542 gen_helper_ldfsr(cpu_tmp32
);
4546 case 0x22: /* ldqf, load quad fpreg */
4550 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4551 r_const
= tcg_const_i32(dc
->mem_idx
);
4552 gen_address_mask(dc
, cpu_addr
);
4553 gen_helper_ldqf(cpu_addr
, r_const
);
4554 tcg_temp_free_i32(r_const
);
4555 gen_op_store_QT0_fpr(QFPREG(rd
));
4558 case 0x23: /* lddf, load double fpreg */
4562 r_const
= tcg_const_i32(dc
->mem_idx
);
4563 gen_address_mask(dc
, cpu_addr
);
4564 gen_helper_lddf(cpu_addr
, r_const
);
4565 tcg_temp_free_i32(r_const
);
4566 gen_op_store_DT0_fpr(DFPREG(rd
));
4572 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4573 xop
== 0xe || xop
== 0x1e) {
4574 gen_movl_reg_TN(rd
, cpu_val
);
4576 case 0x4: /* st, store word */
4577 gen_address_mask(dc
, cpu_addr
);
4578 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4580 case 0x5: /* stb, store byte */
4581 gen_address_mask(dc
, cpu_addr
);
4582 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4584 case 0x6: /* sth, store halfword */
4585 gen_address_mask(dc
, cpu_addr
);
4586 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4588 case 0x7: /* std, store double word */
4594 save_state(dc
, cpu_cond
);
4595 gen_address_mask(dc
, cpu_addr
);
4596 r_const
= tcg_const_i32(7);
4597 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4598 tcg_temp_free_i32(r_const
);
4599 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4600 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4601 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4604 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4605 case 0x14: /* sta, V9 stwa, store word alternate */
4606 #ifndef TARGET_SPARC64
4609 if (!supervisor(dc
))
4612 save_state(dc
, cpu_cond
);
4613 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4614 dc
->npc
= DYNAMIC_PC
;
4616 case 0x15: /* stba, store byte alternate */
4617 #ifndef TARGET_SPARC64
4620 if (!supervisor(dc
))
4623 save_state(dc
, cpu_cond
);
4624 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4625 dc
->npc
= DYNAMIC_PC
;
4627 case 0x16: /* stha, store halfword alternate */
4628 #ifndef TARGET_SPARC64
4631 if (!supervisor(dc
))
4634 save_state(dc
, cpu_cond
);
4635 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4636 dc
->npc
= DYNAMIC_PC
;
4638 case 0x17: /* stda, store double word alternate */
4639 #ifndef TARGET_SPARC64
4642 if (!supervisor(dc
))
4648 save_state(dc
, cpu_cond
);
4649 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4653 #ifdef TARGET_SPARC64
4654 case 0x0e: /* V9 stx */
4655 gen_address_mask(dc
, cpu_addr
);
4656 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4658 case 0x1e: /* V9 stxa */
4659 save_state(dc
, cpu_cond
);
4660 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4661 dc
->npc
= DYNAMIC_PC
;
4667 } else if (xop
> 0x23 && xop
< 0x28) {
4668 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4670 save_state(dc
, cpu_cond
);
4672 case 0x24: /* stf, store fpreg */
4673 gen_address_mask(dc
, cpu_addr
);
4674 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4675 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4677 case 0x25: /* stfsr, V9 stxfsr */
4678 #ifdef TARGET_SPARC64
4679 gen_address_mask(dc
, cpu_addr
);
4680 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4682 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4684 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4686 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4687 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4691 #ifdef TARGET_SPARC64
4692 /* V9 stqf, store quad fpreg */
4696 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4697 gen_op_load_fpr_QT0(QFPREG(rd
));
4698 r_const
= tcg_const_i32(dc
->mem_idx
);
4699 gen_address_mask(dc
, cpu_addr
);
4700 gen_helper_stqf(cpu_addr
, r_const
);
4701 tcg_temp_free_i32(r_const
);
4704 #else /* !TARGET_SPARC64 */
4705 /* stdfq, store floating point queue */
4706 #if defined(CONFIG_USER_ONLY)
4709 if (!supervisor(dc
))
4711 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4716 case 0x27: /* stdf, store double fpreg */
4720 gen_op_load_fpr_DT0(DFPREG(rd
));
4721 r_const
= tcg_const_i32(dc
->mem_idx
);
4722 gen_address_mask(dc
, cpu_addr
);
4723 gen_helper_stdf(cpu_addr
, r_const
);
4724 tcg_temp_free_i32(r_const
);
4730 } else if (xop
> 0x33 && xop
< 0x3f) {
4731 save_state(dc
, cpu_cond
);
4733 #ifdef TARGET_SPARC64
4734 case 0x34: /* V9 stfa */
4735 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4738 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4740 case 0x36: /* V9 stqfa */
4744 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4745 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4748 r_const
= tcg_const_i32(7);
4749 gen_helper_check_align(cpu_addr
, r_const
);
4750 tcg_temp_free_i32(r_const
);
4751 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4754 case 0x37: /* V9 stdfa */
4755 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4758 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4760 case 0x3c: /* V9 casa */
4761 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4762 gen_movl_TN_reg(rd
, cpu_val
);
4764 case 0x3e: /* V9 casxa */
4765 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4766 gen_movl_TN_reg(rd
, cpu_val
);
4769 case 0x34: /* stc */
4770 case 0x35: /* stcsr */
4771 case 0x36: /* stdcq */
4772 case 0x37: /* stdc */
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
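    /* SPARC uses a delayed-branch pipeline, so the translator tracks both
       a current PC and a next PC (npc).  For ordinary instructions npc
       simply advances by 4; after a conditional branch npc is left in the
       JUMP_PC state and jump_pc[0]/jump_pc[1] hold the two candidate
       targets until the condition is known. */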
 jmp_insn:
    goto egress;

 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
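/* Translate one block of guest code: disas_sparc_insn() is called in a
   loop until the flow of control changes, a page boundary or the opcode
   buffer limit is reached, or single-stepping/icount forces an early
   stop.  When 'spc' is non-zero the gen_opc_* side tables are also
   filled in so the guest PC can later be recovered from a host PC. */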
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    memset(dc, 0, sizeof(DisasContext));
    last_pc = dc->pc = pc_start = tb->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
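    /* tb->cflags carries the icount budget for this TB; when icount is not
       in use, fall back to the maximum possible number of instructions. */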
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    goto exit_gen_loop;
                }
            }
        }
4916 qemu_log("Search PC...\n");
4917 j
= gen_opc_ptr
- gen_opc_buf
;
4921 gen_opc_instr_start
[lj
++] = 0;
4922 gen_opc_pc
[lj
] = dc
->pc
;
4923 gen_opc_npc
[lj
] = dc
->npc
;
4924 gen_opc_instr_start
[lj
] = 1;
4925 gen_opc_icount
[lj
] = num_insns
;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
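    /* Besides the explicit breaks above, translation also ends when the
       opcode buffer is close to full, when the block nears a page
       boundary, or when the icount budget is used up. */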
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (dc->pc != DYNAMIC_PC &&
        (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
        /* static PC and NPC: we can use direct chaining */
        gen_goto_tb(dc, 0, dc->pc, dc->npc);
    } else {
        if (dc->pc != DYNAMIC_PC)
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        save_npc(dc, cpu_cond);
        tcg_gen_exit_tb(0);
    }
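    /* If either PC is only known at run time, the TB cannot be chained
       directly: the generated code writes PC/NPC back to the CPU state
       and returns to the main execution loop instead. */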
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
    }
}
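/* The two public entry points below differ only in the 'spc' flag: the
   _pc variant records the gen_opc_* side tables so that a guest PC can be
   reconstructed from a faulting host PC. */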
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
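/* Set up the TCG globals that mirror fixed CPUState fields: condition
   codes, PC/NPC, control registers, the general registers g1-g7 and the
   floating point register file. */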
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
    cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
    cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                     "asi");
    cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                      "fprs");
    cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                 "gsr");
    cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, tick_cmpr),
                                       "tick_cmpr");
    cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, stick_cmpr),
                                        "stick_cmpr");
    cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, hstick_cmpr),
                                         "hstick_cmpr");
    cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                   "hintp");
    cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                  "htba");
    cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                  "hver");
    cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, ssr), "ssr");
    cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, version), "ver");
    cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                         offsetof(CPUState, softint),
                                         "softint");
#else
    cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                 "wim");
#endif
    cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                  "cond");
    cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                    "cc_src");
    cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, cc_src2),
                                     "cc_src2");
    cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                    "cc_dst");
    cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                       "cc_op");
    cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
    cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                 "fsr");
    cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                "pc");
    cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                 "npc");
    cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
    cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                 "tbr");
#endif
    for (i = 1; i < 8; i++)
        cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                          offsetof(CPUState, gregs[i]),
                                          gregnames[i]);
    for (i = 0; i < TARGET_FPREGS; i++)
        cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, fpr[i]),
                                            fregnames[i]);
    /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
}
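/* Called when execution of a TB is interrupted (for example by a fault):
   pc_pos indexes the gen_opc_* tables recorded at translation time, and
   the saved npc value uses the same DYNAMIC_PC/JUMP_PC encoding that the
   translator uses (1 = dynamic, 2 = jump). */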
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }

    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}