4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
/* Sentinel values stored in DisasContext.pc / DisasContext.npc when the
   (n)pc is not known at translation time. */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
48 /* global register indexes */
49 static TCGv cpu_env
, cpu_T
[3], cpu_regwptr
, cpu_cc_src
, cpu_cc_dst
, cpu_psr
;
53 /* local register indexes (only used inside old micro ops) */
56 typedef struct DisasContext
{
57 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
58 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
59 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
63 struct TranslationBlock
*tb
;
66 typedef struct sparc_def_t sparc_def_t
;
69 const unsigned char *name
;
70 target_ulong iu_version
;
74 uint32_t mmu_ctpr_mask
;
75 uint32_t mmu_cxr_mask
;
76 uint32_t mmu_sfsr_mask
;
77 uint32_t mmu_trcr_mask
;
80 static const sparc_def_t
*cpu_sparc_find_by_name(const unsigned char *name
);
// This macro uses non-native bit order: bit 0 is the most significant
// bit, as in the instruction diagrams of the SPARC manuals.
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0.
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

// Sign-extending variants: the extracted field is widened from its
// (b - a + 1)-bit width to a full int (see sign_extend below).
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Floating point register number encodings.  On sparc64 the odd bit of
   a double/quad register number selects the upper bank (bit 0 moves to
   bit 5); on 32-bit targets the number is simply aligned down.
   NOTE(review): the #ifdef/#else structure was dropped by extraction and
   is reconstructed from the two conflicting DFPREG/QFPREG definitions —
   confirm against the original tree. */
#ifdef TARGET_SPARC64
#define FFPREG(r) (r)
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define FFPREG(r) (r)
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
/*
 * Sign-extend the low 'len' bits of 'x' to a full int.
 * E.g. sign_extend(0x3, 2) == -1, sign_extend(0x1, 2) == 1.
 *
 * The 'len = 32 - len;' step is essential: callers (GET_FIELDs) pass the
 * field *width*, so the shifts must use its 32-bit complement.  Relies on
 * arithmetic right shift of signed ints (implementation-defined, but
 * universal on supported hosts).
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
112 #define IS_IMM (insn & (1<<13))
114 static void disas_sparc_insn(DisasContext
* dc
);
116 #ifdef TARGET_SPARC64
117 #define GEN32(func, NAME) \
118 static GenOpFunc * const NAME ## _table [64] = { \
119 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
120 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
121 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
122 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
123 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
124 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
125 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
126 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
127 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
128 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
129 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
130 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
132 static inline void func(int n) \
134 NAME ## _table[n](); \
137 #define GEN32(func, NAME) \
138 static GenOpFunc *const NAME ## _table [32] = { \
139 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
140 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
141 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
142 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
143 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
144 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
145 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
146 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
148 static inline void func(int n) \
150 NAME ## _table[n](); \
154 /* floating point registers moves */
155 GEN32(gen_op_load_fpr_FT0
, gen_op_load_fpr_FT0_fprf
);
156 GEN32(gen_op_load_fpr_FT1
, gen_op_load_fpr_FT1_fprf
);
157 GEN32(gen_op_store_FT0_fpr
, gen_op_store_FT0_fpr_fprf
);
158 GEN32(gen_op_store_FT1_fpr
, gen_op_store_FT1_fpr_fprf
);
160 GEN32(gen_op_load_fpr_DT0
, gen_op_load_fpr_DT0_fprf
);
161 GEN32(gen_op_load_fpr_DT1
, gen_op_load_fpr_DT1_fprf
);
162 GEN32(gen_op_store_DT0_fpr
, gen_op_store_DT0_fpr_fprf
);
163 GEN32(gen_op_store_DT1_fpr
, gen_op_store_DT1_fpr_fprf
);
165 #if defined(CONFIG_USER_ONLY)
166 GEN32(gen_op_load_fpr_QT0
, gen_op_load_fpr_QT0_fprf
);
167 GEN32(gen_op_load_fpr_QT1
, gen_op_load_fpr_QT1_fprf
);
168 GEN32(gen_op_store_QT0_fpr
, gen_op_store_QT0_fpr_fprf
);
169 GEN32(gen_op_store_QT1_fpr
, gen_op_store_QT1_fpr_fprf
);
173 #ifdef CONFIG_USER_ONLY
174 #define supervisor(dc) 0
175 #ifdef TARGET_SPARC64
176 #define hypervisor(dc) 0
178 #define gen_op_ldst(name) gen_op_##name##_raw()
180 #define supervisor(dc) (dc->mem_idx >= 1)
181 #ifdef TARGET_SPARC64
182 #define hypervisor(dc) (dc->mem_idx == 2)
183 #define OP_LD_TABLE(width) \
184 static GenOpFunc * const gen_op_##width[] = { \
185 &gen_op_##width##_user, \
186 &gen_op_##width##_kernel, \
187 &gen_op_##width##_hypv, \
190 #define OP_LD_TABLE(width) \
191 static GenOpFunc * const gen_op_##width[] = { \
192 &gen_op_##width##_user, \
193 &gen_op_##width##_kernel, \
196 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
199 #ifndef CONFIG_USER_ONLY
202 #endif /* __i386__ */
/* Truncate an address value to its low 32 bits on sparc64 (32-bit ABI
   addressing); expands to nothing on 32-bit targets.
   NOTE(review): #ifdef/#else structure reconstructed from the two
   conflicting definitions left by extraction. */
#ifdef TARGET_SPARC64
#define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
#else
#define ABI32_MASK(addr)
#endif
215 static inline void gen_movl_simm_T1(int32_t val
)
217 tcg_gen_movi_tl(cpu_T
[1], val
);
220 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
223 tcg_gen_movi_tl(tn
, 0);
225 tcg_gen_ld_tl(tn
, cpu_env
, offsetof(CPUState
, gregs
[reg
]));
227 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
231 static inline void gen_movl_reg_T0(int reg
)
233 gen_movl_reg_TN(reg
, cpu_T
[0]);
236 static inline void gen_movl_reg_T1(int reg
)
238 gen_movl_reg_TN(reg
, cpu_T
[1]);
242 static inline void gen_movl_reg_T2(int reg
)
244 gen_movl_reg_TN(reg
, cpu_T
[2]);
247 #endif /* __i386__ */
248 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
253 tcg_gen_st_tl(tn
, cpu_env
, offsetof(CPUState
, gregs
[reg
]));
255 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
259 static inline void gen_movl_T0_reg(int reg
)
261 gen_movl_TN_reg(reg
, cpu_T
[0]);
264 static inline void gen_movl_T1_reg(int reg
)
266 gen_movl_TN_reg(reg
, cpu_T
[1]);
269 static inline void gen_op_movl_T0_env(size_t offset
)
271 tcg_gen_ld_i32(cpu_T
[0], cpu_env
, offset
);
274 static inline void gen_op_movl_env_T0(size_t offset
)
276 tcg_gen_st_i32(cpu_T
[0], cpu_env
, offset
);
279 static inline void gen_op_movtl_T0_env(size_t offset
)
281 tcg_gen_ld_tl(cpu_T
[0], cpu_env
, offset
);
284 static inline void gen_op_movtl_env_T0(size_t offset
)
286 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offset
);
289 static inline void gen_op_add_T1_T0(void)
291 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
294 static inline void gen_op_or_T1_T0(void)
296 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
299 static inline void gen_op_xor_T1_T0(void)
301 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
304 static inline void gen_jmp_im(target_ulong pc
)
306 tcg_gen_movi_tl(cpu_tmp0
, pc
);
307 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUState
, pc
));
310 static inline void gen_movl_npc_im(target_ulong npc
)
312 tcg_gen_movi_tl(cpu_tmp0
, npc
);
313 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUState
, npc
));
316 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
317 target_ulong pc
, target_ulong npc
)
319 TranslationBlock
*tb
;
322 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
323 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
324 /* jump to same page: we can use a direct jump */
325 tcg_gen_goto_tb(tb_num
);
327 gen_movl_npc_im(npc
);
328 tcg_gen_exit_tb((long)tb
+ tb_num
);
330 /* jump to another page: currently not optimized */
332 gen_movl_npc_im(npc
);
338 static inline void gen_mov_reg_N(TCGv reg
, TCGv src
)
340 tcg_gen_shri_i32(reg
, src
, 23);
341 tcg_gen_andi_tl(reg
, reg
, 0x1);
344 static inline void gen_mov_reg_Z(TCGv reg
, TCGv src
)
346 tcg_gen_shri_i32(reg
, src
, 22);
347 tcg_gen_andi_tl(reg
, reg
, 0x1);
350 static inline void gen_mov_reg_V(TCGv reg
, TCGv src
)
352 tcg_gen_shri_i32(reg
, src
, 21);
353 tcg_gen_andi_tl(reg
, reg
, 0x1);
356 static inline void gen_mov_reg_C(TCGv reg
, TCGv src
)
358 tcg_gen_shri_i32(reg
, src
, 20);
359 tcg_gen_andi_tl(reg
, reg
, 0x1);
362 static inline void gen_op_exception(int exception
)
366 r_except
= tcg_temp_new(TCG_TYPE_I32
);
367 tcg_gen_movi_i32(r_except
, exception
);
368 tcg_gen_helper_0_1(raise_exception
, r_except
);
371 static inline void gen_cc_clear(void)
373 tcg_gen_movi_i32(cpu_psr
, 0);
374 #ifdef TARGET_SPARC64
375 tcg_gen_movi_i32(cpu_xcc
, 0);
381 env->psr |= PSR_ZERO;
382 if ((int32_t) T0 < 0)
385 static inline void gen_cc_NZ(TCGv dst
)
390 l1
= gen_new_label();
391 l2
= gen_new_label();
392 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
393 tcg_gen_movi_tl(r_zero
, 0);
394 tcg_gen_brcond_i32(TCG_COND_NE
, dst
, r_zero
, l1
);
395 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
397 tcg_gen_brcond_i32(TCG_COND_GE
, dst
, r_zero
, l2
);
398 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
400 #ifdef TARGET_SPARC64
404 l3
= gen_new_label();
405 l4
= gen_new_label();
406 tcg_gen_brcond_tl(TCG_COND_NE
, dst
, r_zero
, l3
);
407 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
409 tcg_gen_brcond_tl(TCG_COND_GE
, dst
, r_zero
, l4
);
410 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
418 env->psr |= PSR_CARRY;
420 static inline void gen_cc_C_add(TCGv dst
, TCGv src1
)
424 l1
= gen_new_label();
425 tcg_gen_brcond_i32(TCG_COND_GEU
, dst
, src1
, l1
);
426 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
428 #ifdef TARGET_SPARC64
432 l2
= gen_new_label();
433 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l2
);
434 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
441 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
444 static inline void gen_cc_V_add(TCGv dst
, TCGv src1
, TCGv src2
)
446 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
449 l1
= gen_new_label();
451 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
452 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
453 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
454 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
455 tcg_gen_movi_tl(r_zero
, 0);
456 tcg_gen_xor_tl(r_temp
, src1
, src2
);
457 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
458 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
459 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
460 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
461 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
462 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
464 #ifdef TARGET_SPARC64
468 l2
= gen_new_label();
469 tcg_gen_movi_tl(r_zero
, 0);
470 tcg_gen_xor_tl(r_temp
, src1
, src2
);
471 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
472 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
473 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
474 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
475 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
476 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_OVF
);
482 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
484 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
487 l1
= gen_new_label();
489 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
490 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
491 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
492 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
493 tcg_gen_movi_tl(r_zero
, 0);
494 tcg_gen_xor_tl(r_temp
, src1
, src2
);
495 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
496 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
497 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
498 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
499 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
500 gen_op_exception(TT_TOVF
);
502 #ifdef TARGET_SPARC64
506 l2
= gen_new_label();
507 tcg_gen_movi_tl(r_zero
, 0);
508 tcg_gen_xor_tl(r_temp
, src1
, src2
);
509 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
510 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
511 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
512 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
513 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
514 gen_op_exception(TT_TOVF
);
520 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
525 l1
= gen_new_label();
526 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
527 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
528 tcg_gen_movi_tl(r_zero
, 0);
529 tcg_gen_or_tl(r_temp
, src1
, src2
);
530 tcg_gen_andi_tl(r_temp
, r_temp
, 0x3);
531 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp
, r_zero
, l1
);
532 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
536 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
541 l1
= gen_new_label();
542 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
543 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
544 tcg_gen_movi_tl(r_zero
, 0);
545 tcg_gen_or_tl(r_temp
, src1
, src2
);
546 tcg_gen_andi_tl(r_temp
, r_temp
, 0x3);
547 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp
, r_zero
, l1
);
548 gen_op_exception(TT_TOVF
);
552 static inline void gen_op_add_T1_T0_cc(void)
554 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
555 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
558 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
559 gen_cc_V_add(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
562 static inline void gen_op_addx_T1_T0_cc(void)
564 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
565 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
566 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
568 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
569 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
570 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
572 gen_cc_V_add(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
575 static inline void gen_op_tadd_T1_T0_cc(void)
577 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
578 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
581 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
582 gen_cc_V_add(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
583 gen_cc_V_tag(cpu_cc_src
, cpu_T
[1]);
586 static inline void gen_op_tadd_T1_T0_ccTV(void)
588 gen_tag_tv(cpu_T
[0], cpu_T
[1]);
589 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
590 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
591 gen_add_tv(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
594 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
599 env->psr |= PSR_CARRY;
601 static inline void gen_cc_C_sub(TCGv src1
, TCGv src2
)
605 l1
= gen_new_label();
606 tcg_gen_brcond_i32(TCG_COND_GEU
, src1
, src2
, l1
);
607 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
609 #ifdef TARGET_SPARC64
613 l2
= gen_new_label();
614 tcg_gen_brcond_tl(TCG_COND_GEU
, src1
, src2
, l2
);
615 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
622 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
625 static inline void gen_cc_V_sub(TCGv dst
, TCGv src1
, TCGv src2
)
627 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
630 l1
= gen_new_label();
632 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
633 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
634 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
635 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
636 tcg_gen_movi_tl(r_zero
, 0);
637 tcg_gen_xor_tl(r_temp
, src1
, src2
);
638 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
639 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
640 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
641 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
642 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
644 #ifdef TARGET_SPARC64
648 l2
= gen_new_label();
649 tcg_gen_movi_tl(r_zero
, 0);
650 tcg_gen_xor_tl(r_temp
, src1
, src2
);
651 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
652 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
653 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
654 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
655 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_OVF
);
661 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
663 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
666 l1
= gen_new_label();
668 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
669 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
670 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
671 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
672 tcg_gen_movi_tl(r_zero
, 0);
673 tcg_gen_xor_tl(r_temp
, src1
, src2
);
674 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
675 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
676 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
677 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
678 gen_op_exception(TT_TOVF
);
680 #ifdef TARGET_SPARC64
684 l2
= gen_new_label();
685 tcg_gen_movi_tl(r_zero
, 0);
686 tcg_gen_xor_tl(r_temp
, src1
, src2
);
687 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
688 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
689 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
690 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
691 gen_op_exception(TT_TOVF
);
697 static inline void gen_op_sub_T1_T0_cc(void)
699 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
700 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
703 gen_cc_C_sub(cpu_cc_src
, cpu_T
[1]);
704 gen_cc_V_sub(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
707 static inline void gen_op_subx_T1_T0_cc(void)
709 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
710 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
711 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
713 gen_cc_C_sub(cpu_T
[0], cpu_cc_src
);
714 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
715 gen_cc_C_sub(cpu_T
[0], cpu_cc_src
);
717 gen_cc_V_sub(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
720 static inline void gen_op_tsub_T1_T0_cc(void)
722 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
723 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
726 gen_cc_C_sub(cpu_cc_src
, cpu_T
[1]);
727 gen_cc_V_sub(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
728 gen_cc_V_tag(cpu_cc_src
, cpu_T
[1]);
731 static inline void gen_op_tsub_T1_T0_ccTV(void)
733 gen_tag_tv(cpu_T
[0], cpu_T
[1]);
734 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
735 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
736 gen_sub_tv(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
739 gen_cc_C_sub(cpu_cc_src
, cpu_T
[1]);
742 static inline void gen_op_div_cc(void)
749 l1
= gen_new_label();
750 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
751 tcg_gen_movi_tl(r_zero
, 0);
752 tcg_gen_brcond_i32(TCG_COND_EQ
, cpu_T
[1], r_zero
, l1
);
753 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
757 static inline void gen_op_logic_T0_cc(void)
764 static inline void gen_op_eval_ba(TCGv dst
)
766 tcg_gen_movi_tl(dst
, 1);
770 static inline void gen_op_eval_be(TCGv dst
, TCGv src
)
772 gen_mov_reg_Z(dst
, src
);
776 static inline void gen_op_eval_ble(TCGv dst
, TCGv src
)
780 r_flag
= tcg_temp_new(TCG_TYPE_TL
);
781 gen_mov_reg_N(r_flag
, src
);
782 gen_mov_reg_V(dst
, src
);
783 tcg_gen_xor_tl(dst
, dst
, r_flag
);
784 gen_mov_reg_Z(r_flag
, src
);
785 tcg_gen_or_tl(dst
, dst
, r_flag
);
789 static inline void gen_op_eval_bl(TCGv dst
, TCGv src
)
793 r_V
= tcg_temp_new(TCG_TYPE_TL
);
794 gen_mov_reg_V(r_V
, src
);
795 gen_mov_reg_N(dst
, src
);
796 tcg_gen_xor_tl(dst
, dst
, r_V
);
800 static inline void gen_op_eval_bleu(TCGv dst
, TCGv src
)
804 r_Z
= tcg_temp_new(TCG_TYPE_TL
);
805 gen_mov_reg_Z(r_Z
, src
);
806 gen_mov_reg_C(dst
, src
);
807 tcg_gen_or_tl(dst
, dst
, r_Z
);
811 static inline void gen_op_eval_bcs(TCGv dst
, TCGv src
)
813 gen_mov_reg_C(dst
, src
);
817 static inline void gen_op_eval_bvs(TCGv dst
, TCGv src
)
819 gen_mov_reg_V(dst
, src
);
823 static inline void gen_op_eval_bn(TCGv dst
)
825 tcg_gen_movi_tl(dst
, 0);
829 static inline void gen_op_eval_bneg(TCGv dst
, TCGv src
)
831 gen_mov_reg_N(dst
, src
);
835 static inline void gen_op_eval_bne(TCGv dst
, TCGv src
)
837 gen_mov_reg_Z(dst
, src
);
838 tcg_gen_xori_tl(dst
, dst
, 0x1);
842 static inline void gen_op_eval_bg(TCGv dst
, TCGv src
)
846 r_flag
= tcg_temp_new(TCG_TYPE_TL
);
847 gen_mov_reg_N(r_flag
, src
);
848 gen_mov_reg_V(dst
, src
);
849 tcg_gen_xor_tl(dst
, dst
, r_flag
);
850 gen_mov_reg_Z(r_flag
, src
);
851 tcg_gen_or_tl(dst
, dst
, r_flag
);
852 tcg_gen_xori_tl(dst
, dst
, 0x1);
856 static inline void gen_op_eval_bge(TCGv dst
, TCGv src
)
860 r_V
= tcg_temp_new(TCG_TYPE_TL
);
861 gen_mov_reg_V(r_V
, src
);
862 gen_mov_reg_N(dst
, src
);
863 tcg_gen_xor_tl(dst
, dst
, r_V
);
864 tcg_gen_xori_tl(dst
, dst
, 0x1);
868 static inline void gen_op_eval_bgu(TCGv dst
, TCGv src
)
872 r_Z
= tcg_temp_new(TCG_TYPE_TL
);
873 gen_mov_reg_Z(r_Z
, src
);
874 gen_mov_reg_C(dst
, src
);
875 tcg_gen_or_tl(dst
, dst
, r_Z
);
876 tcg_gen_xori_tl(dst
, dst
, 0x1);
880 static inline void gen_op_eval_bcc(TCGv dst
, TCGv src
)
882 gen_mov_reg_C(dst
, src
);
883 tcg_gen_xori_tl(dst
, dst
, 0x1);
887 static inline void gen_op_eval_bpos(TCGv dst
, TCGv src
)
889 gen_mov_reg_N(dst
, src
);
890 tcg_gen_xori_tl(dst
, dst
, 0x1);
894 static inline void gen_op_eval_bvc(TCGv dst
, TCGv src
)
896 gen_mov_reg_V(dst
, src
);
897 tcg_gen_xori_tl(dst
, dst
, 0x1);
901 FPSR bit field FCC1 | FCC0:
907 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
908 unsigned int fcc_offset
)
910 tcg_gen_shri_i32(reg
, src
, 10 + fcc_offset
);
911 tcg_gen_andi_tl(reg
, reg
, 0x1);
914 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
915 unsigned int fcc_offset
)
917 tcg_gen_shri_i32(reg
, src
, 11 + fcc_offset
);
918 tcg_gen_andi_tl(reg
, reg
, 0x1);
922 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
923 unsigned int fcc_offset
)
927 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
928 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
929 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
930 tcg_gen_or_tl(dst
, dst
, r_fcc1
);
933 // 1 or 2: FCC0 ^ FCC1
934 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
935 unsigned int fcc_offset
)
939 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
940 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
941 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
942 tcg_gen_xor_tl(dst
, dst
, r_fcc1
);
946 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
947 unsigned int fcc_offset
)
949 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
953 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
954 unsigned int fcc_offset
)
958 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
959 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
960 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
961 tcg_gen_xori_tl(r_fcc1
, r_fcc1
, 0x1);
962 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
966 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
967 unsigned int fcc_offset
)
969 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
973 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
974 unsigned int fcc_offset
)
978 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
979 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
980 tcg_gen_xori_tl(dst
, dst
, 0x1);
981 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
982 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
986 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
987 unsigned int fcc_offset
)
991 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
992 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
993 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
994 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
998 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
999 unsigned int fcc_offset
)
1003 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1004 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1005 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1006 tcg_gen_or_tl(dst
, dst
, r_fcc1
);
1007 tcg_gen_xori_tl(dst
, dst
, 0x1);
1010 // 0 or 3: !(FCC0 ^ FCC1)
1011 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1012 unsigned int fcc_offset
)
1016 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1017 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1018 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1019 tcg_gen_xor_tl(dst
, dst
, r_fcc1
);
1020 tcg_gen_xori_tl(dst
, dst
, 0x1);
1024 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1025 unsigned int fcc_offset
)
1027 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1028 tcg_gen_xori_tl(dst
, dst
, 0x1);
1031 // !1: !(FCC0 & !FCC1)
1032 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1033 unsigned int fcc_offset
)
1037 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1038 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1039 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1040 tcg_gen_xori_tl(r_fcc1
, r_fcc1
, 0x1);
1041 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1042 tcg_gen_xori_tl(dst
, dst
, 0x1);
1046 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1047 unsigned int fcc_offset
)
1049 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1050 tcg_gen_xori_tl(dst
, dst
, 0x1);
1053 // !2: !(!FCC0 & FCC1)
1054 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1055 unsigned int fcc_offset
)
1059 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1060 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1061 tcg_gen_xori_tl(dst
, dst
, 0x1);
1062 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1063 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1064 tcg_gen_xori_tl(dst
, dst
, 0x1);
1067 // !3: !(FCC0 & FCC1)
1068 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1069 unsigned int fcc_offset
)
1073 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1074 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1075 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1076 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1077 tcg_gen_xori_tl(dst
, dst
, 0x1);
1080 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1081 target_ulong pc2
, TCGv r_cond
)
1086 l1
= gen_new_label();
1087 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
1088 tcg_gen_movi_tl(r_zero
, 0);
1090 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
1092 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1095 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1098 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1099 target_ulong pc2
, TCGv r_cond
)
1104 l1
= gen_new_label();
1105 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
1106 tcg_gen_movi_tl(r_zero
, 0);
1108 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
1110 gen_goto_tb(dc
, 0, pc2
, pc1
);
1113 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1116 static inline void gen_branch(DisasContext
*dc
, target_ulong pc
,
1119 gen_goto_tb(dc
, 0, pc
, npc
);
1122 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1128 l1
= gen_new_label();
1129 l2
= gen_new_label();
1130 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
1131 tcg_gen_movi_tl(r_zero
, 0);
1133 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
1135 gen_movl_npc_im(npc1
);
1136 gen_op_jmp_label(l2
);
1139 gen_movl_npc_im(npc2
);
1143 /* call this function before using T2 as it may have been set for a jump */
1144 static inline void flush_T2(DisasContext
* dc
)
1146 if (dc
->npc
== JUMP_PC
) {
1147 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
1148 dc
->npc
= DYNAMIC_PC
;
1152 static inline void save_npc(DisasContext
* dc
)
1154 if (dc
->npc
== JUMP_PC
) {
1155 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
1156 dc
->npc
= DYNAMIC_PC
;
1157 } else if (dc
->npc
!= DYNAMIC_PC
) {
1158 gen_movl_npc_im(dc
->npc
);
1162 static inline void save_state(DisasContext
* dc
)
1168 static inline void gen_mov_pc_npc(DisasContext
* dc
)
1170 if (dc
->npc
== JUMP_PC
) {
1171 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
1172 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
1173 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
1174 dc
->pc
= DYNAMIC_PC
;
1175 } else if (dc
->npc
== DYNAMIC_PC
) {
1176 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
1177 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
1178 dc
->pc
= DYNAMIC_PC
;
1184 static inline void gen_op_next_insn(void)
1186 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
1187 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
1188 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, 4);
1189 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
1192 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1196 #ifdef TARGET_SPARC64
1206 gen_op_eval_bn(r_dst
);
1209 gen_op_eval_be(r_dst
, r_src
);
1212 gen_op_eval_ble(r_dst
, r_src
);
1215 gen_op_eval_bl(r_dst
, r_src
);
1218 gen_op_eval_bleu(r_dst
, r_src
);
1221 gen_op_eval_bcs(r_dst
, r_src
);
1224 gen_op_eval_bneg(r_dst
, r_src
);
1227 gen_op_eval_bvs(r_dst
, r_src
);
1230 gen_op_eval_ba(r_dst
);
1233 gen_op_eval_bne(r_dst
, r_src
);
1236 gen_op_eval_bg(r_dst
, r_src
);
1239 gen_op_eval_bge(r_dst
, r_src
);
1242 gen_op_eval_bgu(r_dst
, r_src
);
1245 gen_op_eval_bcc(r_dst
, r_src
);
1248 gen_op_eval_bpos(r_dst
, r_src
);
1251 gen_op_eval_bvc(r_dst
, r_src
);
1256 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1259 unsigned int offset
;
1261 r_src
= tcg_temp_new(TCG_TYPE_TL
);
1262 tcg_gen_ld_tl(r_src
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1282 gen_op_eval_bn(r_dst
);
1285 gen_op_eval_fbne(r_dst
, r_src
, offset
);
1288 gen_op_eval_fblg(r_dst
, r_src
, offset
);
1291 gen_op_eval_fbul(r_dst
, r_src
, offset
);
1294 gen_op_eval_fbl(r_dst
, r_src
, offset
);
1297 gen_op_eval_fbug(r_dst
, r_src
, offset
);
1300 gen_op_eval_fbg(r_dst
, r_src
, offset
);
1303 gen_op_eval_fbu(r_dst
, r_src
, offset
);
1306 gen_op_eval_ba(r_dst
);
1309 gen_op_eval_fbe(r_dst
, r_src
, offset
);
1312 gen_op_eval_fbue(r_dst
, r_src
, offset
);
1315 gen_op_eval_fbge(r_dst
, r_src
, offset
);
1318 gen_op_eval_fbuge(r_dst
, r_src
, offset
);
1321 gen_op_eval_fble(r_dst
, r_src
, offset
);
1324 gen_op_eval_fbule(r_dst
, r_src
, offset
);
1327 gen_op_eval_fbo(r_dst
, r_src
, offset
);
1332 #ifdef TARGET_SPARC64
1334 static const int gen_tcg_cond_reg
[8] = {
1345 static inline void gen_cond_reg(TCGv r_dst
, int cond
)
1350 l1
= gen_new_label();
1351 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
1352 tcg_gen_movi_tl(r_zero
, 0);
1353 tcg_gen_mov_tl(r_dst
, r_zero
);
1354 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
1355 tcg_gen_movi_tl(r_dst
, 1);
1360 /* XXX: potentially incorrect if dynamic npc */
1361 static void do_branch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
1363 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1364 target_ulong target
= dc
->pc
+ offset
;
1367 /* unconditional not taken */
1369 dc
->pc
= dc
->npc
+ 4;
1370 dc
->npc
= dc
->pc
+ 4;
1373 dc
->npc
= dc
->pc
+ 4;
1375 } else if (cond
== 0x8) {
1376 /* unconditional taken */
1379 dc
->npc
= dc
->pc
+ 4;
1386 gen_cond(cpu_T
[2], cc
, cond
);
1388 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1392 dc
->jump_pc
[0] = target
;
1393 dc
->jump_pc
[1] = dc
->npc
+ 4;
1399 /* XXX: potentially incorrect if dynamic npc */
1400 static void do_fbranch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
1402 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1403 target_ulong target
= dc
->pc
+ offset
;
1406 /* unconditional not taken */
1408 dc
->pc
= dc
->npc
+ 4;
1409 dc
->npc
= dc
->pc
+ 4;
1412 dc
->npc
= dc
->pc
+ 4;
1414 } else if (cond
== 0x8) {
1415 /* unconditional taken */
1418 dc
->npc
= dc
->pc
+ 4;
1425 gen_fcond(cpu_T
[2], cc
, cond
);
1427 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1431 dc
->jump_pc
[0] = target
;
1432 dc
->jump_pc
[1] = dc
->npc
+ 4;
1438 #ifdef TARGET_SPARC64
1439 /* XXX: potentially incorrect if dynamic npc */
1440 static void do_branch_reg(DisasContext
* dc
, int32_t offset
, uint32_t insn
)
1442 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1443 target_ulong target
= dc
->pc
+ offset
;
1446 gen_cond_reg(cpu_T
[2], cond
);
1448 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1452 dc
->jump_pc
[0] = target
;
1453 dc
->jump_pc
[1] = dc
->npc
+ 4;
1458 static GenOpFunc
* const gen_fcmps
[4] = {
1465 static GenOpFunc
* const gen_fcmpd
[4] = {
1472 #if defined(CONFIG_USER_ONLY)
1473 static GenOpFunc
* const gen_fcmpq
[4] = {
1481 static GenOpFunc
* const gen_fcmpes
[4] = {
1488 static GenOpFunc
* const gen_fcmped
[4] = {
1495 #if defined(CONFIG_USER_ONLY)
1496 static GenOpFunc
* const gen_fcmpeq
[4] = {
1504 static inline void gen_op_fcmps(int fccno
)
1506 tcg_gen_helper_0_0(gen_fcmps
[fccno
]);
1509 static inline void gen_op_fcmpd(int fccno
)
1511 tcg_gen_helper_0_0(gen_fcmpd
[fccno
]);
1514 #if defined(CONFIG_USER_ONLY)
1515 static inline void gen_op_fcmpq(int fccno
)
1517 tcg_gen_helper_0_0(gen_fcmpq
[fccno
]);
1521 static inline void gen_op_fcmpes(int fccno
)
1523 tcg_gen_helper_0_0(gen_fcmpes
[fccno
]);
1526 static inline void gen_op_fcmped(int fccno
)
1528 tcg_gen_helper_0_0(gen_fcmped
[fccno
]);
1531 #if defined(CONFIG_USER_ONLY)
1532 static inline void gen_op_fcmpeq(int fccno
)
1534 tcg_gen_helper_0_0(gen_fcmpeq
[fccno
]);
1540 static inline void gen_op_fcmps(int fccno
)
1542 tcg_gen_helper_0_0(helper_fcmps
);
1545 static inline void gen_op_fcmpd(int fccno
)
1547 tcg_gen_helper_0_0(helper_fcmpd
);
1550 #if defined(CONFIG_USER_ONLY)
1551 static inline void gen_op_fcmpq(int fccno
)
1553 tcg_gen_helper_0_0(helper_fcmpq
);
1557 static inline void gen_op_fcmpes(int fccno
)
1559 tcg_gen_helper_0_0(helper_fcmpes
);
1562 static inline void gen_op_fcmped(int fccno
)
1564 tcg_gen_helper_0_0(helper_fcmped
);
1567 #if defined(CONFIG_USER_ONLY)
1568 static inline void gen_op_fcmpeq(int fccno
)
1570 tcg_gen_helper_0_0(helper_fcmpeq
);
1576 static inline void gen_op_fpexception_im(int fsr_flags
)
1578 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1579 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, ~FSR_FTT_MASK
);
1580 tcg_gen_ori_tl(cpu_tmp0
, cpu_tmp0
, fsr_flags
);
1581 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1582 gen_op_exception(TT_FP_EXCP
);
1585 static int gen_trap_ifnofpu(DisasContext
* dc
)
1587 #if !defined(CONFIG_USER_ONLY)
1588 if (!dc
->fpu_enabled
) {
1590 gen_op_exception(TT_NFPU_INSN
);
1598 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1600 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1601 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, ~(FSR_FTT_MASK
| FSR_CEXC_MASK
));
1602 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1605 static inline void gen_clear_float_exceptions(void)
1607 tcg_gen_helper_0_0(helper_clear_float_exceptions
);
1611 #ifdef TARGET_SPARC64
1612 static inline void gen_ld_asi(int insn
, int size
, int sign
)
1615 TCGv r_size
, r_sign
;
1617 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1618 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1619 tcg_gen_movi_i32(r_size
, size
);
1620 tcg_gen_movi_i32(r_sign
, sign
);
1622 offset
= GET_FIELD(insn
, 25, 31);
1623 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1624 tcg_gen_ld_i32(cpu_T
[1], cpu_env
, offsetof(CPUSPARCState
, asi
));
1626 asi
= GET_FIELD(insn
, 19, 26);
1627 tcg_gen_movi_i32(cpu_T
[1], asi
);
1629 tcg_gen_helper_1_4(helper_ld_asi
, cpu_T
[1], cpu_T
[0], cpu_T
[1], r_size
,
1633 static inline void gen_st_asi(int insn
, int size
)
1638 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1639 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1640 tcg_gen_movi_i32(r_size
, size
);
1642 offset
= GET_FIELD(insn
, 25, 31);
1643 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1644 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1646 asi
= GET_FIELD(insn
, 19, 26);
1647 tcg_gen_movi_i32(r_asi
, asi
);
1649 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], cpu_T
[1], r_asi
, r_size
);
1652 static inline void gen_ldf_asi(int insn
, int size
, int rd
)
1655 TCGv r_asi
, r_size
, r_rd
;
1657 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1658 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1659 r_rd
= tcg_temp_new(TCG_TYPE_I32
);
1660 tcg_gen_movi_i32(r_size
, size
);
1661 tcg_gen_movi_i32(r_rd
, rd
);
1663 offset
= GET_FIELD(insn
, 25, 31);
1664 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1665 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1667 asi
= GET_FIELD(insn
, 19, 26);
1668 tcg_gen_movi_i32(r_asi
, asi
);
1670 tcg_gen_helper_0_4(helper_ldf_asi
, cpu_T
[0], r_asi
, r_size
, r_rd
);
1673 static inline void gen_stf_asi(int insn
, int size
, int rd
)
1676 TCGv r_asi
, r_size
, r_rd
;
1678 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1679 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1680 r_rd
= tcg_temp_new(TCG_TYPE_I32
);
1681 tcg_gen_movi_i32(r_size
, size
);
1682 tcg_gen_movi_i32(r_rd
, rd
);
1684 offset
= GET_FIELD(insn
, 25, 31);
1685 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1686 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1688 asi
= GET_FIELD(insn
, 19, 26);
1689 tcg_gen_movi_i32(r_asi
, asi
);
1691 tcg_gen_helper_0_4(helper_stf_asi
, cpu_T
[0], r_asi
, r_size
, r_rd
);
1694 static inline void gen_swap_asi(int insn
)
1697 TCGv r_size
, r_sign
, r_temp
;
1699 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1700 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1701 r_temp
= tcg_temp_new(TCG_TYPE_I32
);
1702 tcg_gen_movi_i32(r_size
, 4);
1703 tcg_gen_movi_i32(r_sign
, 0);
1705 offset
= GET_FIELD(insn
, 25, 31);
1706 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1707 tcg_gen_ld_i32(cpu_T
[1], cpu_env
, offsetof(CPUSPARCState
, asi
));
1709 asi
= GET_FIELD(insn
, 19, 26);
1710 tcg_gen_movi_i32(cpu_T
[1], asi
);
1712 tcg_gen_helper_1_4(helper_ld_asi
, r_temp
, cpu_T
[0], cpu_T
[1], r_size
,
1714 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], cpu_T
[1], r_size
, r_sign
);
1715 tcg_gen_mov_i32(cpu_T
[1], r_temp
);
1718 static inline void gen_ldda_asi(int insn
)
1721 TCGv r_size
, r_sign
, r_dword
;
1723 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1724 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1725 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1726 tcg_gen_movi_i32(r_size
, 8);
1727 tcg_gen_movi_i32(r_sign
, 0);
1729 offset
= GET_FIELD(insn
, 25, 31);
1730 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1731 tcg_gen_ld_i32(cpu_T
[1], cpu_env
, offsetof(CPUSPARCState
, asi
));
1733 asi
= GET_FIELD(insn
, 19, 26);
1734 tcg_gen_movi_i32(cpu_T
[1], asi
);
1736 tcg_gen_helper_1_4(helper_ld_asi
, r_dword
, cpu_T
[0], cpu_T
[1], r_size
,
1738 tcg_gen_trunc_i64_i32(cpu_T
[0], r_dword
);
1739 tcg_gen_shri_i64(r_dword
, r_dword
, 32);
1740 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
1743 static inline void gen_cas_asi(int insn
, int rd
)
1748 r_val1
= tcg_temp_new(TCG_TYPE_I32
);
1749 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1750 gen_movl_reg_TN(rd
, r_val1
);
1752 offset
= GET_FIELD(insn
, 25, 31);
1753 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1754 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1756 asi
= GET_FIELD(insn
, 19, 26);
1757 tcg_gen_movi_i32(r_asi
, asi
);
1759 tcg_gen_helper_1_4(helper_cas_asi
, cpu_T
[1], cpu_T
[0], r_val1
, cpu_T
[1],
1763 static inline void gen_casx_asi(int insn
, int rd
)
1768 r_val1
= tcg_temp_new(TCG_TYPE_I64
);
1769 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1770 gen_movl_reg_TN(rd
, r_val1
);
1772 offset
= GET_FIELD(insn
, 25, 31);
1773 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1774 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1776 asi
= GET_FIELD(insn
, 19, 26);
1777 tcg_gen_movi_i32(r_asi
, asi
);
1779 tcg_gen_helper_1_4(helper_casx_asi
, cpu_T
[1], cpu_T
[0], r_val1
, cpu_T
[1],
1783 #elif !defined(CONFIG_USER_ONLY)
1785 static inline void gen_ld_asi(int insn
, int size
, int sign
)
1788 TCGv r_size
, r_sign
, r_dword
;
1790 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1791 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1792 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1793 tcg_gen_movi_i32(r_size
, size
);
1794 tcg_gen_movi_i32(r_sign
, sign
);
1795 asi
= GET_FIELD(insn
, 19, 26);
1796 tcg_gen_movi_i32(cpu_T
[1], asi
);
1797 tcg_gen_helper_1_4(helper_ld_asi
, r_dword
, cpu_T
[0], cpu_T
[1], r_size
,
1799 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
1802 static inline void gen_st_asi(int insn
, int size
)
1805 TCGv r_dword
, r_asi
, r_size
;
1807 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1808 tcg_gen_extu_i32_i64(r_dword
, cpu_T
[1]);
1809 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1810 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1811 asi
= GET_FIELD(insn
, 19, 26);
1812 tcg_gen_movi_i32(r_asi
, asi
);
1813 tcg_gen_movi_i32(r_size
, size
);
1814 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], r_dword
, r_asi
, r_size
);
1817 static inline void gen_swap_asi(int insn
)
1820 TCGv r_size
, r_sign
, r_temp
;
1822 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1823 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1824 r_temp
= tcg_temp_new(TCG_TYPE_I32
);
1825 tcg_gen_movi_i32(r_size
, 4);
1826 tcg_gen_movi_i32(r_sign
, 0);
1827 asi
= GET_FIELD(insn
, 19, 26);
1828 tcg_gen_movi_i32(cpu_T
[1], asi
);
1829 tcg_gen_helper_1_4(helper_ld_asi
, r_temp
, cpu_T
[0], cpu_T
[1], r_size
,
1831 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], cpu_T
[1], r_size
, r_sign
);
1832 tcg_gen_mov_i32(cpu_T
[1], r_temp
);
1835 static inline void gen_ldda_asi(int insn
)
1838 TCGv r_size
, r_sign
, r_dword
;
1840 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1841 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1842 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1843 tcg_gen_movi_i32(r_size
, 8);
1844 tcg_gen_movi_i32(r_sign
, 0);
1845 asi
= GET_FIELD(insn
, 19, 26);
1846 tcg_gen_movi_i32(cpu_T
[1], asi
);
1847 tcg_gen_helper_1_4(helper_ld_asi
, r_dword
, cpu_T
[0], cpu_T
[1], r_size
,
1849 tcg_gen_trunc_i64_i32(cpu_T
[0], r_dword
);
1850 tcg_gen_shri_i64(r_dword
, r_dword
, 32);
1851 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
1855 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1856 static inline void gen_ldstub_asi(int insn
)
1859 TCGv r_dword
, r_asi
, r_size
;
1861 gen_ld_asi(insn
, 1, 0);
1863 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1864 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1865 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1866 asi
= GET_FIELD(insn
, 19, 26);
1867 tcg_gen_movi_i32(r_dword
, 0xff);
1868 tcg_gen_movi_i32(r_asi
, asi
);
1869 tcg_gen_movi_i32(r_size
, 1);
1870 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], r_dword
, r_asi
, r_size
);
1874 /* before an instruction, dc->pc must be static */
1875 static void disas_sparc_insn(DisasContext
* dc
)
1877 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1879 insn
= ldl_code(dc
->pc
);
1880 opc
= GET_FIELD(insn
, 0, 1);
1882 rd
= GET_FIELD(insn
, 2, 6);
1884 case 0: /* branches/sethi */
1886 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1889 #ifdef TARGET_SPARC64
1890 case 0x1: /* V9 BPcc */
1894 target
= GET_FIELD_SP(insn
, 0, 18);
1895 target
= sign_extend(target
, 18);
1897 cc
= GET_FIELD_SP(insn
, 20, 21);
1899 do_branch(dc
, target
, insn
, 0);
1901 do_branch(dc
, target
, insn
, 1);
1906 case 0x3: /* V9 BPr */
1908 target
= GET_FIELD_SP(insn
, 0, 13) |
1909 (GET_FIELD_SP(insn
, 20, 21) << 14);
1910 target
= sign_extend(target
, 16);
1912 rs1
= GET_FIELD(insn
, 13, 17);
1913 gen_movl_reg_T0(rs1
);
1914 do_branch_reg(dc
, target
, insn
);
1917 case 0x5: /* V9 FBPcc */
1919 int cc
= GET_FIELD_SP(insn
, 20, 21);
1920 if (gen_trap_ifnofpu(dc
))
1922 target
= GET_FIELD_SP(insn
, 0, 18);
1923 target
= sign_extend(target
, 19);
1925 do_fbranch(dc
, target
, insn
, cc
);
1929 case 0x7: /* CBN+x */
1934 case 0x2: /* BN+x */
1936 target
= GET_FIELD(insn
, 10, 31);
1937 target
= sign_extend(target
, 22);
1939 do_branch(dc
, target
, insn
, 0);
1942 case 0x6: /* FBN+x */
1944 if (gen_trap_ifnofpu(dc
))
1946 target
= GET_FIELD(insn
, 10, 31);
1947 target
= sign_extend(target
, 22);
1949 do_fbranch(dc
, target
, insn
, 0);
1952 case 0x4: /* SETHI */
1957 uint32_t value
= GET_FIELD(insn
, 10, 31);
1958 tcg_gen_movi_tl(cpu_T
[0], value
<< 10);
1959 gen_movl_T0_reg(rd
);
1964 case 0x0: /* UNIMPL */
1973 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1975 tcg_gen_movi_tl(cpu_T
[0], dc
->pc
);
1976 gen_movl_T0_reg(15);
1982 case 2: /* FPU & Logical Operations */
1984 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1985 if (xop
== 0x3a) { /* generate trap */
1988 rs1
= GET_FIELD(insn
, 13, 17);
1989 gen_movl_reg_T0(rs1
);
1991 rs2
= GET_FIELD(insn
, 25, 31);
1992 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], rs2
);
1994 rs2
= GET_FIELD(insn
, 27, 31);
1998 gen_movl_reg_T1(rs2
);
2004 cond
= GET_FIELD(insn
, 3, 6);
2007 tcg_gen_helper_0_1(helper_trap
, cpu_T
[0]);
2008 } else if (cond
!= 0) {
2009 #ifdef TARGET_SPARC64
2011 int cc
= GET_FIELD_SP(insn
, 11, 12);
2015 gen_cond(cpu_T
[2], 0, cond
);
2017 gen_cond(cpu_T
[2], 1, cond
);
2023 gen_cond(cpu_T
[2], 0, cond
);
2025 tcg_gen_helper_0_2(helper_trapcc
, cpu_T
[0], cpu_T
[2]);
2031 } else if (xop
== 0x28) {
2032 rs1
= GET_FIELD(insn
, 13, 17);
2035 #ifndef TARGET_SPARC64
2036 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2037 manual, rdy on the microSPARC
2039 case 0x0f: /* stbar in the SPARCv8 manual,
2040 rdy on the microSPARC II */
2041 case 0x10 ... 0x1f: /* implementation-dependent in the
2042 SPARCv8 manual, rdy on the
2045 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, y
));
2046 gen_movl_T0_reg(rd
);
2048 #ifdef TARGET_SPARC64
2049 case 0x2: /* V9 rdccr */
2051 gen_movl_T0_reg(rd
);
2053 case 0x3: /* V9 rdasi */
2054 gen_op_movl_T0_env(offsetof(CPUSPARCState
, asi
));
2055 gen_movl_T0_reg(rd
);
2057 case 0x4: /* V9 rdtick */
2061 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2062 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2063 offsetof(CPUState
, tick
));
2064 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
2066 gen_movl_T0_reg(rd
);
2069 case 0x5: /* V9 rdpc */
2070 tcg_gen_movi_tl(cpu_T
[0], dc
->pc
);
2071 gen_movl_T0_reg(rd
);
2073 case 0x6: /* V9 rdfprs */
2074 gen_op_movl_T0_env(offsetof(CPUSPARCState
, fprs
));
2075 gen_movl_T0_reg(rd
);
2077 case 0xf: /* V9 membar */
2078 break; /* no effect */
2079 case 0x13: /* Graphics Status */
2080 if (gen_trap_ifnofpu(dc
))
2082 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, gsr
));
2083 gen_movl_T0_reg(rd
);
2085 case 0x17: /* Tick compare */
2086 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tick_cmpr
));
2087 gen_movl_T0_reg(rd
);
2089 case 0x18: /* System tick */
2093 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2094 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2095 offsetof(CPUState
, stick
));
2096 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
2098 gen_movl_T0_reg(rd
);
2101 case 0x19: /* System tick compare */
2102 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, stick_cmpr
));
2103 gen_movl_T0_reg(rd
);
2105 case 0x10: /* Performance Control */
2106 case 0x11: /* Performance Instrumentation Counter */
2107 case 0x12: /* Dispatch Control */
2108 case 0x14: /* Softint set, WO */
2109 case 0x15: /* Softint clear, WO */
2110 case 0x16: /* Softint write */
2115 #if !defined(CONFIG_USER_ONLY)
2116 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2117 #ifndef TARGET_SPARC64
2118 if (!supervisor(dc
))
2120 tcg_gen_helper_1_0(helper_rdpsr
, cpu_T
[0]);
2122 if (!hypervisor(dc
))
2124 rs1
= GET_FIELD(insn
, 13, 17);
2127 // gen_op_rdhpstate();
2130 // gen_op_rdhtstate();
2133 gen_op_movl_T0_env(offsetof(CPUSPARCState
, hintp
));
2136 gen_op_movl_T0_env(offsetof(CPUSPARCState
, htba
));
2139 gen_op_movl_T0_env(offsetof(CPUSPARCState
, hver
));
2141 case 31: // hstick_cmpr
2142 gen_op_movl_env_T0(offsetof(CPUSPARCState
, hstick_cmpr
));
2148 gen_movl_T0_reg(rd
);
2150 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2151 if (!supervisor(dc
))
2153 #ifdef TARGET_SPARC64
2154 rs1
= GET_FIELD(insn
, 13, 17);
2160 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2161 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2162 offsetof(CPUState
, tsptr
));
2163 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
2164 offsetof(trap_state
, tpc
));
2171 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2172 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2173 offsetof(CPUState
, tsptr
));
2174 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
2175 offsetof(trap_state
, tnpc
));
2182 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2183 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2184 offsetof(CPUState
, tsptr
));
2185 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
2186 offsetof(trap_state
, tstate
));
2193 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2194 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2195 offsetof(CPUState
, tsptr
));
2196 tcg_gen_ld_i32(cpu_T
[0], r_tsptr
,
2197 offsetof(trap_state
, tt
));
2204 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2205 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2206 offsetof(CPUState
, tick
));
2207 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
2209 gen_movl_T0_reg(rd
);
2213 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
2216 gen_op_movl_T0_env(offsetof(CPUSPARCState
, pstate
));
2219 gen_op_movl_T0_env(offsetof(CPUSPARCState
, tl
));
2222 gen_op_movl_T0_env(offsetof(CPUSPARCState
, psrpil
));
2228 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cansave
));
2230 case 11: // canrestore
2231 gen_op_movl_T0_env(offsetof(CPUSPARCState
, canrestore
));
2233 case 12: // cleanwin
2234 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cleanwin
));
2236 case 13: // otherwin
2237 gen_op_movl_T0_env(offsetof(CPUSPARCState
, otherwin
));
2240 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wstate
));
2242 case 16: // UA2005 gl
2243 gen_op_movl_T0_env(offsetof(CPUSPARCState
, gl
));
2245 case 26: // UA2005 strand status
2246 if (!hypervisor(dc
))
2248 gen_op_movl_T0_env(offsetof(CPUSPARCState
, ssr
));
2251 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, version
));
2258 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wim
));
2260 gen_movl_T0_reg(rd
);
2262 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2263 #ifdef TARGET_SPARC64
2266 if (!supervisor(dc
))
2268 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
2269 gen_movl_T0_reg(rd
);
2273 } else if (xop
== 0x34) { /* FPU Operations */
2274 if (gen_trap_ifnofpu(dc
))
2276 gen_op_clear_ieee_excp_and_FTT();
2277 rs1
= GET_FIELD(insn
, 13, 17);
2278 rs2
= GET_FIELD(insn
, 27, 31);
2279 xop
= GET_FIELD(insn
, 18, 26);
2281 case 0x1: /* fmovs */
2282 gen_op_load_fpr_FT0(rs2
);
2283 gen_op_store_FT0_fpr(rd
);
2285 case 0x5: /* fnegs */
2286 gen_op_load_fpr_FT1(rs2
);
2288 gen_op_store_FT0_fpr(rd
);
2290 case 0x9: /* fabss */
2291 gen_op_load_fpr_FT1(rs2
);
2292 tcg_gen_helper_0_0(helper_fabss
);
2293 gen_op_store_FT0_fpr(rd
);
2295 case 0x29: /* fsqrts */
2296 gen_op_load_fpr_FT1(rs2
);
2297 gen_clear_float_exceptions();
2298 tcg_gen_helper_0_0(helper_fsqrts
);
2299 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2300 gen_op_store_FT0_fpr(rd
);
2302 case 0x2a: /* fsqrtd */
2303 gen_op_load_fpr_DT1(DFPREG(rs2
));
2304 gen_clear_float_exceptions();
2305 tcg_gen_helper_0_0(helper_fsqrtd
);
2306 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2307 gen_op_store_DT0_fpr(DFPREG(rd
));
2309 case 0x2b: /* fsqrtq */
2310 #if defined(CONFIG_USER_ONLY)
2311 gen_op_load_fpr_QT1(QFPREG(rs2
));
2312 gen_clear_float_exceptions();
2313 tcg_gen_helper_0_0(helper_fsqrtq
);
2314 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2315 gen_op_store_QT0_fpr(QFPREG(rd
));
2321 gen_op_load_fpr_FT0(rs1
);
2322 gen_op_load_fpr_FT1(rs2
);
2323 gen_clear_float_exceptions();
2325 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2326 gen_op_store_FT0_fpr(rd
);
2329 gen_op_load_fpr_DT0(DFPREG(rs1
));
2330 gen_op_load_fpr_DT1(DFPREG(rs2
));
2331 gen_clear_float_exceptions();
2333 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2334 gen_op_store_DT0_fpr(DFPREG(rd
));
2336 case 0x43: /* faddq */
2337 #if defined(CONFIG_USER_ONLY)
2338 gen_op_load_fpr_QT0(QFPREG(rs1
));
2339 gen_op_load_fpr_QT1(QFPREG(rs2
));
2340 gen_clear_float_exceptions();
2342 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2343 gen_op_store_QT0_fpr(QFPREG(rd
));
2349 gen_op_load_fpr_FT0(rs1
);
2350 gen_op_load_fpr_FT1(rs2
);
2351 gen_clear_float_exceptions();
2353 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2354 gen_op_store_FT0_fpr(rd
);
2357 gen_op_load_fpr_DT0(DFPREG(rs1
));
2358 gen_op_load_fpr_DT1(DFPREG(rs2
));
2359 gen_clear_float_exceptions();
2361 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2362 gen_op_store_DT0_fpr(DFPREG(rd
));
2364 case 0x47: /* fsubq */
2365 #if defined(CONFIG_USER_ONLY)
2366 gen_op_load_fpr_QT0(QFPREG(rs1
));
2367 gen_op_load_fpr_QT1(QFPREG(rs2
));
2368 gen_clear_float_exceptions();
2370 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2371 gen_op_store_QT0_fpr(QFPREG(rd
));
2377 gen_op_load_fpr_FT0(rs1
);
2378 gen_op_load_fpr_FT1(rs2
);
2379 gen_clear_float_exceptions();
2381 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2382 gen_op_store_FT0_fpr(rd
);
2385 gen_op_load_fpr_DT0(DFPREG(rs1
));
2386 gen_op_load_fpr_DT1(DFPREG(rs2
));
2387 gen_clear_float_exceptions();
2389 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2390 gen_op_store_DT0_fpr(DFPREG(rd
));
2392 case 0x4b: /* fmulq */
2393 #if defined(CONFIG_USER_ONLY)
2394 gen_op_load_fpr_QT0(QFPREG(rs1
));
2395 gen_op_load_fpr_QT1(QFPREG(rs2
));
2396 gen_clear_float_exceptions();
2398 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2399 gen_op_store_QT0_fpr(QFPREG(rd
));
2405 gen_op_load_fpr_FT0(rs1
);
2406 gen_op_load_fpr_FT1(rs2
);
2407 gen_clear_float_exceptions();
2409 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2410 gen_op_store_FT0_fpr(rd
);
2413 gen_op_load_fpr_DT0(DFPREG(rs1
));
2414 gen_op_load_fpr_DT1(DFPREG(rs2
));
2415 gen_clear_float_exceptions();
2417 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2418 gen_op_store_DT0_fpr(DFPREG(rd
));
2420 case 0x4f: /* fdivq */
2421 #if defined(CONFIG_USER_ONLY)
2422 gen_op_load_fpr_QT0(QFPREG(rs1
));
2423 gen_op_load_fpr_QT1(QFPREG(rs2
));
2424 gen_clear_float_exceptions();
2426 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2427 gen_op_store_QT0_fpr(QFPREG(rd
));
2433 gen_op_load_fpr_FT0(rs1
);
2434 gen_op_load_fpr_FT1(rs2
);
2435 gen_clear_float_exceptions();
2437 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2438 gen_op_store_DT0_fpr(DFPREG(rd
));
2440 case 0x6e: /* fdmulq */
2441 #if defined(CONFIG_USER_ONLY)
2442 gen_op_load_fpr_DT0(DFPREG(rs1
));
2443 gen_op_load_fpr_DT1(DFPREG(rs2
));
2444 gen_clear_float_exceptions();
2446 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2447 gen_op_store_QT0_fpr(QFPREG(rd
));
2453 gen_op_load_fpr_FT1(rs2
);
2454 gen_clear_float_exceptions();
2456 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2457 gen_op_store_FT0_fpr(rd
);
2460 gen_op_load_fpr_DT1(DFPREG(rs2
));
2461 gen_clear_float_exceptions();
2463 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2464 gen_op_store_FT0_fpr(rd
);
2466 case 0xc7: /* fqtos */
2467 #if defined(CONFIG_USER_ONLY)
2468 gen_op_load_fpr_QT1(QFPREG(rs2
));
2469 gen_clear_float_exceptions();
2471 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2472 gen_op_store_FT0_fpr(rd
);
2478 gen_op_load_fpr_FT1(rs2
);
2480 gen_op_store_DT0_fpr(DFPREG(rd
));
2483 gen_op_load_fpr_FT1(rs2
);
2485 gen_op_store_DT0_fpr(DFPREG(rd
));
2487 case 0xcb: /* fqtod */
2488 #if defined(CONFIG_USER_ONLY)
2489 gen_op_load_fpr_QT1(QFPREG(rs2
));
2490 gen_clear_float_exceptions();
2492 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2493 gen_op_store_DT0_fpr(DFPREG(rd
));
2498 case 0xcc: /* fitoq */
2499 #if defined(CONFIG_USER_ONLY)
2500 gen_op_load_fpr_FT1(rs2
);
2502 gen_op_store_QT0_fpr(QFPREG(rd
));
2507 case 0xcd: /* fstoq */
2508 #if defined(CONFIG_USER_ONLY)
2509 gen_op_load_fpr_FT1(rs2
);
2511 gen_op_store_QT0_fpr(QFPREG(rd
));
2516 case 0xce: /* fdtoq */
2517 #if defined(CONFIG_USER_ONLY)
2518 gen_op_load_fpr_DT1(DFPREG(rs2
));
2520 gen_op_store_QT0_fpr(QFPREG(rd
));
2526 gen_op_load_fpr_FT1(rs2
);
2527 gen_clear_float_exceptions();
2529 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2530 gen_op_store_FT0_fpr(rd
);
2533 gen_op_load_fpr_DT1(DFPREG(rs2
));
2534 gen_clear_float_exceptions();
2536 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2537 gen_op_store_FT0_fpr(rd
);
2539 case 0xd3: /* fqtoi */
2540 #if defined(CONFIG_USER_ONLY)
2541 gen_op_load_fpr_QT1(QFPREG(rs2
));
2542 gen_clear_float_exceptions();
2544 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2545 gen_op_store_FT0_fpr(rd
);
2550 #ifdef TARGET_SPARC64
2551 case 0x2: /* V9 fmovd */
2552 gen_op_load_fpr_DT0(DFPREG(rs2
));
2553 gen_op_store_DT0_fpr(DFPREG(rd
));
2555 case 0x3: /* V9 fmovq */
2556 #if defined(CONFIG_USER_ONLY)
2557 gen_op_load_fpr_QT0(QFPREG(rs2
));
2558 gen_op_store_QT0_fpr(QFPREG(rd
));
2563 case 0x6: /* V9 fnegd */
2564 gen_op_load_fpr_DT1(DFPREG(rs2
));
2566 gen_op_store_DT0_fpr(DFPREG(rd
));
2568 case 0x7: /* V9 fnegq */
2569 #if defined(CONFIG_USER_ONLY)
2570 gen_op_load_fpr_QT1(QFPREG(rs2
));
2572 gen_op_store_QT0_fpr(QFPREG(rd
));
2577 case 0xa: /* V9 fabsd */
2578 gen_op_load_fpr_DT1(DFPREG(rs2
));
2579 tcg_gen_helper_0_0(helper_fabsd
);
2580 gen_op_store_DT0_fpr(DFPREG(rd
));
2582 case 0xb: /* V9 fabsq */
2583 #if defined(CONFIG_USER_ONLY)
2584 gen_op_load_fpr_QT1(QFPREG(rs2
));
2585 tcg_gen_helper_0_0(helper_fabsq
);
2586 gen_op_store_QT0_fpr(QFPREG(rd
));
2591 case 0x81: /* V9 fstox */
2592 gen_op_load_fpr_FT1(rs2
);
2593 gen_clear_float_exceptions();
2595 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2596 gen_op_store_DT0_fpr(DFPREG(rd
));
2598 case 0x82: /* V9 fdtox */
2599 gen_op_load_fpr_DT1(DFPREG(rs2
));
2600 gen_clear_float_exceptions();
2602 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2603 gen_op_store_DT0_fpr(DFPREG(rd
));
2605 case 0x83: /* V9 fqtox */
2606 #if defined(CONFIG_USER_ONLY)
2607 gen_op_load_fpr_QT1(QFPREG(rs2
));
2608 gen_clear_float_exceptions();
2610 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2611 gen_op_store_DT0_fpr(DFPREG(rd
));
2616 case 0x84: /* V9 fxtos */
2617 gen_op_load_fpr_DT1(DFPREG(rs2
));
2618 gen_clear_float_exceptions();
2620 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2621 gen_op_store_FT0_fpr(rd
);
2623 case 0x88: /* V9 fxtod */
2624 gen_op_load_fpr_DT1(DFPREG(rs2
));
2625 gen_clear_float_exceptions();
2627 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2628 gen_op_store_DT0_fpr(DFPREG(rd
));
2630 case 0x8c: /* V9 fxtoq */
2631 #if defined(CONFIG_USER_ONLY)
2632 gen_op_load_fpr_DT1(DFPREG(rs2
));
2633 gen_clear_float_exceptions();
2635 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2636 gen_op_store_QT0_fpr(QFPREG(rd
));
2645 } else if (xop
== 0x35) { /* FPU Operations */
2646 #ifdef TARGET_SPARC64
2649 if (gen_trap_ifnofpu(dc
))
2651 gen_op_clear_ieee_excp_and_FTT();
2652 rs1
= GET_FIELD(insn
, 13, 17);
2653 rs2
= GET_FIELD(insn
, 27, 31);
2654 xop
= GET_FIELD(insn
, 18, 26);
2655 #ifdef TARGET_SPARC64
2656 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2660 l1
= gen_new_label();
2661 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
2662 cond
= GET_FIELD_SP(insn
, 14, 17);
2663 rs1
= GET_FIELD(insn
, 13, 17);
2664 gen_movl_reg_T0(rs1
);
2665 tcg_gen_movi_tl(r_zero
, 0);
2666 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2667 gen_op_load_fpr_FT0(rs2
);
2668 gen_op_store_FT0_fpr(rd
);
2671 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2675 l1
= gen_new_label();
2676 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
2677 cond
= GET_FIELD_SP(insn
, 14, 17);
2678 rs1
= GET_FIELD(insn
, 13, 17);
2679 gen_movl_reg_T0(rs1
);
2680 tcg_gen_movi_tl(r_zero
, 0);
2681 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2682 gen_op_load_fpr_DT0(DFPREG(rs2
));
2683 gen_op_store_DT0_fpr(DFPREG(rd
));
2686 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2687 #if defined(CONFIG_USER_ONLY)
2691 l1
= gen_new_label();
2692 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
2693 cond
= GET_FIELD_SP(insn
, 14, 17);
2694 rs1
= GET_FIELD(insn
, 13, 17);
2695 gen_movl_reg_T0(rs1
);
2696 tcg_gen_movi_tl(r_zero
, 0);
2697 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2698 gen_op_load_fpr_QT0(QFPREG(rs2
));
2699 gen_op_store_QT0_fpr(QFPREG(rd
));
2708 #ifdef TARGET_SPARC64
2709 #define FMOVCC(size_FDQ, fcc) \
2711 TCGv r_zero, r_cond; \
2714 l1 = gen_new_label(); \
2715 r_zero = tcg_temp_new(TCG_TYPE_TL); \
2716 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2717 tcg_gen_movi_tl(r_zero, 0); \
2718 cond = GET_FIELD_SP(insn, 14, 17); \
2719 gen_fcond(r_cond, fcc, cond); \
2720 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2721 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2722 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2723 gen_set_label(l1); \
2725 case 0x001: /* V9 fmovscc %fcc0 */
2728 case 0x002: /* V9 fmovdcc %fcc0 */
2731 case 0x003: /* V9 fmovqcc %fcc0 */
2732 #if defined(CONFIG_USER_ONLY)
2738 case 0x041: /* V9 fmovscc %fcc1 */
2741 case 0x042: /* V9 fmovdcc %fcc1 */
2744 case 0x043: /* V9 fmovqcc %fcc1 */
2745 #if defined(CONFIG_USER_ONLY)
2751 case 0x081: /* V9 fmovscc %fcc2 */
2754 case 0x082: /* V9 fmovdcc %fcc2 */
2757 case 0x083: /* V9 fmovqcc %fcc2 */
2758 #if defined(CONFIG_USER_ONLY)
2764 case 0x0c1: /* V9 fmovscc %fcc3 */
2767 case 0x0c2: /* V9 fmovdcc %fcc3 */
2770 case 0x0c3: /* V9 fmovqcc %fcc3 */
2771 #if defined(CONFIG_USER_ONLY)
2778 #define FMOVCC(size_FDQ, icc) \
2780 TCGv r_zero, r_cond; \
2783 l1 = gen_new_label(); \
2784 r_zero = tcg_temp_new(TCG_TYPE_TL); \
2785 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2786 tcg_gen_movi_tl(r_zero, 0); \
2787 cond = GET_FIELD_SP(insn, 14, 17); \
2788 gen_cond(r_cond, icc, cond); \
2789 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2790 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2791 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2792 gen_set_label(l1); \
2795 case 0x101: /* V9 fmovscc %icc */
2798 case 0x102: /* V9 fmovdcc %icc */
2800 case 0x103: /* V9 fmovqcc %icc */
2801 #if defined(CONFIG_USER_ONLY)
2807 case 0x181: /* V9 fmovscc %xcc */
2810 case 0x182: /* V9 fmovdcc %xcc */
2813 case 0x183: /* V9 fmovqcc %xcc */
2814 #if defined(CONFIG_USER_ONLY)
2822 case 0x51: /* fcmps, V9 %fcc */
2823 gen_op_load_fpr_FT0(rs1
);
2824 gen_op_load_fpr_FT1(rs2
);
2825 gen_op_fcmps(rd
& 3);
2827 case 0x52: /* fcmpd, V9 %fcc */
2828 gen_op_load_fpr_DT0(DFPREG(rs1
));
2829 gen_op_load_fpr_DT1(DFPREG(rs2
));
2830 gen_op_fcmpd(rd
& 3);
2832 case 0x53: /* fcmpq, V9 %fcc */
2833 #if defined(CONFIG_USER_ONLY)
2834 gen_op_load_fpr_QT0(QFPREG(rs1
));
2835 gen_op_load_fpr_QT1(QFPREG(rs2
));
2836 gen_op_fcmpq(rd
& 3);
2838 #else /* !defined(CONFIG_USER_ONLY) */
2841 case 0x55: /* fcmpes, V9 %fcc */
2842 gen_op_load_fpr_FT0(rs1
);
2843 gen_op_load_fpr_FT1(rs2
);
2844 gen_op_fcmpes(rd
& 3);
2846 case 0x56: /* fcmped, V9 %fcc */
2847 gen_op_load_fpr_DT0(DFPREG(rs1
));
2848 gen_op_load_fpr_DT1(DFPREG(rs2
));
2849 gen_op_fcmped(rd
& 3);
2851 case 0x57: /* fcmpeq, V9 %fcc */
2852 #if defined(CONFIG_USER_ONLY)
2853 gen_op_load_fpr_QT0(QFPREG(rs1
));
2854 gen_op_load_fpr_QT1(QFPREG(rs2
));
2855 gen_op_fcmpeq(rd
& 3);
2857 #else/* !defined(CONFIG_USER_ONLY) */
2864 } else if (xop
== 0x2) {
2867 rs1
= GET_FIELD(insn
, 13, 17);
2869 // or %g0, x, y -> mov T0, x; mov y, T0
2870 if (IS_IMM
) { /* immediate */
2871 rs2
= GET_FIELDs(insn
, 19, 31);
2872 tcg_gen_movi_tl(cpu_T
[0], (int)rs2
);
2873 } else { /* register */
2874 rs2
= GET_FIELD(insn
, 27, 31);
2875 gen_movl_reg_T0(rs2
);
2878 gen_movl_reg_T0(rs1
);
2879 if (IS_IMM
) { /* immediate */
2880 rs2
= GET_FIELDs(insn
, 19, 31);
2881 tcg_gen_ori_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
2882 } else { /* register */
2883 // or x, %g0, y -> mov T1, x; mov y, T1
2884 rs2
= GET_FIELD(insn
, 27, 31);
2886 gen_movl_reg_T1(rs2
);
2891 gen_movl_T0_reg(rd
);
2893 #ifdef TARGET_SPARC64
2894 } else if (xop
== 0x25) { /* sll, V9 sllx */
2895 rs1
= GET_FIELD(insn
, 13, 17);
2896 gen_movl_reg_T0(rs1
);
2897 if (IS_IMM
) { /* immediate */
2898 rs2
= GET_FIELDs(insn
, 20, 31);
2899 if (insn
& (1 << 12)) {
2900 tcg_gen_shli_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2902 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2903 tcg_gen_shli_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2905 } else { /* register */
2906 rs2
= GET_FIELD(insn
, 27, 31);
2907 gen_movl_reg_T1(rs2
);
2908 if (insn
& (1 << 12)) {
2909 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2910 tcg_gen_shl_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2912 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2913 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2914 tcg_gen_shl_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2917 gen_movl_T0_reg(rd
);
2918 } else if (xop
== 0x26) { /* srl, V9 srlx */
2919 rs1
= GET_FIELD(insn
, 13, 17);
2920 gen_movl_reg_T0(rs1
);
2921 if (IS_IMM
) { /* immediate */
2922 rs2
= GET_FIELDs(insn
, 20, 31);
2923 if (insn
& (1 << 12)) {
2924 tcg_gen_shri_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2926 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2927 tcg_gen_shri_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2929 } else { /* register */
2930 rs2
= GET_FIELD(insn
, 27, 31);
2931 gen_movl_reg_T1(rs2
);
2932 if (insn
& (1 << 12)) {
2933 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2934 tcg_gen_shr_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2936 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2937 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2938 tcg_gen_shr_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2941 gen_movl_T0_reg(rd
);
2942 } else if (xop
== 0x27) { /* sra, V9 srax */
2943 rs1
= GET_FIELD(insn
, 13, 17);
2944 gen_movl_reg_T0(rs1
);
2945 if (IS_IMM
) { /* immediate */
2946 rs2
= GET_FIELDs(insn
, 20, 31);
2947 if (insn
& (1 << 12)) {
2948 tcg_gen_sari_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2950 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2951 tcg_gen_ext_i32_i64(cpu_T
[0], cpu_T
[0]);
2952 tcg_gen_sari_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2954 } else { /* register */
2955 rs2
= GET_FIELD(insn
, 27, 31);
2956 gen_movl_reg_T1(rs2
);
2957 if (insn
& (1 << 12)) {
2958 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2959 tcg_gen_sar_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2961 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2962 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2963 tcg_gen_sar_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2966 gen_movl_T0_reg(rd
);
2968 } else if (xop
< 0x36) {
2969 rs1
= GET_FIELD(insn
, 13, 17);
2970 gen_movl_reg_T0(rs1
);
2971 if (IS_IMM
) { /* immediate */
2972 rs2
= GET_FIELDs(insn
, 19, 31);
2973 gen_movl_simm_T1(rs2
);
2974 } else { /* register */
2975 rs2
= GET_FIELD(insn
, 27, 31);
2976 gen_movl_reg_T1(rs2
);
2979 switch (xop
& ~0x10) {
2982 gen_op_add_T1_T0_cc();
2987 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2989 gen_op_logic_T0_cc();
2992 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2994 gen_op_logic_T0_cc();
2997 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2999 gen_op_logic_T0_cc();
3003 gen_op_sub_T1_T0_cc();
3005 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3008 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
3009 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3011 gen_op_logic_T0_cc();
3014 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
3015 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3017 gen_op_logic_T0_cc();
3020 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
3021 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3023 gen_op_logic_T0_cc();
3027 gen_op_addx_T1_T0_cc();
3029 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3030 tcg_gen_add_tl(cpu_T
[1], cpu_T
[1], cpu_tmp0
);
3031 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3034 #ifdef TARGET_SPARC64
3035 case 0x9: /* V9 mulx */
3036 tcg_gen_mul_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3040 gen_op_umul_T1_T0();
3042 gen_op_logic_T0_cc();
3045 gen_op_smul_T1_T0();
3047 gen_op_logic_T0_cc();
3051 gen_op_subx_T1_T0_cc();
3053 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3054 tcg_gen_add_tl(cpu_T
[1], cpu_T
[1], cpu_tmp0
);
3055 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3058 #ifdef TARGET_SPARC64
3059 case 0xd: /* V9 udivx */
3060 gen_op_udivx_T1_T0();
3064 gen_op_udiv_T1_T0();
3069 gen_op_sdiv_T1_T0();
3076 gen_movl_T0_reg(rd
);
3079 case 0x20: /* taddcc */
3080 gen_op_tadd_T1_T0_cc();
3081 gen_movl_T0_reg(rd
);
3083 case 0x21: /* tsubcc */
3084 gen_op_tsub_T1_T0_cc();
3085 gen_movl_T0_reg(rd
);
3087 case 0x22: /* taddcctv */
3089 gen_op_tadd_T1_T0_ccTV();
3090 gen_movl_T0_reg(rd
);
3092 case 0x23: /* tsubcctv */
3094 gen_op_tsub_T1_T0_ccTV();
3095 gen_movl_T0_reg(rd
);
3097 case 0x24: /* mulscc */
3098 gen_op_mulscc_T1_T0();
3099 gen_movl_T0_reg(rd
);
3101 #ifndef TARGET_SPARC64
3102 case 0x25: /* sll */
3103 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
3104 tcg_gen_shl_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3105 gen_movl_T0_reg(rd
);
3107 case 0x26: /* srl */
3108 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
3109 tcg_gen_shr_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3110 gen_movl_T0_reg(rd
);
3112 case 0x27: /* sra */
3113 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
3114 tcg_gen_sar_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3115 gen_movl_T0_reg(rd
);
3123 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, y
));
3125 #ifndef TARGET_SPARC64
3126 case 0x01 ... 0x0f: /* undefined in the
3130 case 0x10 ... 0x1f: /* implementation-dependent
3136 case 0x2: /* V9 wrccr */
3140 case 0x3: /* V9 wrasi */
3142 gen_op_movl_env_T0(offsetof(CPUSPARCState
, asi
));
3144 case 0x6: /* V9 wrfprs */
3146 gen_op_movl_env_T0(offsetof(CPUSPARCState
, fprs
));
3152 case 0xf: /* V9 sir, nop if user */
3153 #if !defined(CONFIG_USER_ONLY)
3158 case 0x13: /* Graphics Status */
3159 if (gen_trap_ifnofpu(dc
))
3162 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, gsr
));
3164 case 0x17: /* Tick compare */
3165 #if !defined(CONFIG_USER_ONLY)
3166 if (!supervisor(dc
))
3173 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
3175 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3176 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3177 offsetof(CPUState
, tick
));
3178 tcg_gen_helper_0_2(helper_tick_set_limit
,
3179 r_tickptr
, cpu_T
[0]);
3182 case 0x18: /* System tick */
3183 #if !defined(CONFIG_USER_ONLY)
3184 if (!supervisor(dc
))
3191 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3192 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3193 offsetof(CPUState
, stick
));
3194 tcg_gen_helper_0_2(helper_tick_set_count
,
3195 r_tickptr
, cpu_T
[0]);
3198 case 0x19: /* System tick compare */
3199 #if !defined(CONFIG_USER_ONLY)
3200 if (!supervisor(dc
))
3207 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
3209 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3210 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3211 offsetof(CPUState
, stick
));
3212 tcg_gen_helper_0_2(helper_tick_set_limit
,
3213 r_tickptr
, cpu_T
[0]);
3217 case 0x10: /* Performance Control */
3218 case 0x11: /* Performance Instrumentation Counter */
3219 case 0x12: /* Dispatch Control */
3220 case 0x14: /* Softint set */
3221 case 0x15: /* Softint clear */
3222 case 0x16: /* Softint write */
3229 #if !defined(CONFIG_USER_ONLY)
3230 case 0x31: /* wrpsr, V9 saved, restored */
3232 if (!supervisor(dc
))
3234 #ifdef TARGET_SPARC64
3242 case 2: /* UA2005 allclean */
3243 case 3: /* UA2005 otherw */
3244 case 4: /* UA2005 normalw */
3245 case 5: /* UA2005 invalw */
3252 tcg_gen_helper_0_1(helper_wrpsr
, cpu_T
[0]);
3260 case 0x32: /* wrwim, V9 wrpr */
3262 if (!supervisor(dc
))
3265 #ifdef TARGET_SPARC64
3271 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3272 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3273 offsetof(CPUState
, tsptr
));
3274 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
3275 offsetof(trap_state
, tpc
));
3282 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3283 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3284 offsetof(CPUState
, tsptr
));
3285 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
3286 offsetof(trap_state
, tnpc
));
3293 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3294 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3295 offsetof(CPUState
, tsptr
));
3296 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
3297 offsetof(trap_state
, tstate
));
3304 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3305 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3306 offsetof(CPUState
, tsptr
));
3307 tcg_gen_st_i32(cpu_T
[0], r_tsptr
,
3308 offsetof(trap_state
, tt
));
3315 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3316 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3317 offsetof(CPUState
, tick
));
3318 tcg_gen_helper_0_2(helper_tick_set_count
,
3319 r_tickptr
, cpu_T
[0]);
3323 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
3327 tcg_gen_helper_0_1(helper_wrpstate
, cpu_T
[0]);
3333 gen_op_movl_env_T0(offsetof(CPUSPARCState
, tl
));
3336 gen_op_movl_env_T0(offsetof(CPUSPARCState
, psrpil
));
3342 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cansave
));
3344 case 11: // canrestore
3345 gen_op_movl_env_T0(offsetof(CPUSPARCState
, canrestore
));
3347 case 12: // cleanwin
3348 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cleanwin
));
3350 case 13: // otherwin
3351 gen_op_movl_env_T0(offsetof(CPUSPARCState
, otherwin
));
3354 gen_op_movl_env_T0(offsetof(CPUSPARCState
, wstate
));
3356 case 16: // UA2005 gl
3357 gen_op_movl_env_T0(offsetof(CPUSPARCState
, gl
));
3359 case 26: // UA2005 strand status
3360 if (!hypervisor(dc
))
3362 gen_op_movl_env_T0(offsetof(CPUSPARCState
, ssr
));
3368 tcg_gen_andi_i32(cpu_T
[0], cpu_T
[0], ((1 << NWINDOWS
) - 1));
3369 gen_op_movl_env_T0(offsetof(CPUSPARCState
, wim
));
3373 case 0x33: /* wrtbr, UA2005 wrhpr */
3375 #ifndef TARGET_SPARC64
3376 if (!supervisor(dc
))
3379 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
3381 if (!hypervisor(dc
))
3386 // XXX gen_op_wrhpstate();
3393 // XXX gen_op_wrhtstate();
3396 gen_op_movl_env_T0(offsetof(CPUSPARCState
, hintp
));
3399 gen_op_movl_env_T0(offsetof(CPUSPARCState
, htba
));
3401 case 31: // hstick_cmpr
3405 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
3407 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3408 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3409 offsetof(CPUState
, hstick
));
3410 tcg_gen_helper_0_2(helper_tick_set_limit
,
3411 r_tickptr
, cpu_T
[0]);
3414 case 6: // hver readonly
3422 #ifdef TARGET_SPARC64
3423 case 0x2c: /* V9 movcc */
3425 int cc
= GET_FIELD_SP(insn
, 11, 12);
3426 int cond
= GET_FIELD_SP(insn
, 14, 17);
3431 if (insn
& (1 << 18)) {
3433 gen_cond(cpu_T
[2], 0, cond
);
3435 gen_cond(cpu_T
[2], 1, cond
);
3439 gen_fcond(cpu_T
[2], cc
, cond
);
3442 l1
= gen_new_label();
3444 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
3445 tcg_gen_movi_tl(r_zero
, 0);
3446 tcg_gen_brcond_tl(TCG_COND_EQ
, cpu_T
[2], r_zero
, l1
);
3447 if (IS_IMM
) { /* immediate */
3448 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3449 gen_movl_simm_T1(rs2
);
3451 rs2
= GET_FIELD_SP(insn
, 0, 4);
3452 gen_movl_reg_T1(rs2
);
3454 gen_movl_T1_reg(rd
);
3458 case 0x2d: /* V9 sdivx */
3459 gen_op_sdivx_T1_T0();
3460 gen_movl_T0_reg(rd
);
3462 case 0x2e: /* V9 popc */
3464 if (IS_IMM
) { /* immediate */
3465 rs2
= GET_FIELD_SPs(insn
, 0, 12);
3466 gen_movl_simm_T1(rs2
);
3467 // XXX optimize: popc(constant)
3470 rs2
= GET_FIELD_SP(insn
, 0, 4);
3471 gen_movl_reg_T1(rs2
);
3473 tcg_gen_helper_1_1(helper_popc
, cpu_T
[0],
3475 gen_movl_T0_reg(rd
);
3477 case 0x2f: /* V9 movr */
3479 int cond
= GET_FIELD_SP(insn
, 10, 12);
3483 rs1
= GET_FIELD(insn
, 13, 17);
3484 gen_movl_reg_T0(rs1
);
3486 l1
= gen_new_label();
3488 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
3489 tcg_gen_movi_tl(r_zero
, 0);
3490 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
3491 if (IS_IMM
) { /* immediate */
3492 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3493 gen_movl_simm_T1(rs2
);
3495 rs2
= GET_FIELD_SP(insn
, 0, 4);
3496 gen_movl_reg_T1(rs2
);
3498 gen_movl_T1_reg(rd
);
3507 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3508 #ifdef TARGET_SPARC64
3509 int opf
= GET_FIELD_SP(insn
, 5, 13);
3510 rs1
= GET_FIELD(insn
, 13, 17);
3511 rs2
= GET_FIELD(insn
, 27, 31);
3512 if (gen_trap_ifnofpu(dc
))
3516 case 0x000: /* VIS I edge8cc */
3517 case 0x001: /* VIS II edge8n */
3518 case 0x002: /* VIS I edge8lcc */
3519 case 0x003: /* VIS II edge8ln */
3520 case 0x004: /* VIS I edge16cc */
3521 case 0x005: /* VIS II edge16n */
3522 case 0x006: /* VIS I edge16lcc */
3523 case 0x007: /* VIS II edge16ln */
3524 case 0x008: /* VIS I edge32cc */
3525 case 0x009: /* VIS II edge32n */
3526 case 0x00a: /* VIS I edge32lcc */
3527 case 0x00b: /* VIS II edge32ln */
3530 case 0x010: /* VIS I array8 */
3531 gen_movl_reg_T0(rs1
);
3532 gen_movl_reg_T1(rs2
);
3534 gen_movl_T0_reg(rd
);
3536 case 0x012: /* VIS I array16 */
3537 gen_movl_reg_T0(rs1
);
3538 gen_movl_reg_T1(rs2
);
3540 gen_movl_T0_reg(rd
);
3542 case 0x014: /* VIS I array32 */
3543 gen_movl_reg_T0(rs1
);
3544 gen_movl_reg_T1(rs2
);
3546 gen_movl_T0_reg(rd
);
3548 case 0x018: /* VIS I alignaddr */
3549 gen_movl_reg_T0(rs1
);
3550 gen_movl_reg_T1(rs2
);
3552 gen_movl_T0_reg(rd
);
3554 case 0x019: /* VIS II bmask */
3555 case 0x01a: /* VIS I alignaddrl */
3558 case 0x020: /* VIS I fcmple16 */
3559 gen_op_load_fpr_DT0(DFPREG(rs1
));
3560 gen_op_load_fpr_DT1(DFPREG(rs2
));
3562 gen_op_store_DT0_fpr(DFPREG(rd
));
3564 case 0x022: /* VIS I fcmpne16 */
3565 gen_op_load_fpr_DT0(DFPREG(rs1
));
3566 gen_op_load_fpr_DT1(DFPREG(rs2
));
3568 gen_op_store_DT0_fpr(DFPREG(rd
));
3570 case 0x024: /* VIS I fcmple32 */
3571 gen_op_load_fpr_DT0(DFPREG(rs1
));
3572 gen_op_load_fpr_DT1(DFPREG(rs2
));
3574 gen_op_store_DT0_fpr(DFPREG(rd
));
3576 case 0x026: /* VIS I fcmpne32 */
3577 gen_op_load_fpr_DT0(DFPREG(rs1
));
3578 gen_op_load_fpr_DT1(DFPREG(rs2
));
3580 gen_op_store_DT0_fpr(DFPREG(rd
));
3582 case 0x028: /* VIS I fcmpgt16 */
3583 gen_op_load_fpr_DT0(DFPREG(rs1
));
3584 gen_op_load_fpr_DT1(DFPREG(rs2
));
3586 gen_op_store_DT0_fpr(DFPREG(rd
));
3588 case 0x02a: /* VIS I fcmpeq16 */
3589 gen_op_load_fpr_DT0(DFPREG(rs1
));
3590 gen_op_load_fpr_DT1(DFPREG(rs2
));
3592 gen_op_store_DT0_fpr(DFPREG(rd
));
3594 case 0x02c: /* VIS I fcmpgt32 */
3595 gen_op_load_fpr_DT0(DFPREG(rs1
));
3596 gen_op_load_fpr_DT1(DFPREG(rs2
));
3598 gen_op_store_DT0_fpr(DFPREG(rd
));
3600 case 0x02e: /* VIS I fcmpeq32 */
3601 gen_op_load_fpr_DT0(DFPREG(rs1
));
3602 gen_op_load_fpr_DT1(DFPREG(rs2
));
3604 gen_op_store_DT0_fpr(DFPREG(rd
));
3606 case 0x031: /* VIS I fmul8x16 */
3607 gen_op_load_fpr_DT0(DFPREG(rs1
));
3608 gen_op_load_fpr_DT1(DFPREG(rs2
));
3610 gen_op_store_DT0_fpr(DFPREG(rd
));
3612 case 0x033: /* VIS I fmul8x16au */
3613 gen_op_load_fpr_DT0(DFPREG(rs1
));
3614 gen_op_load_fpr_DT1(DFPREG(rs2
));
3615 gen_op_fmul8x16au();
3616 gen_op_store_DT0_fpr(DFPREG(rd
));
3618 case 0x035: /* VIS I fmul8x16al */
3619 gen_op_load_fpr_DT0(DFPREG(rs1
));
3620 gen_op_load_fpr_DT1(DFPREG(rs2
));
3621 gen_op_fmul8x16al();
3622 gen_op_store_DT0_fpr(DFPREG(rd
));
3624 case 0x036: /* VIS I fmul8sux16 */
3625 gen_op_load_fpr_DT0(DFPREG(rs1
));
3626 gen_op_load_fpr_DT1(DFPREG(rs2
));
3627 gen_op_fmul8sux16();
3628 gen_op_store_DT0_fpr(DFPREG(rd
));
3630 case 0x037: /* VIS I fmul8ulx16 */
3631 gen_op_load_fpr_DT0(DFPREG(rs1
));
3632 gen_op_load_fpr_DT1(DFPREG(rs2
));
3633 gen_op_fmul8ulx16();
3634 gen_op_store_DT0_fpr(DFPREG(rd
));
3636 case 0x038: /* VIS I fmuld8sux16 */
3637 gen_op_load_fpr_DT0(DFPREG(rs1
));
3638 gen_op_load_fpr_DT1(DFPREG(rs2
));
3639 gen_op_fmuld8sux16();
3640 gen_op_store_DT0_fpr(DFPREG(rd
));
3642 case 0x039: /* VIS I fmuld8ulx16 */
3643 gen_op_load_fpr_DT0(DFPREG(rs1
));
3644 gen_op_load_fpr_DT1(DFPREG(rs2
));
3645 gen_op_fmuld8ulx16();
3646 gen_op_store_DT0_fpr(DFPREG(rd
));
3648 case 0x03a: /* VIS I fpack32 */
3649 case 0x03b: /* VIS I fpack16 */
3650 case 0x03d: /* VIS I fpackfix */
3651 case 0x03e: /* VIS I pdist */
3654 case 0x048: /* VIS I faligndata */
3655 gen_op_load_fpr_DT0(DFPREG(rs1
));
3656 gen_op_load_fpr_DT1(DFPREG(rs2
));
3657 gen_op_faligndata();
3658 gen_op_store_DT0_fpr(DFPREG(rd
));
3660 case 0x04b: /* VIS I fpmerge */
3661 gen_op_load_fpr_DT0(DFPREG(rs1
));
3662 gen_op_load_fpr_DT1(DFPREG(rs2
));
3664 gen_op_store_DT0_fpr(DFPREG(rd
));
3666 case 0x04c: /* VIS II bshuffle */
3669 case 0x04d: /* VIS I fexpand */
3670 gen_op_load_fpr_DT0(DFPREG(rs1
));
3671 gen_op_load_fpr_DT1(DFPREG(rs2
));
3673 gen_op_store_DT0_fpr(DFPREG(rd
));
3675 case 0x050: /* VIS I fpadd16 */
3676 gen_op_load_fpr_DT0(DFPREG(rs1
));
3677 gen_op_load_fpr_DT1(DFPREG(rs2
));
3679 gen_op_store_DT0_fpr(DFPREG(rd
));
3681 case 0x051: /* VIS I fpadd16s */
3682 gen_op_load_fpr_FT0(rs1
);
3683 gen_op_load_fpr_FT1(rs2
);
3685 gen_op_store_FT0_fpr(rd
);
3687 case 0x052: /* VIS I fpadd32 */
3688 gen_op_load_fpr_DT0(DFPREG(rs1
));
3689 gen_op_load_fpr_DT1(DFPREG(rs2
));
3691 gen_op_store_DT0_fpr(DFPREG(rd
));
3693 case 0x053: /* VIS I fpadd32s */
3694 gen_op_load_fpr_FT0(rs1
);
3695 gen_op_load_fpr_FT1(rs2
);
3697 gen_op_store_FT0_fpr(rd
);
3699 case 0x054: /* VIS I fpsub16 */
3700 gen_op_load_fpr_DT0(DFPREG(rs1
));
3701 gen_op_load_fpr_DT1(DFPREG(rs2
));
3703 gen_op_store_DT0_fpr(DFPREG(rd
));
3705 case 0x055: /* VIS I fpsub16s */
3706 gen_op_load_fpr_FT0(rs1
);
3707 gen_op_load_fpr_FT1(rs2
);
3709 gen_op_store_FT0_fpr(rd
);
3711 case 0x056: /* VIS I fpsub32 */
3712 gen_op_load_fpr_DT0(DFPREG(rs1
));
3713 gen_op_load_fpr_DT1(DFPREG(rs2
));
3715 gen_op_store_DT0_fpr(DFPREG(rd
));
3717 case 0x057: /* VIS I fpsub32s */
3718 gen_op_load_fpr_FT0(rs1
);
3719 gen_op_load_fpr_FT1(rs2
);
3721 gen_op_store_FT0_fpr(rd
);
3723 case 0x060: /* VIS I fzero */
3724 gen_op_movl_DT0_0();
3725 gen_op_store_DT0_fpr(DFPREG(rd
));
3727 case 0x061: /* VIS I fzeros */
3728 gen_op_movl_FT0_0();
3729 gen_op_store_FT0_fpr(rd
);
3731 case 0x062: /* VIS I fnor */
3732 gen_op_load_fpr_DT0(DFPREG(rs1
));
3733 gen_op_load_fpr_DT1(DFPREG(rs2
));
3735 gen_op_store_DT0_fpr(DFPREG(rd
));
3737 case 0x063: /* VIS I fnors */
3738 gen_op_load_fpr_FT0(rs1
);
3739 gen_op_load_fpr_FT1(rs2
);
3741 gen_op_store_FT0_fpr(rd
);
3743 case 0x064: /* VIS I fandnot2 */
3744 gen_op_load_fpr_DT1(DFPREG(rs1
));
3745 gen_op_load_fpr_DT0(DFPREG(rs2
));
3747 gen_op_store_DT0_fpr(DFPREG(rd
));
3749 case 0x065: /* VIS I fandnot2s */
3750 gen_op_load_fpr_FT1(rs1
);
3751 gen_op_load_fpr_FT0(rs2
);
3753 gen_op_store_FT0_fpr(rd
);
3755 case 0x066: /* VIS I fnot2 */
3756 gen_op_load_fpr_DT1(DFPREG(rs2
));
3758 gen_op_store_DT0_fpr(DFPREG(rd
));
3760 case 0x067: /* VIS I fnot2s */
3761 gen_op_load_fpr_FT1(rs2
);
3763 gen_op_store_FT0_fpr(rd
);
3765 case 0x068: /* VIS I fandnot1 */
3766 gen_op_load_fpr_DT0(DFPREG(rs1
));
3767 gen_op_load_fpr_DT1(DFPREG(rs2
));
3769 gen_op_store_DT0_fpr(DFPREG(rd
));
3771 case 0x069: /* VIS I fandnot1s */
3772 gen_op_load_fpr_FT0(rs1
);
3773 gen_op_load_fpr_FT1(rs2
);
3775 gen_op_store_FT0_fpr(rd
);
3777 case 0x06a: /* VIS I fnot1 */
3778 gen_op_load_fpr_DT1(DFPREG(rs1
));
3780 gen_op_store_DT0_fpr(DFPREG(rd
));
3782 case 0x06b: /* VIS I fnot1s */
3783 gen_op_load_fpr_FT1(rs1
);
3785 gen_op_store_FT0_fpr(rd
);
3787 case 0x06c: /* VIS I fxor */
3788 gen_op_load_fpr_DT0(DFPREG(rs1
));
3789 gen_op_load_fpr_DT1(DFPREG(rs2
));
3791 gen_op_store_DT0_fpr(DFPREG(rd
));
3793 case 0x06d: /* VIS I fxors */
3794 gen_op_load_fpr_FT0(rs1
);
3795 gen_op_load_fpr_FT1(rs2
);
3797 gen_op_store_FT0_fpr(rd
);
3799 case 0x06e: /* VIS I fnand */
3800 gen_op_load_fpr_DT0(DFPREG(rs1
));
3801 gen_op_load_fpr_DT1(DFPREG(rs2
));
3803 gen_op_store_DT0_fpr(DFPREG(rd
));
3805 case 0x06f: /* VIS I fnands */
3806 gen_op_load_fpr_FT0(rs1
);
3807 gen_op_load_fpr_FT1(rs2
);
3809 gen_op_store_FT0_fpr(rd
);
3811 case 0x070: /* VIS I fand */
3812 gen_op_load_fpr_DT0(DFPREG(rs1
));
3813 gen_op_load_fpr_DT1(DFPREG(rs2
));
3815 gen_op_store_DT0_fpr(DFPREG(rd
));
3817 case 0x071: /* VIS I fands */
3818 gen_op_load_fpr_FT0(rs1
);
3819 gen_op_load_fpr_FT1(rs2
);
3821 gen_op_store_FT0_fpr(rd
);
3823 case 0x072: /* VIS I fxnor */
3824 gen_op_load_fpr_DT0(DFPREG(rs1
));
3825 gen_op_load_fpr_DT1(DFPREG(rs2
));
3827 gen_op_store_DT0_fpr(DFPREG(rd
));
3829 case 0x073: /* VIS I fxnors */
3830 gen_op_load_fpr_FT0(rs1
);
3831 gen_op_load_fpr_FT1(rs2
);
3833 gen_op_store_FT0_fpr(rd
);
3835 case 0x074: /* VIS I fsrc1 */
3836 gen_op_load_fpr_DT0(DFPREG(rs1
));
3837 gen_op_store_DT0_fpr(DFPREG(rd
));
3839 case 0x075: /* VIS I fsrc1s */
3840 gen_op_load_fpr_FT0(rs1
);
3841 gen_op_store_FT0_fpr(rd
);
3843 case 0x076: /* VIS I fornot2 */
3844 gen_op_load_fpr_DT1(DFPREG(rs1
));
3845 gen_op_load_fpr_DT0(DFPREG(rs2
));
3847 gen_op_store_DT0_fpr(DFPREG(rd
));
3849 case 0x077: /* VIS I fornot2s */
3850 gen_op_load_fpr_FT1(rs1
);
3851 gen_op_load_fpr_FT0(rs2
);
3853 gen_op_store_FT0_fpr(rd
);
3855 case 0x078: /* VIS I fsrc2 */
3856 gen_op_load_fpr_DT0(DFPREG(rs2
));
3857 gen_op_store_DT0_fpr(DFPREG(rd
));
3859 case 0x079: /* VIS I fsrc2s */
3860 gen_op_load_fpr_FT0(rs2
);
3861 gen_op_store_FT0_fpr(rd
);
3863 case 0x07a: /* VIS I fornot1 */
3864 gen_op_load_fpr_DT0(DFPREG(rs1
));
3865 gen_op_load_fpr_DT1(DFPREG(rs2
));
3867 gen_op_store_DT0_fpr(DFPREG(rd
));
3869 case 0x07b: /* VIS I fornot1s */
3870 gen_op_load_fpr_FT0(rs1
);
3871 gen_op_load_fpr_FT1(rs2
);
3873 gen_op_store_FT0_fpr(rd
);
3875 case 0x07c: /* VIS I for */
3876 gen_op_load_fpr_DT0(DFPREG(rs1
));
3877 gen_op_load_fpr_DT1(DFPREG(rs2
));
3879 gen_op_store_DT0_fpr(DFPREG(rd
));
3881 case 0x07d: /* VIS I fors */
3882 gen_op_load_fpr_FT0(rs1
);
3883 gen_op_load_fpr_FT1(rs2
);
3885 gen_op_store_FT0_fpr(rd
);
3887 case 0x07e: /* VIS I fone */
3888 gen_op_movl_DT0_1();
3889 gen_op_store_DT0_fpr(DFPREG(rd
));
3891 case 0x07f: /* VIS I fones */
3892 gen_op_movl_FT0_1();
3893 gen_op_store_FT0_fpr(rd
);
3895 case 0x080: /* VIS I shutdown */
3896 case 0x081: /* VIS II siam */
3905 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
3906 #ifdef TARGET_SPARC64
3911 #ifdef TARGET_SPARC64
3912 } else if (xop
== 0x39) { /* V9 return */
3913 rs1
= GET_FIELD(insn
, 13, 17);
3915 gen_movl_reg_T0(rs1
);
3916 if (IS_IMM
) { /* immediate */
3917 rs2
= GET_FIELDs(insn
, 19, 31);
3918 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
3919 } else { /* register */
3920 rs2
= GET_FIELD(insn
, 27, 31);
3924 gen_movl_reg_T1(rs2
);
3932 gen_op_check_align_T0_3();
3933 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3934 dc
->npc
= DYNAMIC_PC
;
3938 rs1
= GET_FIELD(insn
, 13, 17);
3939 gen_movl_reg_T0(rs1
);
3940 if (IS_IMM
) { /* immediate */
3941 rs2
= GET_FIELDs(insn
, 19, 31);
3942 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
3943 } else { /* register */
3944 rs2
= GET_FIELD(insn
, 27, 31);
3948 gen_movl_reg_T1(rs2
);
3955 case 0x38: /* jmpl */
3958 tcg_gen_movi_tl(cpu_T
[1], dc
->pc
);
3959 gen_movl_T1_reg(rd
);
3962 gen_op_check_align_T0_3();
3963 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3964 dc
->npc
= DYNAMIC_PC
;
3967 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3968 case 0x39: /* rett, V9 return */
3970 if (!supervisor(dc
))
3973 gen_op_check_align_T0_3();
3974 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3975 dc
->npc
= DYNAMIC_PC
;
3976 tcg_gen_helper_0_0(helper_rett
);
3980 case 0x3b: /* flush */
3981 tcg_gen_helper_0_1(helper_flush
, cpu_T
[0]);
3983 case 0x3c: /* save */
3986 gen_movl_T0_reg(rd
);
3988 case 0x3d: /* restore */
3991 gen_movl_T0_reg(rd
);
3993 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
3994 case 0x3e: /* V9 done/retry */
3998 if (!supervisor(dc
))
4000 dc
->npc
= DYNAMIC_PC
;
4001 dc
->pc
= DYNAMIC_PC
;
4002 tcg_gen_helper_0_0(helper_done
);
4005 if (!supervisor(dc
))
4007 dc
->npc
= DYNAMIC_PC
;
4008 dc
->pc
= DYNAMIC_PC
;
4009 tcg_gen_helper_0_0(helper_retry
);
4024 case 3: /* load/store instructions */
4026 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4027 rs1
= GET_FIELD(insn
, 13, 17);
4029 gen_movl_reg_T0(rs1
);
4030 if (xop
== 0x3c || xop
== 0x3e)
4032 rs2
= GET_FIELD(insn
, 27, 31);
4033 gen_movl_reg_T1(rs2
);
4035 else if (IS_IMM
) { /* immediate */
4036 rs2
= GET_FIELDs(insn
, 19, 31);
4037 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
4038 } else { /* register */
4039 rs2
= GET_FIELD(insn
, 27, 31);
4043 gen_movl_reg_T1(rs2
);
4049 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4050 (xop
> 0x17 && xop
<= 0x1d ) ||
4051 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4053 case 0x0: /* load unsigned word */
4054 gen_op_check_align_T0_3();
4055 ABI32_MASK(cpu_T
[0]);
4056 tcg_gen_qemu_ld32u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4058 case 0x1: /* load unsigned byte */
4059 ABI32_MASK(cpu_T
[0]);
4060 tcg_gen_qemu_ld8u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4062 case 0x2: /* load unsigned halfword */
4063 gen_op_check_align_T0_1();
4064 ABI32_MASK(cpu_T
[0]);
4065 tcg_gen_qemu_ld16u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4067 case 0x3: /* load double word */
4073 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
4074 gen_op_check_align_T0_7();
4075 ABI32_MASK(cpu_T
[0]);
4076 tcg_gen_qemu_ld64(r_dword
, cpu_T
[0], dc
->mem_idx
);
4077 tcg_gen_trunc_i64_i32(cpu_T
[0], r_dword
);
4078 gen_movl_T0_reg(rd
+ 1);
4079 tcg_gen_shri_i64(r_dword
, r_dword
, 32);
4080 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
4083 case 0x9: /* load signed byte */
4084 ABI32_MASK(cpu_T
[0]);
4085 tcg_gen_qemu_ld8s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4087 case 0xa: /* load signed halfword */
4088 gen_op_check_align_T0_1();
4089 ABI32_MASK(cpu_T
[0]);
4090 tcg_gen_qemu_ld16s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4092 case 0xd: /* ldstub -- XXX: should be atomically */
4093 tcg_gen_movi_i32(cpu_tmp0
, 0xff);
4094 ABI32_MASK(cpu_T
[0]);
4095 tcg_gen_qemu_ld8s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4096 tcg_gen_qemu_st8(cpu_tmp0
, cpu_T
[0], dc
->mem_idx
);
4098 case 0x0f: /* swap register with memory. Also atomically */
4099 gen_op_check_align_T0_3();
4100 gen_movl_reg_T1(rd
);
4101 ABI32_MASK(cpu_T
[0]);
4102 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_T
[0], dc
->mem_idx
);
4103 tcg_gen_qemu_st32(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4104 tcg_gen_mov_i32(cpu_T
[1], cpu_tmp0
);
4106 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4107 case 0x10: /* load word alternate */
4108 #ifndef TARGET_SPARC64
4111 if (!supervisor(dc
))
4114 gen_op_check_align_T0_3();
4115 gen_ld_asi(insn
, 4, 0);
4117 case 0x11: /* load unsigned byte alternate */
4118 #ifndef TARGET_SPARC64
4121 if (!supervisor(dc
))
4124 gen_ld_asi(insn
, 1, 0);
4126 case 0x12: /* load unsigned halfword alternate */
4127 #ifndef TARGET_SPARC64
4130 if (!supervisor(dc
))
4133 gen_op_check_align_T0_1();
4134 gen_ld_asi(insn
, 2, 0);
4136 case 0x13: /* load double word alternate */
4137 #ifndef TARGET_SPARC64
4140 if (!supervisor(dc
))
4145 gen_op_check_align_T0_7();
4147 gen_movl_T0_reg(rd
+ 1);
4149 case 0x19: /* load signed byte alternate */
4150 #ifndef TARGET_SPARC64
4153 if (!supervisor(dc
))
4156 gen_ld_asi(insn
, 1, 1);
4158 case 0x1a: /* load signed halfword alternate */
4159 #ifndef TARGET_SPARC64
4162 if (!supervisor(dc
))
4165 gen_op_check_align_T0_1();
4166 gen_ld_asi(insn
, 2, 1);
4168 case 0x1d: /* ldstuba -- XXX: should be atomically */
4169 #ifndef TARGET_SPARC64
4172 if (!supervisor(dc
))
4175 gen_ldstub_asi(insn
);
4177 case 0x1f: /* swap reg with alt. memory. Also atomically */
4178 #ifndef TARGET_SPARC64
4181 if (!supervisor(dc
))
4184 gen_op_check_align_T0_3();
4185 gen_movl_reg_T1(rd
);
4189 #ifndef TARGET_SPARC64
4190 case 0x30: /* ldc */
4191 case 0x31: /* ldcsr */
4192 case 0x33: /* lddc */
4196 #ifdef TARGET_SPARC64
4197 case 0x08: /* V9 ldsw */
4198 gen_op_check_align_T0_3();
4199 ABI32_MASK(cpu_T
[0]);
4200 tcg_gen_qemu_ld32s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4202 case 0x0b: /* V9 ldx */
4203 gen_op_check_align_T0_7();
4204 ABI32_MASK(cpu_T
[0]);
4205 tcg_gen_qemu_ld64(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4207 case 0x18: /* V9 ldswa */
4208 gen_op_check_align_T0_3();
4209 gen_ld_asi(insn
, 4, 1);
4211 case 0x1b: /* V9 ldxa */
4212 gen_op_check_align_T0_7();
4213 gen_ld_asi(insn
, 8, 0);
4215 case 0x2d: /* V9 prefetch, no effect */
4217 case 0x30: /* V9 ldfa */
4218 gen_op_check_align_T0_3();
4219 gen_ldf_asi(insn
, 4, rd
);
4221 case 0x33: /* V9 lddfa */
4222 gen_op_check_align_T0_3();
4223 gen_ldf_asi(insn
, 8, DFPREG(rd
));
4225 case 0x3d: /* V9 prefetcha, no effect */
4227 case 0x32: /* V9 ldqfa */
4228 #if defined(CONFIG_USER_ONLY)
4229 gen_op_check_align_T0_3();
4230 gen_ldf_asi(insn
, 16, QFPREG(rd
));
4239 gen_movl_T1_reg(rd
);
4240 #ifdef TARGET_SPARC64
4243 } else if (xop
>= 0x20 && xop
< 0x24) {
4244 if (gen_trap_ifnofpu(dc
))
4247 case 0x20: /* load fpreg */
4248 gen_op_check_align_T0_3();
4250 gen_op_store_FT0_fpr(rd
);
4252 case 0x21: /* load fsr */
4253 gen_op_check_align_T0_3();
4256 tcg_gen_helper_0_0(helper_ldfsr
);
4258 case 0x22: /* load quad fpreg */
4259 #if defined(CONFIG_USER_ONLY)
4260 gen_op_check_align_T0_7();
4262 gen_op_store_QT0_fpr(QFPREG(rd
));
4267 case 0x23: /* load double fpreg */
4268 gen_op_check_align_T0_7();
4270 gen_op_store_DT0_fpr(DFPREG(rd
));
4275 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4276 xop
== 0xe || xop
== 0x1e) {
4277 gen_movl_reg_T1(rd
);
4279 case 0x4: /* store word */
4280 gen_op_check_align_T0_3();
4281 ABI32_MASK(cpu_T
[0]);
4282 tcg_gen_qemu_st32(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4284 case 0x5: /* store byte */
4285 ABI32_MASK(cpu_T
[0]);
4286 tcg_gen_qemu_st8(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4288 case 0x6: /* store halfword */
4289 gen_op_check_align_T0_1();
4290 ABI32_MASK(cpu_T
[0]);
4291 tcg_gen_qemu_st16(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4293 case 0x7: /* store double word */
4298 TCGv r_dword
, r_low
;
4300 gen_op_check_align_T0_7();
4301 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
4302 r_low
= tcg_temp_new(TCG_TYPE_I32
);
4303 gen_movl_reg_TN(rd
+ 1, r_low
);
4304 tcg_gen_helper_1_2(helper_pack64
, r_dword
, cpu_T
[1],
4306 tcg_gen_qemu_st64(r_dword
, cpu_T
[0], dc
->mem_idx
);
4308 #else /* __i386__ */
4309 gen_op_check_align_T0_7();
4311 gen_movl_reg_T2(rd
+ 1);
4313 #endif /* __i386__ */
4315 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4316 case 0x14: /* store word alternate */
4317 #ifndef TARGET_SPARC64
4320 if (!supervisor(dc
))
4323 gen_op_check_align_T0_3();
4324 gen_st_asi(insn
, 4);
4326 case 0x15: /* store byte alternate */
4327 #ifndef TARGET_SPARC64
4330 if (!supervisor(dc
))
4333 gen_st_asi(insn
, 1);
4335 case 0x16: /* store halfword alternate */
4336 #ifndef TARGET_SPARC64
4339 if (!supervisor(dc
))
4342 gen_op_check_align_T0_1();
4343 gen_st_asi(insn
, 2);
4345 case 0x17: /* store double word alternate */
4346 #ifndef TARGET_SPARC64
4349 if (!supervisor(dc
))
4356 TCGv r_dword
, r_temp
, r_size
;
4358 gen_op_check_align_T0_7();
4359 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
4360 r_temp
= tcg_temp_new(TCG_TYPE_I32
);
4361 r_size
= tcg_temp_new(TCG_TYPE_I32
);
4362 gen_movl_reg_TN(rd
+ 1, r_temp
);
4363 tcg_gen_helper_1_2(helper_pack64
, r_dword
, cpu_T
[1],
4365 #ifdef TARGET_SPARC64
4369 offset
= GET_FIELD(insn
, 25, 31);
4370 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
4371 tcg_gen_ld_i32(r_dword
, cpu_env
, offsetof(CPUSPARCState
, asi
));
4374 asi
= GET_FIELD(insn
, 19, 26);
4375 tcg_gen_movi_i32(r_temp
, asi
);
4376 #ifdef TARGET_SPARC64
4379 tcg_gen_movi_i32(r_size
, 8);
4380 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], r_dword
, r_temp
, r_size
);
4384 #ifdef TARGET_SPARC64
4385 case 0x0e: /* V9 stx */
4386 gen_op_check_align_T0_7();
4387 ABI32_MASK(cpu_T
[0]);
4388 tcg_gen_qemu_st64(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4390 case 0x1e: /* V9 stxa */
4391 gen_op_check_align_T0_7();
4392 gen_st_asi(insn
, 8);
4398 } else if (xop
> 0x23 && xop
< 0x28) {
4399 if (gen_trap_ifnofpu(dc
))
4403 gen_op_check_align_T0_3();
4404 gen_op_load_fpr_FT0(rd
);
4407 case 0x25: /* stfsr, V9 stxfsr */
4408 #ifdef CONFIG_USER_ONLY
4409 gen_op_check_align_T0_3();
4415 #ifdef TARGET_SPARC64
4416 #if defined(CONFIG_USER_ONLY)
4417 /* V9 stqf, store quad fpreg */
4418 gen_op_check_align_T0_7();
4419 gen_op_load_fpr_QT0(QFPREG(rd
));
4425 #else /* !TARGET_SPARC64 */
4426 /* stdfq, store floating point queue */
4427 #if defined(CONFIG_USER_ONLY)
4430 if (!supervisor(dc
))
4432 if (gen_trap_ifnofpu(dc
))
4438 gen_op_check_align_T0_7();
4439 gen_op_load_fpr_DT0(DFPREG(rd
));
4445 } else if (xop
> 0x33 && xop
< 0x3f) {
4447 #ifdef TARGET_SPARC64
4448 case 0x34: /* V9 stfa */
4449 gen_op_check_align_T0_3();
4450 gen_op_load_fpr_FT0(rd
);
4451 gen_stf_asi(insn
, 4, rd
);
4453 case 0x36: /* V9 stqfa */
4454 #if defined(CONFIG_USER_ONLY)
4455 gen_op_check_align_T0_7();
4456 gen_op_load_fpr_QT0(QFPREG(rd
));
4457 gen_stf_asi(insn
, 16, QFPREG(rd
));
4462 case 0x37: /* V9 stdfa */
4463 gen_op_check_align_T0_3();
4464 gen_op_load_fpr_DT0(DFPREG(rd
));
4465 gen_stf_asi(insn
, 8, DFPREG(rd
));
4467 case 0x3c: /* V9 casa */
4468 gen_op_check_align_T0_3();
4469 gen_cas_asi(insn
, rd
);
4470 gen_movl_T1_reg(rd
);
4472 case 0x3e: /* V9 casxa */
4473 gen_op_check_align_T0_7();
4474 gen_casx_asi(insn
, rd
);
4475 gen_movl_T1_reg(rd
);
4478 case 0x34: /* stc */
4479 case 0x35: /* stcsr */
4480 case 0x36: /* stdcq */
4481 case 0x37: /* stdc */
4493 /* default case for non jump instructions */
4494 if (dc
->npc
== DYNAMIC_PC
) {
4495 dc
->pc
= DYNAMIC_PC
;
4497 } else if (dc
->npc
== JUMP_PC
) {
4498 /* we can do a static jump */
4499 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
4503 dc
->npc
= dc
->npc
+ 4;
4509 gen_op_exception(TT_ILL_INSN
);
4512 #if !defined(CONFIG_USER_ONLY)
4515 gen_op_exception(TT_PRIV_INSN
);
4520 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4523 #ifndef TARGET_SPARC64
4526 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4531 #ifndef TARGET_SPARC64
4534 gen_op_exception(TT_NCP_INSN
);
/* TCG macro-expansion callback, registered with tcg_set_macro_func()
   from cpu_sparc_init() below.  The backend calls it to expand the
   macro op identified by macro_id in context s; dead_args describes
   which arguments are dead at the call site.
   NOTE(review): the function body is not visible in this listing —
   confirm whether it is a stub before documenting its behavior. */
4540 static void tcg_macro_func(TCGContext
*s
, int macro_id
, const int *dead_args
)
/*
 * Translate one guest basic block into TCG ops.
 *   tb  - translation block being filled (cs_base seeds the next PC)
 *   spc - non-zero for the "search PC" pass that records per-insn
 *         PC/NPC into the gen_opc_* side tables (see the lj updates)
 *   env - CPU state, used for breakpoints, MMU index, FPU enable and
 *         single-step mode
 *
 * NOTE(review): the original line numbers in this listing are
 * non-contiguous; several statements (declarations of j/lj, the
 * do-loop opening, brace lines, return paths) were dropped by the
 * extraction.  Code below is kept verbatim.
 */
4544 static inline int gen_intermediate_code_internal(TranslationBlock
* tb
,
4545 int spc
, CPUSPARCState
*env
)
4547 target_ulong pc_start
, last_pc
;
4548 uint16_t *gen_opc_end
;
4549 DisasContext dc1
, *dc
= &dc1
;
/* Reset the per-block disassembly context before seeding it. */
4552 memset(dc
, 0, sizeof(DisasContext
));
4557 dc
->npc
= (target_ulong
) tb
->cs_base
;
4558 dc
->mem_idx
= cpu_mmu_index(env
);
4559 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4560 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4562 cpu_tmp0
= tcg_temp_new(TCG_TYPE_TL
);
/* Stop translation at a software breakpoint: emit a helper_debug call
   when the current PC matches a registered breakpoint (unless it is
   the very first insn of the block). */
4565 if (env
->nb_breakpoints
> 0) {
4566 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4567 if (env
->breakpoints
[j
] == dc
->pc
) {
4568 if (dc
->pc
!= pc_start
)
4570 tcg_gen_helper_0_0(helper_debug
);
4579 fprintf(logfile
, "Search PC...\n");
4580 j
= gen_opc_ptr
- gen_opc_buf
;
/* Search-PC bookkeeping: record PC/NPC for each generated op index
   (presumably only taken when spc != 0 — guard not visible here). */
4584 gen_opc_instr_start
[lj
++] = 0;
4585 gen_opc_pc
[lj
] = dc
->pc
;
4586 gen_opc_npc
[lj
] = dc
->npc
;
4587 gen_opc_instr_start
[lj
] = 1;
/* Translate a single instruction; updates dc->pc/dc->npc. */
4591 disas_sparc_insn(dc
);
4595 /* if the next PC is different, we abort now */
4596 if (dc
->pc
!= (last_pc
+ 4))
4598 /* if we reach a page boundary, we stop generation so that the
4599 PC of a TT_TFAULT exception is always in the right page */
4600 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4602 /* if single step mode, we generate only one instruction and
4603 generate an exception */
4604 if (env
->singlestep_enabled
) {
/* Keep translating while there is op-buffer room and we have not
   walked close to a page boundary (32-byte safety margin). */
4609 } while ((gen_opc_ptr
< gen_opc_end
) &&
4610 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32));
4614 if (dc
->pc
!= DYNAMIC_PC
&&
4615 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4616 /* static PC and NPC: we can use direct chaining */
4617 gen_branch(dc
, dc
->pc
, dc
->npc
);
4619 if (dc
->pc
!= DYNAMIC_PC
)
4625 *gen_opc_ptr
= INDEX_op_end
;
4627 j
= gen_opc_ptr
- gen_opc_buf
;
4630 gen_opc_instr_start
[lj
++] = 0;
/* Preserve the two possible JUMP_PC targets for the search-PC pass. */
4636 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4637 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4639 tb
->size
= last_pc
+ 4 - pc_start
;
/* Optional disassembly dump of the guest code just translated. */
4642 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4643 fprintf(logfile
, "--------------\n");
4644 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4645 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4646 fprintf(logfile
, "\n");
4652 int gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4654 return gen_intermediate_code_internal(tb
, 0, env
);
4657 int gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4659 return gen_intermediate_code_internal(tb
, 1, env
);
/*
 * Reset the CPU to its power-on state.
 *
 * NOTE(review): interior lines are missing from this listing
 * (original numbering jumps 4663-4666, 4675-4679, 4685-4686, 4689,
 * 4691-4692) — the #else branches of the CONFIG_USER_ONLY /
 * TARGET_SPARC64 conditionals and part of the common init are not
 * visible.  Code below is kept verbatim.
 */
4662 void cpu_reset(CPUSPARCState
*env
)
/* Point the window-register pointer at the current window (16 regs
   per window step in this layout). */
4667 env
->regwptr
= env
->regbase
+ (env
->cwp
* 16);
4668 #if defined(CONFIG_USER_ONLY)
4669 env
->user_mode_only
= 1;
4670 #ifdef TARGET_SPARC64
/* User-mode sparc64: all but two windows clean/saveable, normal
   memory ordering relaxed with FPU and interrupts enabled. */
4671 env
->cleanwin
= NWINDOWS
- 2;
4672 env
->cansave
= NWINDOWS
- 2;
4673 env
->pstate
= PS_RMO
| PS_PEF
| PS_IE
;
4674 env
->asi
= 0x82; // Primary no-fault
4680 #ifdef TARGET_SPARC64
/* System-mode sparc64 reset: privileged state, reset PC in the
   hyperprivileged region, trap-state pointer at current trap level. */
4681 env
->pstate
= PS_PRIV
;
4682 env
->hpstate
= HS_PRIV
;
4683 env
->pc
= 0x1fff0000000ULL
;
4684 env
->tsptr
= &env
->ts
[env
->tl
];
/* sparc32: clear MMU enable/no-fault bits, then apply the model's
   boot-mode mask. */
4687 env
->mmuregs
[0] &= ~(MMU_E
| MMU_NF
);
4688 env
->mmuregs
[0] |= env
->mmu_bm
;
4690 env
->npc
= env
->pc
+ 4;
/*
 * Allocate and initialize a CPU state for the given model name.
 * Looks the model up in sparc_defs, copies version/MMU parameters,
 * and (first call) registers the TCG global variables.
 *
 * NOTE(review): interior lines are missing from this listing (e.g.
 * the NULL check on the lookup result, the env declaration, the
 * one-time "inited" guard around the TCG setup, and the final
 * "return env") — code below is kept verbatim.
 */
4694 CPUSPARCState
*cpu_sparc_init(const char *cpu_model
)
4697 const sparc_def_t
*def
;
/* Resolve the requested model; behavior on an unknown name depends
   on lines not visible here. */
4700 def
= cpu_sparc_find_by_name(cpu_model
);
4704 env
= qemu_mallocz(sizeof(CPUSPARCState
));
4708 env
->cpu_model_str
= cpu_model
;
4709 env
->version
= def
->iu_version
;
/* The FPU version is reported through the FSR register. */
4710 env
->fsr
= def
->fpu_version
;
4711 #if !defined(TARGET_SPARC64)
/* sparc32 only: copy the per-model MMU boot mode and register
   masks, and fold the MMU version into control register 0. */
4712 env
->mmu_bm
= def
->mmu_bm
;
4713 env
->mmu_ctpr_mask
= def
->mmu_ctpr_mask
;
4714 env
->mmu_cxr_mask
= def
->mmu_cxr_mask
;
4715 env
->mmu_sfsr_mask
= def
->mmu_sfsr_mask
;
4716 env
->mmu_trcr_mask
= def
->mmu_trcr_mask
;
4717 env
->mmuregs
[0] |= def
->mmu_version
;
4718 cpu_sparc_set_id(env
, 0);
4721 /* init various static tables */
/* Register the translator's TCG globals: env pointer, window
   register pointer, the T0-T2 temporaries, and condition-code
   state.  Presumably guarded by a one-time flag (not visible). */
4725 tcg_set_macro_func(&tcg_ctx
, tcg_macro_func
);
4726 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
4727 cpu_regwptr
= tcg_global_mem_new(TCG_TYPE_PTR
, TCG_AREG0
,
4728 offsetof(CPUState
, regwptr
),
4730 //#if TARGET_LONG_BITS > HOST_LONG_BITS
4731 #ifdef TARGET_SPARC64
/* sparc64: T0-T2 live in memory (too few host registers). */
4732 cpu_T
[0] = tcg_global_mem_new(TCG_TYPE_TL
,
4733 TCG_AREG0
, offsetof(CPUState
, t0
), "T0");
4734 cpu_T
[1] = tcg_global_mem_new(TCG_TYPE_TL
,
4735 TCG_AREG0
, offsetof(CPUState
, t1
), "T1");
4736 cpu_T
[2] = tcg_global_mem_new(TCG_TYPE_TL
,
4737 TCG_AREG0
, offsetof(CPUState
, t2
), "T2");
4738 cpu_xcc
= tcg_global_mem_new(TCG_TYPE_I32
,
4739 TCG_AREG0
, offsetof(CPUState
, xcc
),
/* sparc32 (#else branch, directive not visible): T0-T2 map to fixed
   host registers AREG1-AREG3. */
4742 cpu_T
[0] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG1
, "T0");
4743 cpu_T
[1] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG2
, "T1");
4744 cpu_T
[2] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG3
, "T2");
4746 cpu_cc_src
= tcg_global_mem_new(TCG_TYPE_TL
,
4747 TCG_AREG0
, offsetof(CPUState
, cc_src
),
4749 cpu_cc_dst
= tcg_global_mem_new(TCG_TYPE_TL
,
4750 TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4752 cpu_psr
= tcg_global_mem_new(TCG_TYPE_I32
,
4753 TCG_AREG0
, offsetof(CPUState
, psr
),
4762 void cpu_sparc_set_id(CPUSPARCState
*env
, unsigned int cpu
)
4764 #if !defined(TARGET_SPARC64)
4765 env
->mxccregs
[7] = ((cpu
+ 8) & 0xf) << 24;
4769 static const sparc_def_t sparc_defs
[] = {
4770 #ifdef TARGET_SPARC64
4772 .name
= "Fujitsu Sparc64",
4773 .iu_version
= ((0x04ULL
<< 48) | (0x02ULL
<< 32) | (0ULL << 24)
4774 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4775 .fpu_version
= 0x00000000,
4779 .name
= "Fujitsu Sparc64 III",
4780 .iu_version
= ((0x04ULL
<< 48) | (0x03ULL
<< 32) | (0ULL << 24)
4781 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4782 .fpu_version
= 0x00000000,
4786 .name
= "Fujitsu Sparc64 IV",
4787 .iu_version
= ((0x04ULL
<< 48) | (0x04ULL
<< 32) | (0ULL << 24)
4788 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4789 .fpu_version
= 0x00000000,
4793 .name
= "Fujitsu Sparc64 V",
4794 .iu_version
= ((0x04ULL
<< 48) | (0x05ULL
<< 32) | (0x51ULL
<< 24)
4795 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4796 .fpu_version
= 0x00000000,
4800 .name
= "TI UltraSparc I",
4801 .iu_version
= ((0x17ULL
<< 48) | (0x10ULL
<< 32) | (0x40ULL
<< 24)
4802 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4803 .fpu_version
= 0x00000000,
4807 .name
= "TI UltraSparc II",
4808 .iu_version
= ((0x17ULL
<< 48) | (0x11ULL
<< 32) | (0x20ULL
<< 24)
4809 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4810 .fpu_version
= 0x00000000,
4814 .name
= "TI UltraSparc IIi",
4815 .iu_version
= ((0x17ULL
<< 48) | (0x12ULL
<< 32) | (0x91ULL
<< 24)
4816 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4817 .fpu_version
= 0x00000000,
4821 .name
= "TI UltraSparc IIe",
4822 .iu_version
= ((0x17ULL
<< 48) | (0x13ULL
<< 32) | (0x14ULL
<< 24)
4823 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4824 .fpu_version
= 0x00000000,
4828 .name
= "Sun UltraSparc III",
4829 .iu_version
= ((0x3eULL
<< 48) | (0x14ULL
<< 32) | (0x34ULL
<< 24)
4830 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4831 .fpu_version
= 0x00000000,
4835 .name
= "Sun UltraSparc III Cu",
4836 .iu_version
= ((0x3eULL
<< 48) | (0x15ULL
<< 32) | (0x41ULL
<< 24)
4837 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4838 .fpu_version
= 0x00000000,
4842 .name
= "Sun UltraSparc IIIi",
4843 .iu_version
= ((0x3eULL
<< 48) | (0x16ULL
<< 32) | (0x34ULL
<< 24)
4844 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4845 .fpu_version
= 0x00000000,
4849 .name
= "Sun UltraSparc IV",
4850 .iu_version
= ((0x3eULL
<< 48) | (0x18ULL
<< 32) | (0x31ULL
<< 24)
4851 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4852 .fpu_version
= 0x00000000,
4856 .name
= "Sun UltraSparc IV+",
4857 .iu_version
= ((0x3eULL
<< 48) | (0x19ULL
<< 32) | (0x22ULL
<< 24)
4858 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4859 .fpu_version
= 0x00000000,
4863 .name
= "Sun UltraSparc IIIi+",
4864 .iu_version
= ((0x3eULL
<< 48) | (0x22ULL
<< 32) | (0ULL << 24)
4865 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4866 .fpu_version
= 0x00000000,
4870 .name
= "NEC UltraSparc I",
4871 .iu_version
= ((0x22ULL
<< 48) | (0x10ULL
<< 32) | (0x40ULL
<< 24)
4872 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4873 .fpu_version
= 0x00000000,
4878 .name
= "Fujitsu MB86900",
4879 .iu_version
= 0x00 << 24, /* Impl 0, ver 0 */
4880 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4881 .mmu_version
= 0x00 << 24, /* Impl 0, ver 0 */
4882 .mmu_bm
= 0x00004000,
4883 .mmu_ctpr_mask
= 0x007ffff0,
4884 .mmu_cxr_mask
= 0x0000003f,
4885 .mmu_sfsr_mask
= 0xffffffff,
4886 .mmu_trcr_mask
= 0xffffffff,
4889 .name
= "Fujitsu MB86904",
4890 .iu_version
= 0x04 << 24, /* Impl 0, ver 4 */
4891 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4892 .mmu_version
= 0x04 << 24, /* Impl 0, ver 4 */
4893 .mmu_bm
= 0x00004000,
4894 .mmu_ctpr_mask
= 0x00ffffc0,
4895 .mmu_cxr_mask
= 0x000000ff,
4896 .mmu_sfsr_mask
= 0x00016fff,
4897 .mmu_trcr_mask
= 0x00ffffff,
4900 .name
= "Fujitsu MB86907",
4901 .iu_version
= 0x05 << 24, /* Impl 0, ver 5 */
4902 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4903 .mmu_version
= 0x05 << 24, /* Impl 0, ver 5 */
4904 .mmu_bm
= 0x00004000,
4905 .mmu_ctpr_mask
= 0xffffffc0,
4906 .mmu_cxr_mask
= 0x000000ff,
4907 .mmu_sfsr_mask
= 0x00016fff,
4908 .mmu_trcr_mask
= 0xffffffff,
4911 .name
= "LSI L64811",
4912 .iu_version
= 0x10 << 24, /* Impl 1, ver 0 */
4913 .fpu_version
= 1 << 17, /* FPU version 1 (LSI L64814) */
4914 .mmu_version
= 0x10 << 24,
4915 .mmu_bm
= 0x00004000,
4916 .mmu_ctpr_mask
= 0x007ffff0,
4917 .mmu_cxr_mask
= 0x0000003f,
4918 .mmu_sfsr_mask
= 0xffffffff,
4919 .mmu_trcr_mask
= 0xffffffff,
4922 .name
= "Cypress CY7C601",
4923 .iu_version
= 0x11 << 24, /* Impl 1, ver 1 */
4924 .fpu_version
= 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4925 .mmu_version
= 0x10 << 24,
4926 .mmu_bm
= 0x00004000,
4927 .mmu_ctpr_mask
= 0x007ffff0,
4928 .mmu_cxr_mask
= 0x0000003f,
4929 .mmu_sfsr_mask
= 0xffffffff,
4930 .mmu_trcr_mask
= 0xffffffff,
4933 .name
= "Cypress CY7C611",
4934 .iu_version
= 0x13 << 24, /* Impl 1, ver 3 */
4935 .fpu_version
= 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4936 .mmu_version
= 0x10 << 24,
4937 .mmu_bm
= 0x00004000,
4938 .mmu_ctpr_mask
= 0x007ffff0,
4939 .mmu_cxr_mask
= 0x0000003f,
4940 .mmu_sfsr_mask
= 0xffffffff,
4941 .mmu_trcr_mask
= 0xffffffff,
4944 .name
= "TI SuperSparc II",
4945 .iu_version
= 0x40000000,
4946 .fpu_version
= 0 << 17,
4947 .mmu_version
= 0x04000000,
4948 .mmu_bm
= 0x00002000,
4949 .mmu_ctpr_mask
= 0xffffffc0,
4950 .mmu_cxr_mask
= 0x0000ffff,
4951 .mmu_sfsr_mask
= 0xffffffff,
4952 .mmu_trcr_mask
= 0xffffffff,
4955 .name
= "TI MicroSparc I",
4956 .iu_version
= 0x41000000,
4957 .fpu_version
= 4 << 17,
4958 .mmu_version
= 0x41000000,
4959 .mmu_bm
= 0x00004000,
4960 .mmu_ctpr_mask
= 0x007ffff0,
4961 .mmu_cxr_mask
= 0x0000003f,
4962 .mmu_sfsr_mask
= 0x00016fff,
4963 .mmu_trcr_mask
= 0x0000003f,
4966 .name
= "TI MicroSparc II",
4967 .iu_version
= 0x42000000,
4968 .fpu_version
= 4 << 17,
4969 .mmu_version
= 0x02000000,
4970 .mmu_bm
= 0x00004000,
4971 .mmu_ctpr_mask
= 0x00ffffc0,
4972 .mmu_cxr_mask
= 0x000000ff,
4973 .mmu_sfsr_mask
= 0x00016fff,
4974 .mmu_trcr_mask
= 0x00ffffff,
4977 .name
= "TI MicroSparc IIep",
4978 .iu_version
= 0x42000000,
4979 .fpu_version
= 4 << 17,
4980 .mmu_version
= 0x04000000,
4981 .mmu_bm
= 0x00004000,
4982 .mmu_ctpr_mask
= 0x00ffffc0,
4983 .mmu_cxr_mask
= 0x000000ff,
4984 .mmu_sfsr_mask
= 0x00016bff,
4985 .mmu_trcr_mask
= 0x00ffffff,
4988 .name
= "TI SuperSparc 51",
4989 .iu_version
= 0x43000000,
4990 .fpu_version
= 0 << 17,
4991 .mmu_version
= 0x04000000,
4992 .mmu_bm
= 0x00002000,
4993 .mmu_ctpr_mask
= 0xffffffc0,
4994 .mmu_cxr_mask
= 0x0000ffff,
4995 .mmu_sfsr_mask
= 0xffffffff,
4996 .mmu_trcr_mask
= 0xffffffff,
4999 .name
= "TI SuperSparc 61",
5000 .iu_version
= 0x44000000,
5001 .fpu_version
= 0 << 17,
5002 .mmu_version
= 0x04000000,
5003 .mmu_bm
= 0x00002000,
5004 .mmu_ctpr_mask
= 0xffffffc0,
5005 .mmu_cxr_mask
= 0x0000ffff,
5006 .mmu_sfsr_mask
= 0xffffffff,
5007 .mmu_trcr_mask
= 0xffffffff,
5010 .name
= "Ross RT625",
5011 .iu_version
= 0x1e000000,
5012 .fpu_version
= 1 << 17,
5013 .mmu_version
= 0x1e000000,
5014 .mmu_bm
= 0x00004000,
5015 .mmu_ctpr_mask
= 0x007ffff0,
5016 .mmu_cxr_mask
= 0x0000003f,
5017 .mmu_sfsr_mask
= 0xffffffff,
5018 .mmu_trcr_mask
= 0xffffffff,
5021 .name
= "Ross RT620",
5022 .iu_version
= 0x1f000000,
5023 .fpu_version
= 1 << 17,
5024 .mmu_version
= 0x1f000000,
5025 .mmu_bm
= 0x00004000,
5026 .mmu_ctpr_mask
= 0x007ffff0,
5027 .mmu_cxr_mask
= 0x0000003f,
5028 .mmu_sfsr_mask
= 0xffffffff,
5029 .mmu_trcr_mask
= 0xffffffff,
5032 .name
= "BIT B5010",
5033 .iu_version
= 0x20000000,
5034 .fpu_version
= 0 << 17, /* B5010/B5110/B5120/B5210 */
5035 .mmu_version
= 0x20000000,
5036 .mmu_bm
= 0x00004000,
5037 .mmu_ctpr_mask
= 0x007ffff0,
5038 .mmu_cxr_mask
= 0x0000003f,
5039 .mmu_sfsr_mask
= 0xffffffff,
5040 .mmu_trcr_mask
= 0xffffffff,
5043 .name
= "Matsushita MN10501",
5044 .iu_version
= 0x50000000,
5045 .fpu_version
= 0 << 17,
5046 .mmu_version
= 0x50000000,
5047 .mmu_bm
= 0x00004000,
5048 .mmu_ctpr_mask
= 0x007ffff0,
5049 .mmu_cxr_mask
= 0x0000003f,
5050 .mmu_sfsr_mask
= 0xffffffff,
5051 .mmu_trcr_mask
= 0xffffffff,
5054 .name
= "Weitek W8601",
5055 .iu_version
= 0x90 << 24, /* Impl 9, ver 0 */
5056 .fpu_version
= 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
5057 .mmu_version
= 0x10 << 24,
5058 .mmu_bm
= 0x00004000,
5059 .mmu_ctpr_mask
= 0x007ffff0,
5060 .mmu_cxr_mask
= 0x0000003f,
5061 .mmu_sfsr_mask
= 0xffffffff,
5062 .mmu_trcr_mask
= 0xffffffff,
5066 .iu_version
= 0xf2000000,
5067 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
5068 .mmu_version
= 0xf2000000,
5069 .mmu_bm
= 0x00004000,
5070 .mmu_ctpr_mask
= 0x007ffff0,
5071 .mmu_cxr_mask
= 0x0000003f,
5072 .mmu_sfsr_mask
= 0xffffffff,
5073 .mmu_trcr_mask
= 0xffffffff,
5077 .iu_version
= 0xf3000000,
5078 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
5079 .mmu_version
= 0xf3000000,
5080 .mmu_bm
= 0x00004000,
5081 .mmu_ctpr_mask
= 0x007ffff0,
5082 .mmu_cxr_mask
= 0x0000003f,
5083 .mmu_sfsr_mask
= 0xffffffff,
5084 .mmu_trcr_mask
= 0xffffffff,
5089 static const sparc_def_t
*cpu_sparc_find_by_name(const unsigned char *name
)
5093 for (i
= 0; i
< sizeof(sparc_defs
) / sizeof(sparc_def_t
); i
++) {
5094 if (strcasecmp(name
, sparc_defs
[i
].name
) == 0) {
5095 return &sparc_defs
[i
];
5101 void sparc_cpu_list (FILE *f
, int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...))
5105 for (i
= 0; i
< sizeof(sparc_defs
) / sizeof(sparc_def_t
); i
++) {
5106 (*cpu_fprintf
)(f
, "Sparc %16s IU " TARGET_FMT_lx
" FPU %08x MMU %08x\n",
5108 sparc_defs
[i
].iu_version
,
5109 sparc_defs
[i
].fpu_version
,
5110 sparc_defs
[i
].mmu_version
);
/* Render a PSR flag as its letter when set, '-' otherwise
   (reads the local "env" of the enclosing function). */
5114 #define GET_FLAG(a,b) ((env->psr & a)?b:'-')
/*
 * Dump CPU state (PC/NPC, globals, current register window, FP
 * registers and control state) to stream f via cpu_fprintf.
 *
 * NOTE(review): interior lines are missing from this listing
 * (original numbering jumps at 5118-5121, 5127, 5136, 5141, 5144,
 * 5147, 5149, 5156, 5162, 5164-5165) — the g4..g7 / i4..i7 loop
 * headers, local declarations, and #else directives are not visible.
 * Code below is kept verbatim.
 */
5116 void cpu_dump_state(CPUState
*env
, FILE *f
,
5117 int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...),
5122 cpu_fprintf(f
, "pc: " TARGET_FMT_lx
" npc: " TARGET_FMT_lx
"\n", env
->pc
, env
->npc
);
5123 cpu_fprintf(f
, "General Registers:\n");
/* First four global registers (%g0..%g3). */
5124 for (i
= 0; i
< 4; i
++)
5125 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
5126 cpu_fprintf(f
, "\n");
/* Remaining globals (%g4..%g7) — loop header not visible here. */
5128 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
5129 cpu_fprintf(f
, "\nCurrent Register Window:\n");
/* x selects the group: 0 = out, 1 = local, 2 = in registers. */
5130 for (x
= 0; x
< 3; x
++) {
5131 for (i
= 0; i
< 4; i
++)
5132 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
5133 (x
== 0 ? 'o' : (x
== 1 ? 'l' : 'i')), i
,
5134 env
->regwptr
[i
+ x
* 8]);
5135 cpu_fprintf(f
, "\n");
/* Second half of each group (indices 4..7) — loop header not
   visible here. */
5137 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
5138 (x
== 0 ? 'o' : x
== 1 ? 'l' : 'i'), i
,
5139 env
->regwptr
[i
+ x
* 8]);
5140 cpu_fprintf(f
, "\n");
5142 cpu_fprintf(f
, "\nFloating Point Registers:\n");
5143 for (i
= 0; i
< 32; i
++) {
5145 cpu_fprintf(f
, "%%f%02d:", i
);
5146 cpu_fprintf(f
, " %016lf", env
->fpr
[i
]);
5148 cpu_fprintf(f
, "\n");
5150 #ifdef TARGET_SPARC64
/* sparc64 control state: pstate/ccr/asi/tl/fprs plus the window
   management registers. */
5151 cpu_fprintf(f
, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
5152 env
->pstate
, GET_CCR(env
), env
->asi
, env
->tl
, env
->fprs
);
5153 cpu_fprintf(f
, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
5154 env
->cansave
, env
->canrestore
, env
->otherwin
, env
->wstate
,
5155 env
->cleanwin
, NWINDOWS
- 1 - env
->cwp
);
/* sparc32 branch (#else directive not visible): PSR flags and WIM. */
5157 cpu_fprintf(f
, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env
),
5158 GET_FLAG(PSR_ZERO
, 'Z'), GET_FLAG(PSR_OVF
, 'V'),
5159 GET_FLAG(PSR_NEG
, 'N'), GET_FLAG(PSR_CARRY
, 'C'),
5160 env
->psrs
?'S':'-', env
->psrps
?'P':'-',
5161 env
->psret
?'E':'-', env
->wim
);
5163 cpu_fprintf(f
, "fsr: 0x%08x\n", GET_FSR32(env
));
/*
 * Debug (gdbstub) virtual-to-physical translation.
 * Two variants: the user-only build (body not visible in this
 * listing — presumably an identity mapping) and the system build,
 * which walks the MMU via get_physical_address().
 *
 * NOTE(review): interior lines are missing (5168-5172, 5175-5176,
 * 5178, 5181, 5186, 5188-5191) — the user-only body, the extern
 * declaration's final parameter, the braces and the return
 * statements are not visible.  Code below is kept verbatim.
 */
5166 #if defined(CONFIG_USER_ONLY)
5167 target_phys_addr_t
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
/* System build: reuse the MMU walker defined elsewhere.  The
   trailing parameter (after rw) is cut off in this listing. */
5173 extern int get_physical_address (CPUState
*env
, target_phys_addr_t
*physical
, int *prot
,
5174 int *access_index
, target_ulong address
, int rw
,
5177 target_phys_addr_t
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
5179 target_phys_addr_t phys_addr
;
5180 int prot
, access_index
;
/* Try a code fetch (rw = 2) first, then fall back to a data read
   (rw = 0), both at kernel MMU privilege. */
5182 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
, 2,
5183 MMU_KERNEL_IDX
) != 0)
5184 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
,
5185 0, MMU_KERNEL_IDX
) != 0)
/* Reject addresses that resolve to unassigned I/O memory. */
5187 if (cpu_get_physical_page_desc(phys_addr
) == IO_MEM_UNASSIGNED
)
5193 void helper_flush(target_ulong addr
)
5196 tb_invalidate_page_range(addr
, addr
+ 8);