4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env
, cpu_regwptr
;
42 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
43 static TCGv cpu_psr
, cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
45 #ifndef CONFIG_USER_ONLY
48 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
50 static TCGv cpu_xcc
, cpu_asi
, cpu_fprs
, cpu_gsr
;
51 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
52 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0
, cpu_tmp32
, cpu_tmp64
;
58 /* Floating point registers */
59 static TCGv cpu_fpr
[TARGET_FPREGS
];
61 #include "gen-icount.h"
63 typedef struct DisasContext
{
64 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
70 int address_mask_32bit
;
71 struct TranslationBlock
*tb
;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map a double/quad FP register number to its base single-register index.
   On V9 bit 0 of the register field selects the upper bank (regs 32-63). */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
/* Sign-extend the low `len` bits of x to a full int.
   Shift through unsigned to avoid UB on left-shifting negative values. */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;
    return (int)((unsigned int)x << shift) >> shift;
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_DT0(unsigned int src
)
107 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
108 offsetof(CPU_DoubleU
, l
.upper
));
109 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
110 offsetof(CPU_DoubleU
, l
.lower
));
113 static void gen_op_load_fpr_DT1(unsigned int src
)
115 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
116 offsetof(CPU_DoubleU
, l
.upper
));
117 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
118 offsetof(CPU_DoubleU
, l
.lower
));
121 static void gen_op_store_DT0_fpr(unsigned int dst
)
123 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
124 offsetof(CPU_DoubleU
, l
.upper
));
125 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
126 offsetof(CPU_DoubleU
, l
.lower
));
129 static void gen_op_load_fpr_QT0(unsigned int src
)
131 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
132 offsetof(CPU_QuadU
, l
.upmost
));
133 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
134 offsetof(CPU_QuadU
, l
.upper
));
135 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
136 offsetof(CPU_QuadU
, l
.lower
));
137 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
138 offsetof(CPU_QuadU
, l
.lowest
));
141 static void gen_op_load_fpr_QT1(unsigned int src
)
143 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
144 offsetof(CPU_QuadU
, l
.upmost
));
145 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
146 offsetof(CPU_QuadU
, l
.upper
));
147 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
148 offsetof(CPU_QuadU
, l
.lower
));
149 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
150 offsetof(CPU_QuadU
, l
.lowest
));
153 static void gen_op_store_QT0_fpr(unsigned int dst
)
155 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
156 offsetof(CPU_QuadU
, l
.upmost
));
157 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
158 offsetof(CPU_QuadU
, l
.upper
));
159 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
160 offsetof(CPU_QuadU
, l
.lower
));
161 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
162 offsetof(CPU_QuadU
, l
.lowest
));
/* Privilege-level predicates; user-mode emulation has no privileged state.
   mem_idx doubles as the privilege level (>=1 supervisor, ==2 hypervisor). */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif

/* V9 PSTATE.AM: when set, addresses are masked to 32 bits.  32-bit ABI
   always masks. */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
187 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
189 #ifdef TARGET_SPARC64
191 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
195 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
198 tcg_gen_movi_tl(tn
, 0);
200 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
202 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
206 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
211 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
213 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
217 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
218 target_ulong pc
, target_ulong npc
)
220 TranslationBlock
*tb
;
223 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
224 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num
);
227 tcg_gen_movi_tl(cpu_pc
, pc
);
228 tcg_gen_movi_tl(cpu_npc
, npc
);
229 tcg_gen_exit_tb((long)tb
+ tb_num
);
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc
, pc
);
233 tcg_gen_movi_tl(cpu_npc
, npc
);
239 static inline void gen_mov_reg_N(TCGv reg
, TCGv src
)
241 tcg_gen_extu_i32_tl(reg
, src
);
242 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
243 tcg_gen_andi_tl(reg
, reg
, 0x1);
246 static inline void gen_mov_reg_Z(TCGv reg
, TCGv src
)
248 tcg_gen_extu_i32_tl(reg
, src
);
249 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
250 tcg_gen_andi_tl(reg
, reg
, 0x1);
253 static inline void gen_mov_reg_V(TCGv reg
, TCGv src
)
255 tcg_gen_extu_i32_tl(reg
, src
);
256 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
257 tcg_gen_andi_tl(reg
, reg
, 0x1);
260 static inline void gen_mov_reg_C(TCGv reg
, TCGv src
)
262 tcg_gen_extu_i32_tl(reg
, src
);
263 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
264 tcg_gen_andi_tl(reg
, reg
, 0x1);
267 static inline void gen_cc_clear_icc(void)
269 tcg_gen_movi_i32(cpu_psr
, 0);
#ifdef TARGET_SPARC64
/* Clear all 64-bit (xcc) condition codes before recomputing them. */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
285 static inline void gen_cc_NZ_icc(TCGv dst
)
290 l1
= gen_new_label();
291 l2
= gen_new_label();
292 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
293 tcg_gen_andi_tl(r_temp
, dst
, 0xffffffffULL
);
294 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
295 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
297 tcg_gen_ext_i32_tl(r_temp
, dst
);
298 tcg_gen_brcondi_tl(TCG_COND_GE
, r_temp
, 0, l2
);
299 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
301 tcg_temp_free(r_temp
);
#ifdef TARGET_SPARC64
/* Set xcc N and Z from the full 64-bit value of dst. */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
322 env->psr |= PSR_CARRY;
324 static inline void gen_cc_C_add_icc(TCGv dst
, TCGv src1
)
326 TCGv r_temp1
, r_temp2
;
329 l1
= gen_new_label();
330 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
331 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
332 tcg_gen_andi_tl(r_temp1
, dst
, 0xffffffffULL
);
333 tcg_gen_andi_tl(r_temp2
, src1
, 0xffffffffULL
);
334 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
335 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
337 tcg_temp_free(r_temp1
);
338 tcg_temp_free(r_temp2
);
#ifdef TARGET_SPARC64
/* Set xcc C for an addition: carry out iff (u64)dst < (u64)src1. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
357 static inline void gen_cc_V_add_icc(TCGv dst
, TCGv src1
, TCGv src2
)
361 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
362 tcg_gen_xor_tl(r_temp
, src1
, src2
);
363 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
364 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
365 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
366 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
367 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
368 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
369 tcg_temp_free(r_temp
);
370 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
#ifdef TARGET_SPARC64
/* Set xcc V for an addition, using bit 63 as the sign bit. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
391 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
393 TCGv r_temp
, r_const
;
396 l1
= gen_new_label();
398 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
399 tcg_gen_xor_tl(r_temp
, src1
, src2
);
400 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
401 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
402 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
403 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
404 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
405 r_const
= tcg_const_i32(TT_TOVF
);
406 tcg_gen_helper_0_1(raise_exception
, r_const
);
407 tcg_temp_free(r_const
);
409 tcg_temp_free(r_temp
);
412 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
416 l1
= gen_new_label();
417 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
418 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
419 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
420 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
424 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
429 l1
= gen_new_label();
430 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
431 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
432 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
433 r_const
= tcg_const_i32(TT_TOVF
);
434 tcg_gen_helper_0_1(raise_exception
, r_const
);
435 tcg_temp_free(r_const
);
439 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
441 tcg_gen_mov_tl(cpu_cc_src
, src1
);
442 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
443 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
445 gen_cc_NZ_icc(cpu_cc_dst
);
446 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
447 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
448 #ifdef TARGET_SPARC64
450 gen_cc_NZ_xcc(cpu_cc_dst
);
451 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
452 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
454 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
457 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
459 tcg_gen_mov_tl(cpu_cc_src
, src1
);
460 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
461 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
462 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
464 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
465 #ifdef TARGET_SPARC64
467 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
469 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
470 gen_cc_NZ_icc(cpu_cc_dst
);
471 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
472 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
473 #ifdef TARGET_SPARC64
474 gen_cc_NZ_xcc(cpu_cc_dst
);
475 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
476 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
478 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
481 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
483 tcg_gen_mov_tl(cpu_cc_src
, src1
);
484 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
485 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
487 gen_cc_NZ_icc(cpu_cc_dst
);
488 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
489 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
490 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
491 #ifdef TARGET_SPARC64
493 gen_cc_NZ_xcc(cpu_cc_dst
);
494 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
495 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
497 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
500 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
502 tcg_gen_mov_tl(cpu_cc_src
, src1
);
503 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
504 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
505 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
506 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
508 gen_cc_NZ_icc(cpu_cc_dst
);
509 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
510 #ifdef TARGET_SPARC64
512 gen_cc_NZ_xcc(cpu_cc_dst
);
513 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
514 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
516 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
521 env->psr |= PSR_CARRY;
523 static inline void gen_cc_C_sub_icc(TCGv src1
, TCGv src2
)
525 TCGv r_temp1
, r_temp2
;
528 l1
= gen_new_label();
529 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
530 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
531 tcg_gen_andi_tl(r_temp1
, src1
, 0xffffffffULL
);
532 tcg_gen_andi_tl(r_temp2
, src2
, 0xffffffffULL
);
533 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
534 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
536 tcg_temp_free(r_temp1
);
537 tcg_temp_free(r_temp2
);
#ifdef TARGET_SPARC64
/* Set xcc C for a subtraction: borrow iff (u64)src1 < (u64)src2. */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
556 static inline void gen_cc_V_sub_icc(TCGv dst
, TCGv src1
, TCGv src2
)
560 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
561 tcg_gen_xor_tl(r_temp
, src1
, src2
);
562 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
563 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
564 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
565 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
566 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
567 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
568 tcg_temp_free(r_temp
);
#ifdef TARGET_SPARC64
/* Set xcc V for a subtraction, using bit 63 as the sign bit. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
588 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
590 TCGv r_temp
, r_const
;
593 l1
= gen_new_label();
595 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
596 tcg_gen_xor_tl(r_temp
, src1
, src2
);
597 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
598 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
599 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
600 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
601 r_const
= tcg_const_i32(TT_TOVF
);
602 tcg_gen_helper_0_1(raise_exception
, r_const
);
603 tcg_temp_free(r_const
);
605 tcg_temp_free(r_temp
);
608 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
610 tcg_gen_mov_tl(cpu_cc_src
, src1
);
611 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
612 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
614 gen_cc_NZ_icc(cpu_cc_dst
);
615 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
616 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
617 #ifdef TARGET_SPARC64
619 gen_cc_NZ_xcc(cpu_cc_dst
);
620 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
621 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
623 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
626 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
628 tcg_gen_mov_tl(cpu_cc_src
, src1
);
629 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
630 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
631 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
633 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
634 #ifdef TARGET_SPARC64
636 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
638 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
639 gen_cc_NZ_icc(cpu_cc_dst
);
640 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
641 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
642 #ifdef TARGET_SPARC64
643 gen_cc_NZ_xcc(cpu_cc_dst
);
644 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
645 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
647 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
650 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
652 tcg_gen_mov_tl(cpu_cc_src
, src1
);
653 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
654 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
656 gen_cc_NZ_icc(cpu_cc_dst
);
657 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
658 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
659 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
660 #ifdef TARGET_SPARC64
662 gen_cc_NZ_xcc(cpu_cc_dst
);
663 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
664 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
666 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
669 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
671 tcg_gen_mov_tl(cpu_cc_src
, src1
);
672 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
673 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
674 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
675 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
677 gen_cc_NZ_icc(cpu_cc_dst
);
678 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
679 #ifdef TARGET_SPARC64
681 gen_cc_NZ_xcc(cpu_cc_dst
);
682 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
683 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
685 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
688 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
693 l1
= gen_new_label();
694 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
700 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
701 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
702 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
703 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
704 tcg_gen_movi_tl(cpu_cc_src2
, 0);
708 // env->y = (b2 << 31) | (env->y >> 1);
709 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
710 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
711 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
712 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
713 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
714 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
717 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
718 gen_mov_reg_V(r_temp
, cpu_psr
);
719 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
720 tcg_temp_free(r_temp
);
722 // T0 = (b1 << 31) | (T0 >> 1);
724 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
725 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
726 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
728 /* do addition and update flags */
729 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
732 gen_cc_NZ_icc(cpu_cc_dst
);
733 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
734 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
735 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
738 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
740 TCGv r_temp
, r_temp2
;
742 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
743 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
745 tcg_gen_extu_i32_i64(r_temp
, src2
);
746 tcg_gen_extu_i32_i64(r_temp2
, src1
);
747 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
749 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
750 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
751 tcg_temp_free(r_temp
);
752 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
753 #ifdef TARGET_SPARC64
754 tcg_gen_mov_i64(dst
, r_temp2
);
756 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
758 tcg_temp_free(r_temp2
);
761 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
763 TCGv r_temp
, r_temp2
;
765 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
766 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
768 tcg_gen_ext_i32_i64(r_temp
, src2
);
769 tcg_gen_ext_i32_i64(r_temp2
, src1
);
770 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
772 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
773 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
774 tcg_temp_free(r_temp
);
775 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
776 #ifdef TARGET_SPARC64
777 tcg_gen_mov_i64(dst
, r_temp2
);
779 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
781 tcg_temp_free(r_temp2
);
#ifdef TARGET_SPARC64
/* Raise a division-by-zero trap if divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    int l1;
    TCGv r_const;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}
#endif
#ifdef TARGET_SPARC64
/* SDIVX: 64-bit signed division, trapping on zero divisor and pinning the
   INT64_MIN / -1 case to INT64_MIN (hardware overflow behavior). */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
817 static inline void gen_op_div_cc(TCGv dst
)
821 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
823 gen_cc_NZ_icc(cpu_cc_dst
);
824 l1
= gen_new_label();
825 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cc_src2
, 0, l1
);
826 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
830 static inline void gen_op_logic_cc(TCGv dst
)
832 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
835 gen_cc_NZ_icc(cpu_cc_dst
);
836 #ifdef TARGET_SPARC64
838 gen_cc_NZ_xcc(cpu_cc_dst
);
843 static inline void gen_op_eval_ba(TCGv dst
)
845 tcg_gen_movi_tl(dst
, 1);
849 static inline void gen_op_eval_be(TCGv dst
, TCGv src
)
851 gen_mov_reg_Z(dst
, src
);
855 static inline void gen_op_eval_ble(TCGv dst
, TCGv src
)
857 gen_mov_reg_N(cpu_tmp0
, src
);
858 gen_mov_reg_V(dst
, src
);
859 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
860 gen_mov_reg_Z(cpu_tmp0
, src
);
861 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
865 static inline void gen_op_eval_bl(TCGv dst
, TCGv src
)
867 gen_mov_reg_V(cpu_tmp0
, src
);
868 gen_mov_reg_N(dst
, src
);
869 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
873 static inline void gen_op_eval_bleu(TCGv dst
, TCGv src
)
875 gen_mov_reg_Z(cpu_tmp0
, src
);
876 gen_mov_reg_C(dst
, src
);
877 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
881 static inline void gen_op_eval_bcs(TCGv dst
, TCGv src
)
883 gen_mov_reg_C(dst
, src
);
887 static inline void gen_op_eval_bvs(TCGv dst
, TCGv src
)
889 gen_mov_reg_V(dst
, src
);
893 static inline void gen_op_eval_bn(TCGv dst
)
895 tcg_gen_movi_tl(dst
, 0);
899 static inline void gen_op_eval_bneg(TCGv dst
, TCGv src
)
901 gen_mov_reg_N(dst
, src
);
905 static inline void gen_op_eval_bne(TCGv dst
, TCGv src
)
907 gen_mov_reg_Z(dst
, src
);
908 tcg_gen_xori_tl(dst
, dst
, 0x1);
912 static inline void gen_op_eval_bg(TCGv dst
, TCGv src
)
914 gen_mov_reg_N(cpu_tmp0
, src
);
915 gen_mov_reg_V(dst
, src
);
916 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
917 gen_mov_reg_Z(cpu_tmp0
, src
);
918 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
919 tcg_gen_xori_tl(dst
, dst
, 0x1);
923 static inline void gen_op_eval_bge(TCGv dst
, TCGv src
)
925 gen_mov_reg_V(cpu_tmp0
, src
);
926 gen_mov_reg_N(dst
, src
);
927 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
928 tcg_gen_xori_tl(dst
, dst
, 0x1);
932 static inline void gen_op_eval_bgu(TCGv dst
, TCGv src
)
934 gen_mov_reg_Z(cpu_tmp0
, src
);
935 gen_mov_reg_C(dst
, src
);
936 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
937 tcg_gen_xori_tl(dst
, dst
, 0x1);
941 static inline void gen_op_eval_bcc(TCGv dst
, TCGv src
)
943 gen_mov_reg_C(dst
, src
);
944 tcg_gen_xori_tl(dst
, dst
, 0x1);
948 static inline void gen_op_eval_bpos(TCGv dst
, TCGv src
)
950 gen_mov_reg_N(dst
, src
);
951 tcg_gen_xori_tl(dst
, dst
, 0x1);
955 static inline void gen_op_eval_bvc(TCGv dst
, TCGv src
)
957 gen_mov_reg_V(dst
, src
);
958 tcg_gen_xori_tl(dst
, dst
, 0x1);
962 FPSR bit field FCC1 | FCC0:
968 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
969 unsigned int fcc_offset
)
971 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
972 tcg_gen_andi_tl(reg
, reg
, 0x1);
975 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
976 unsigned int fcc_offset
)
978 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
979 tcg_gen_andi_tl(reg
, reg
, 0x1);
983 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
984 unsigned int fcc_offset
)
986 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
987 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
988 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
991 // 1 or 2: FCC0 ^ FCC1
992 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
993 unsigned int fcc_offset
)
995 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
996 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
997 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1001 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
1002 unsigned int fcc_offset
)
1004 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1008 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
1009 unsigned int fcc_offset
)
1011 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1012 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1013 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1014 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1018 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
1019 unsigned int fcc_offset
)
1021 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1025 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
1026 unsigned int fcc_offset
)
1028 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1029 tcg_gen_xori_tl(dst
, dst
, 0x1);
1030 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1031 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1035 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
1036 unsigned int fcc_offset
)
1038 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1039 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1040 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1043 // 0: !(FCC0 | FCC1)
1044 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1045 unsigned int fcc_offset
)
1047 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1048 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1049 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1050 tcg_gen_xori_tl(dst
, dst
, 0x1);
1053 // 0 or 3: !(FCC0 ^ FCC1)
1054 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1055 unsigned int fcc_offset
)
1057 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1058 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1059 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1060 tcg_gen_xori_tl(dst
, dst
, 0x1);
1064 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1065 unsigned int fcc_offset
)
1067 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1068 tcg_gen_xori_tl(dst
, dst
, 0x1);
1071 // !1: !(FCC0 & !FCC1)
1072 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1073 unsigned int fcc_offset
)
1075 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1076 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1077 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1078 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1079 tcg_gen_xori_tl(dst
, dst
, 0x1);
1083 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1084 unsigned int fcc_offset
)
1086 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1087 tcg_gen_xori_tl(dst
, dst
, 0x1);
1090 // !2: !(!FCC0 & FCC1)
1091 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1092 unsigned int fcc_offset
)
1094 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1095 tcg_gen_xori_tl(dst
, dst
, 0x1);
1096 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1097 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1098 tcg_gen_xori_tl(dst
, dst
, 0x1);
1101 // !3: !(FCC0 & FCC1)
1102 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1103 unsigned int fcc_offset
)
1105 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1106 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1107 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1108 tcg_gen_xori_tl(dst
, dst
, 0x1);
1111 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1112 target_ulong pc2
, TCGv r_cond
)
1116 l1
= gen_new_label();
1118 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1120 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1123 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1126 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1127 target_ulong pc2
, TCGv r_cond
)
1131 l1
= gen_new_label();
1133 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1135 gen_goto_tb(dc
, 0, pc2
, pc1
);
1138 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1141 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1146 l1
= gen_new_label();
1147 l2
= gen_new_label();
1149 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1151 tcg_gen_movi_tl(cpu_npc
, npc1
);
1155 tcg_gen_movi_tl(cpu_npc
, npc2
);
1159 /* call this function before using the condition register as it may
1160 have been set for a jump */
1161 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1163 if (dc
->npc
== JUMP_PC
) {
1164 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1165 dc
->npc
= DYNAMIC_PC
;
1169 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1171 if (dc
->npc
== JUMP_PC
) {
1172 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1173 dc
->npc
= DYNAMIC_PC
;
1174 } else if (dc
->npc
!= DYNAMIC_PC
) {
1175 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1179 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1181 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1185 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1187 if (dc
->npc
== JUMP_PC
) {
1188 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1189 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1190 dc
->pc
= DYNAMIC_PC
;
1191 } else if (dc
->npc
== DYNAMIC_PC
) {
1192 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1193 dc
->pc
= DYNAMIC_PC
;
1199 static inline void gen_op_next_insn(void)
1201 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1202 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1205 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1209 #ifdef TARGET_SPARC64
1219 gen_op_eval_bn(r_dst
);
1222 gen_op_eval_be(r_dst
, r_src
);
1225 gen_op_eval_ble(r_dst
, r_src
);
1228 gen_op_eval_bl(r_dst
, r_src
);
1231 gen_op_eval_bleu(r_dst
, r_src
);
1234 gen_op_eval_bcs(r_dst
, r_src
);
1237 gen_op_eval_bneg(r_dst
, r_src
);
1240 gen_op_eval_bvs(r_dst
, r_src
);
1243 gen_op_eval_ba(r_dst
);
1246 gen_op_eval_bne(r_dst
, r_src
);
1249 gen_op_eval_bg(r_dst
, r_src
);
1252 gen_op_eval_bge(r_dst
, r_src
);
1255 gen_op_eval_bgu(r_dst
, r_src
);
1258 gen_op_eval_bcc(r_dst
, r_src
);
1261 gen_op_eval_bpos(r_dst
, r_src
);
1264 gen_op_eval_bvc(r_dst
, r_src
);
1269 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1271 unsigned int offset
;
1291 gen_op_eval_bn(r_dst
);
1294 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1297 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1300 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1303 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1306 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1309 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1312 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1315 gen_op_eval_ba(r_dst
);
1318 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1321 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1324 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1327 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1330 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1333 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1336 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1341 #ifdef TARGET_SPARC64
1343 static const int gen_tcg_cond_reg
[8] = {
1354 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1358 l1
= gen_new_label();
1359 tcg_gen_movi_tl(r_dst
, 0);
1360 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1361 tcg_gen_movi_tl(r_dst
, 1);
1366 /* XXX: potentially incorrect if dynamic npc */
1367 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1370 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1371 target_ulong target
= dc
->pc
+ offset
;
1374 /* unconditional not taken */
1376 dc
->pc
= dc
->npc
+ 4;
1377 dc
->npc
= dc
->pc
+ 4;
1380 dc
->npc
= dc
->pc
+ 4;
1382 } else if (cond
== 0x8) {
1383 /* unconditional taken */
1386 dc
->npc
= dc
->pc
+ 4;
1392 flush_cond(dc
, r_cond
);
1393 gen_cond(r_cond
, cc
, cond
);
1395 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1399 dc
->jump_pc
[0] = target
;
1400 dc
->jump_pc
[1] = dc
->npc
+ 4;
1406 /* XXX: potentially incorrect if dynamic npc */
1407 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1410 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1411 target_ulong target
= dc
->pc
+ offset
;
1414 /* unconditional not taken */
1416 dc
->pc
= dc
->npc
+ 4;
1417 dc
->npc
= dc
->pc
+ 4;
1420 dc
->npc
= dc
->pc
+ 4;
1422 } else if (cond
== 0x8) {
1423 /* unconditional taken */
1426 dc
->npc
= dc
->pc
+ 4;
1432 flush_cond(dc
, r_cond
);
1433 gen_fcond(r_cond
, cc
, cond
);
1435 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1439 dc
->jump_pc
[0] = target
;
1440 dc
->jump_pc
[1] = dc
->npc
+ 4;
1446 #ifdef TARGET_SPARC64
1447 /* XXX: potentially incorrect if dynamic npc */
1448 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1449 TCGv r_cond
, TCGv r_reg
)
1451 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1452 target_ulong target
= dc
->pc
+ offset
;
1454 flush_cond(dc
, r_cond
);
1455 gen_cond_reg(r_cond
, cond
, r_reg
);
1457 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1461 dc
->jump_pc
[0] = target
;
1462 dc
->jump_pc
[1] = dc
->npc
+ 4;
1467 static GenOpFunc
* const gen_fcmpd
[4] = {
1474 static GenOpFunc
* const gen_fcmpq
[4] = {
1481 static GenOpFunc
* const gen_fcmped
[4] = {
1488 static GenOpFunc
* const gen_fcmpeq
[4] = {
1495 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1499 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1502 tcg_gen_helper_0_2(helper_fcmps_fcc1
, r_rs1
, r_rs2
);
1505 tcg_gen_helper_0_2(helper_fcmps_fcc2
, r_rs1
, r_rs2
);
1508 tcg_gen_helper_0_2(helper_fcmps_fcc3
, r_rs1
, r_rs2
);
1513 static inline void gen_op_fcmpd(int fccno
)
1515 tcg_gen_helper_0_0(gen_fcmpd
[fccno
]);
1518 static inline void gen_op_fcmpq(int fccno
)
1520 tcg_gen_helper_0_0(gen_fcmpq
[fccno
]);
1523 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1527 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1530 tcg_gen_helper_0_2(helper_fcmpes_fcc1
, r_rs1
, r_rs2
);
1533 tcg_gen_helper_0_2(helper_fcmpes_fcc2
, r_rs1
, r_rs2
);
1536 tcg_gen_helper_0_2(helper_fcmpes_fcc3
, r_rs1
, r_rs2
);
1541 static inline void gen_op_fcmped(int fccno
)
1543 tcg_gen_helper_0_0(gen_fcmped
[fccno
]);
1546 static inline void gen_op_fcmpeq(int fccno
)
1548 tcg_gen_helper_0_0(gen_fcmpeq
[fccno
]);
1553 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1555 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1558 static inline void gen_op_fcmpd(int fccno
)
1560 tcg_gen_helper_0_0(helper_fcmpd
);
1563 static inline void gen_op_fcmpq(int fccno
)
1565 tcg_gen_helper_0_0(helper_fcmpq
);
1568 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1570 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1573 static inline void gen_op_fcmped(int fccno
)
1575 tcg_gen_helper_0_0(helper_fcmped
);
1578 static inline void gen_op_fcmpeq(int fccno
)
1580 tcg_gen_helper_0_0(helper_fcmpeq
);
1584 static inline void gen_op_fpexception_im(int fsr_flags
)
1588 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1589 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1590 r_const
= tcg_const_i32(TT_FP_EXCP
);
1591 tcg_gen_helper_0_1(raise_exception
, r_const
);
1592 tcg_temp_free(r_const
);
1595 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1597 #if !defined(CONFIG_USER_ONLY)
1598 if (!dc
->fpu_enabled
) {
1601 save_state(dc
, r_cond
);
1602 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1603 tcg_gen_helper_0_1(raise_exception
, r_const
);
1604 tcg_temp_free(r_const
);
1612 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1614 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
1617 static inline void gen_clear_float_exceptions(void)
1619 tcg_gen_helper_0_0(helper_clear_float_exceptions
);
1623 #ifdef TARGET_SPARC64
1624 static inline TCGv
gen_get_asi(int insn
, TCGv r_addr
)
1630 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1631 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1633 asi
= GET_FIELD(insn
, 19, 26);
1634 r_asi
= tcg_const_i32(asi
);
1639 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1642 TCGv r_asi
, r_size
, r_sign
;
1644 r_asi
= gen_get_asi(insn
, addr
);
1645 r_size
= tcg_const_i32(size
);
1646 r_sign
= tcg_const_i32(sign
);
1647 tcg_gen_helper_1_4(helper_ld_asi
, dst
, addr
, r_asi
, r_size
, r_sign
);
1648 tcg_temp_free(r_sign
);
1649 tcg_temp_free(r_size
);
1650 tcg_temp_free(r_asi
);
1653 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1657 r_asi
= gen_get_asi(insn
, addr
);
1658 r_size
= tcg_const_i32(size
);
1659 tcg_gen_helper_0_4(helper_st_asi
, addr
, src
, r_asi
, r_size
);
1660 tcg_temp_free(r_size
);
1661 tcg_temp_free(r_asi
);
1664 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1666 TCGv r_asi
, r_size
, r_rd
;
1668 r_asi
= gen_get_asi(insn
, addr
);
1669 r_size
= tcg_const_i32(size
);
1670 r_rd
= tcg_const_i32(rd
);
1671 tcg_gen_helper_0_4(helper_ldf_asi
, addr
, r_asi
, r_size
, r_rd
);
1672 tcg_temp_free(r_rd
);
1673 tcg_temp_free(r_size
);
1674 tcg_temp_free(r_asi
);
1677 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1679 TCGv r_asi
, r_size
, r_rd
;
1681 r_asi
= gen_get_asi(insn
, addr
);
1682 r_size
= tcg_const_i32(size
);
1683 r_rd
= tcg_const_i32(rd
);
1684 tcg_gen_helper_0_4(helper_stf_asi
, addr
, r_asi
, r_size
, r_rd
);
1685 tcg_temp_free(r_rd
);
1686 tcg_temp_free(r_size
);
1687 tcg_temp_free(r_asi
);
1690 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1692 TCGv r_asi
, r_size
, r_sign
;
1694 r_asi
= gen_get_asi(insn
, addr
);
1695 r_size
= tcg_const_i32(4);
1696 r_sign
= tcg_const_i32(0);
1697 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1698 tcg_temp_free(r_sign
);
1699 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1700 tcg_temp_free(r_size
);
1701 tcg_temp_free(r_asi
);
1702 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1705 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1709 r_asi
= gen_get_asi(insn
, addr
);
1710 r_rd
= tcg_const_i32(rd
);
1711 tcg_gen_helper_0_3(helper_ldda_asi
, addr
, r_asi
, r_rd
);
1712 tcg_temp_free(r_rd
);
1713 tcg_temp_free(r_asi
);
1716 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1718 TCGv r_temp
, r_asi
, r_size
;
1720 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
1721 gen_movl_reg_TN(rd
+ 1, r_temp
);
1722 tcg_gen_helper_1_2(helper_pack64
, cpu_tmp64
, hi
,
1724 tcg_temp_free(r_temp
);
1725 r_asi
= gen_get_asi(insn
, addr
);
1726 r_size
= tcg_const_i32(8);
1727 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1728 tcg_temp_free(r_size
);
1729 tcg_temp_free(r_asi
);
1732 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1737 r_val1
= tcg_temp_new(TCG_TYPE_TL
);
1738 gen_movl_reg_TN(rd
, r_val1
);
1739 r_asi
= gen_get_asi(insn
, addr
);
1740 tcg_gen_helper_1_4(helper_cas_asi
, dst
, addr
, r_val1
, val2
, r_asi
);
1741 tcg_temp_free(r_asi
);
1742 tcg_temp_free(r_val1
);
1745 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1750 gen_movl_reg_TN(rd
, cpu_tmp64
);
1751 r_asi
= gen_get_asi(insn
, addr
);
1752 tcg_gen_helper_1_4(helper_casx_asi
, dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1753 tcg_temp_free(r_asi
);
1756 #elif !defined(CONFIG_USER_ONLY)
1758 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1761 TCGv r_asi
, r_size
, r_sign
;
1763 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1764 r_size
= tcg_const_i32(size
);
1765 r_sign
= tcg_const_i32(sign
);
1766 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1767 tcg_temp_free(r_sign
);
1768 tcg_temp_free(r_size
);
1769 tcg_temp_free(r_asi
);
1770 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1773 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1777 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1778 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1779 r_size
= tcg_const_i32(size
);
1780 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1781 tcg_temp_free(r_size
);
1782 tcg_temp_free(r_asi
);
1785 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1787 TCGv r_asi
, r_size
, r_sign
;
1789 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1790 r_size
= tcg_const_i32(4);
1791 r_sign
= tcg_const_i32(0);
1792 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1793 tcg_temp_free(r_sign
);
1794 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1795 tcg_temp_free(r_size
);
1796 tcg_temp_free(r_asi
);
1797 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1800 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1802 TCGv r_asi
, r_size
, r_sign
;
1804 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1805 r_size
= tcg_const_i32(8);
1806 r_sign
= tcg_const_i32(0);
1807 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1808 tcg_temp_free(r_sign
);
1809 tcg_temp_free(r_size
);
1810 tcg_temp_free(r_asi
);
1811 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1812 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1813 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1814 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1815 gen_movl_TN_reg(rd
, hi
);
1818 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1820 TCGv r_temp
, r_asi
, r_size
;
1822 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
1823 gen_movl_reg_TN(rd
+ 1, r_temp
);
1824 tcg_gen_helper_1_2(helper_pack64
, cpu_tmp64
, hi
, r_temp
);
1825 tcg_temp_free(r_temp
);
1826 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1827 r_size
= tcg_const_i32(8);
1828 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1829 tcg_temp_free(r_size
);
1830 tcg_temp_free(r_asi
);
1834 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1835 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1837 TCGv r_val
, r_asi
, r_size
;
1839 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1841 r_val
= tcg_const_i64(0xffULL
);
1842 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1843 r_size
= tcg_const_i32(1);
1844 tcg_gen_helper_0_4(helper_st_asi
, addr
, r_val
, r_asi
, r_size
);
1845 tcg_temp_free(r_size
);
1846 tcg_temp_free(r_asi
);
1847 tcg_temp_free(r_val
);
1851 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1856 rs1
= GET_FIELD(insn
, 13, 17);
1858 r_rs1
= tcg_const_tl(0); // XXX how to free?
1860 r_rs1
= cpu_gregs
[rs1
];
1862 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1866 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1871 if (IS_IMM
) { /* immediate */
1872 rs2
= GET_FIELDs(insn
, 19, 31);
1873 r_rs2
= tcg_const_tl((int)rs2
); // XXX how to free?
1874 } else { /* register */
1875 rs2
= GET_FIELD(insn
, 27, 31);
1877 r_rs2
= tcg_const_tl(0); // XXX how to free?
1879 r_rs2
= cpu_gregs
[rs2
];
1881 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
1886 #define CHECK_IU_FEATURE(dc, FEATURE) \
1887 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1889 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1890 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1893 /* before an instruction, dc->pc must be static */
1894 static void disas_sparc_insn(DisasContext
* dc
)
1896 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1898 if (unlikely(loglevel
& CPU_LOG_TB_OP
))
1899 tcg_gen_debug_insn_start(dc
->pc
);
1900 insn
= ldl_code(dc
->pc
);
1901 opc
= GET_FIELD(insn
, 0, 1);
1903 rd
= GET_FIELD(insn
, 2, 6);
1905 cpu_src1
= tcg_temp_new(TCG_TYPE_TL
); // const
1906 cpu_src2
= tcg_temp_new(TCG_TYPE_TL
); // const
1909 case 0: /* branches/sethi */
1911 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1914 #ifdef TARGET_SPARC64
1915 case 0x1: /* V9 BPcc */
1919 target
= GET_FIELD_SP(insn
, 0, 18);
1920 target
= sign_extend(target
, 18);
1922 cc
= GET_FIELD_SP(insn
, 20, 21);
1924 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1926 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1931 case 0x3: /* V9 BPr */
1933 target
= GET_FIELD_SP(insn
, 0, 13) |
1934 (GET_FIELD_SP(insn
, 20, 21) << 14);
1935 target
= sign_extend(target
, 16);
1937 cpu_src1
= get_src1(insn
, cpu_src1
);
1938 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1941 case 0x5: /* V9 FBPcc */
1943 int cc
= GET_FIELD_SP(insn
, 20, 21);
1944 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1946 target
= GET_FIELD_SP(insn
, 0, 18);
1947 target
= sign_extend(target
, 19);
1949 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1953 case 0x7: /* CBN+x */
1958 case 0x2: /* BN+x */
1960 target
= GET_FIELD(insn
, 10, 31);
1961 target
= sign_extend(target
, 22);
1963 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1966 case 0x6: /* FBN+x */
1968 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1970 target
= GET_FIELD(insn
, 10, 31);
1971 target
= sign_extend(target
, 22);
1973 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1976 case 0x4: /* SETHI */
1978 uint32_t value
= GET_FIELD(insn
, 10, 31);
1981 r_const
= tcg_const_tl(value
<< 10);
1982 gen_movl_TN_reg(rd
, r_const
);
1983 tcg_temp_free(r_const
);
1986 case 0x0: /* UNIMPL */
1995 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1998 r_const
= tcg_const_tl(dc
->pc
);
1999 gen_movl_TN_reg(15, r_const
);
2000 tcg_temp_free(r_const
);
2002 gen_mov_pc_npc(dc
, cpu_cond
);
2006 case 2: /* FPU & Logical Operations */
2008 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2009 if (xop
== 0x3a) { /* generate trap */
2012 cpu_src1
= get_src1(insn
, cpu_src1
);
2014 rs2
= GET_FIELD(insn
, 25, 31);
2015 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2017 rs2
= GET_FIELD(insn
, 27, 31);
2019 gen_movl_reg_TN(rs2
, cpu_src2
);
2020 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2022 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2024 cond
= GET_FIELD(insn
, 3, 6);
2026 save_state(dc
, cpu_cond
);
2027 tcg_gen_helper_0_1(helper_trap
, cpu_dst
);
2028 } else if (cond
!= 0) {
2029 TCGv r_cond
= tcg_temp_new(TCG_TYPE_TL
);
2030 #ifdef TARGET_SPARC64
2032 int cc
= GET_FIELD_SP(insn
, 11, 12);
2034 save_state(dc
, cpu_cond
);
2036 gen_cond(r_cond
, 0, cond
);
2038 gen_cond(r_cond
, 1, cond
);
2042 save_state(dc
, cpu_cond
);
2043 gen_cond(r_cond
, 0, cond
);
2045 tcg_gen_helper_0_2(helper_trapcc
, cpu_dst
, r_cond
);
2046 tcg_temp_free(r_cond
);
2052 } else if (xop
== 0x28) {
2053 rs1
= GET_FIELD(insn
, 13, 17);
2056 #ifndef TARGET_SPARC64
2057 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2058 manual, rdy on the microSPARC
2060 case 0x0f: /* stbar in the SPARCv8 manual,
2061 rdy on the microSPARC II */
2062 case 0x10 ... 0x1f: /* implementation-dependent in the
2063 SPARCv8 manual, rdy on the
2066 gen_movl_TN_reg(rd
, cpu_y
);
2068 #ifdef TARGET_SPARC64
2069 case 0x2: /* V9 rdccr */
2070 tcg_gen_helper_1_0(helper_rdccr
, cpu_dst
);
2071 gen_movl_TN_reg(rd
, cpu_dst
);
2073 case 0x3: /* V9 rdasi */
2074 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2075 gen_movl_TN_reg(rd
, cpu_dst
);
2077 case 0x4: /* V9 rdtick */
2081 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2082 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2083 offsetof(CPUState
, tick
));
2084 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2086 tcg_temp_free(r_tickptr
);
2087 gen_movl_TN_reg(rd
, cpu_dst
);
2090 case 0x5: /* V9 rdpc */
2094 r_const
= tcg_const_tl(dc
->pc
);
2095 gen_movl_TN_reg(rd
, r_const
);
2096 tcg_temp_free(r_const
);
2099 case 0x6: /* V9 rdfprs */
2100 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2101 gen_movl_TN_reg(rd
, cpu_dst
);
2103 case 0xf: /* V9 membar */
2104 break; /* no effect */
2105 case 0x13: /* Graphics Status */
2106 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2108 gen_movl_TN_reg(rd
, cpu_gsr
);
2110 case 0x17: /* Tick compare */
2111 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2113 case 0x18: /* System tick */
2117 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2118 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2119 offsetof(CPUState
, stick
));
2120 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2122 tcg_temp_free(r_tickptr
);
2123 gen_movl_TN_reg(rd
, cpu_dst
);
2126 case 0x19: /* System tick compare */
2127 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2129 case 0x10: /* Performance Control */
2130 case 0x11: /* Performance Instrumentation Counter */
2131 case 0x12: /* Dispatch Control */
2132 case 0x14: /* Softint set, WO */
2133 case 0x15: /* Softint clear, WO */
2134 case 0x16: /* Softint write */
2139 #if !defined(CONFIG_USER_ONLY)
2140 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2141 #ifndef TARGET_SPARC64
2142 if (!supervisor(dc
))
2144 tcg_gen_helper_1_0(helper_rdpsr
, cpu_dst
);
2146 CHECK_IU_FEATURE(dc
, HYPV
);
2147 if (!hypervisor(dc
))
2149 rs1
= GET_FIELD(insn
, 13, 17);
2152 // gen_op_rdhpstate();
2155 // gen_op_rdhtstate();
2158 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2161 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2164 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2166 case 31: // hstick_cmpr
2167 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2173 gen_movl_TN_reg(rd
, cpu_dst
);
2175 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2176 if (!supervisor(dc
))
2178 #ifdef TARGET_SPARC64
2179 rs1
= GET_FIELD(insn
, 13, 17);
2185 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2186 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2187 offsetof(CPUState
, tsptr
));
2188 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2189 offsetof(trap_state
, tpc
));
2190 tcg_temp_free(r_tsptr
);
2197 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2198 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2199 offsetof(CPUState
, tsptr
));
2200 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2201 offsetof(trap_state
, tnpc
));
2202 tcg_temp_free(r_tsptr
);
2209 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2210 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2211 offsetof(CPUState
, tsptr
));
2212 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2213 offsetof(trap_state
, tstate
));
2214 tcg_temp_free(r_tsptr
);
2221 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2222 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2223 offsetof(CPUState
, tsptr
));
2224 tcg_gen_ld_i32(cpu_tmp0
, r_tsptr
,
2225 offsetof(trap_state
, tt
));
2226 tcg_temp_free(r_tsptr
);
2233 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2234 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2235 offsetof(CPUState
, tick
));
2236 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_tmp0
,
2238 gen_movl_TN_reg(rd
, cpu_tmp0
);
2239 tcg_temp_free(r_tickptr
);
2243 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2246 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2247 offsetof(CPUSPARCState
, pstate
));
2248 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2251 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2252 offsetof(CPUSPARCState
, tl
));
2253 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2256 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2257 offsetof(CPUSPARCState
, psrpil
));
2258 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2261 tcg_gen_helper_1_0(helper_rdcwp
, cpu_tmp0
);
2264 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2265 offsetof(CPUSPARCState
, cansave
));
2266 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2268 case 11: // canrestore
2269 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2270 offsetof(CPUSPARCState
, canrestore
));
2271 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2273 case 12: // cleanwin
2274 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2275 offsetof(CPUSPARCState
, cleanwin
));
2276 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2278 case 13: // otherwin
2279 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2280 offsetof(CPUSPARCState
, otherwin
));
2281 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2284 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2285 offsetof(CPUSPARCState
, wstate
));
2286 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2288 case 16: // UA2005 gl
2289 CHECK_IU_FEATURE(dc
, GL
);
2290 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2291 offsetof(CPUSPARCState
, gl
));
2292 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2294 case 26: // UA2005 strand status
2295 CHECK_IU_FEATURE(dc
, HYPV
);
2296 if (!hypervisor(dc
))
2298 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_ssr
);
2301 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2308 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2310 gen_movl_TN_reg(rd
, cpu_tmp0
);
2312 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2313 #ifdef TARGET_SPARC64
2314 save_state(dc
, cpu_cond
);
2315 tcg_gen_helper_0_0(helper_flushw
);
2317 if (!supervisor(dc
))
2319 gen_movl_TN_reg(rd
, cpu_tbr
);
2323 } else if (xop
== 0x34) { /* FPU Operations */
2324 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2326 gen_op_clear_ieee_excp_and_FTT();
2327 rs1
= GET_FIELD(insn
, 13, 17);
2328 rs2
= GET_FIELD(insn
, 27, 31);
2329 xop
= GET_FIELD(insn
, 18, 26);
2331 case 0x1: /* fmovs */
2332 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2334 case 0x5: /* fnegs */
2335 tcg_gen_helper_1_1(helper_fnegs
, cpu_fpr
[rd
],
2338 case 0x9: /* fabss */
2339 tcg_gen_helper_1_1(helper_fabss
, cpu_fpr
[rd
],
2342 case 0x29: /* fsqrts */
2343 CHECK_FPU_FEATURE(dc
, FSQRT
);
2344 gen_clear_float_exceptions();
2345 tcg_gen_helper_1_1(helper_fsqrts
, cpu_tmp32
,
2347 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2348 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2350 case 0x2a: /* fsqrtd */
2351 CHECK_FPU_FEATURE(dc
, FSQRT
);
2352 gen_op_load_fpr_DT1(DFPREG(rs2
));
2353 gen_clear_float_exceptions();
2354 tcg_gen_helper_0_0(helper_fsqrtd
);
2355 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2356 gen_op_store_DT0_fpr(DFPREG(rd
));
2358 case 0x2b: /* fsqrtq */
2359 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2360 gen_op_load_fpr_QT1(QFPREG(rs2
));
2361 gen_clear_float_exceptions();
2362 tcg_gen_helper_0_0(helper_fsqrtq
);
2363 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2364 gen_op_store_QT0_fpr(QFPREG(rd
));
2366 case 0x41: /* fadds */
2367 gen_clear_float_exceptions();
2368 tcg_gen_helper_1_2(helper_fadds
, cpu_tmp32
,
2369 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2370 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2371 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2374 gen_op_load_fpr_DT0(DFPREG(rs1
));
2375 gen_op_load_fpr_DT1(DFPREG(rs2
));
2376 gen_clear_float_exceptions();
2377 tcg_gen_helper_0_0(helper_faddd
);
2378 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2379 gen_op_store_DT0_fpr(DFPREG(rd
));
2381 case 0x43: /* faddq */
2382 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2383 gen_op_load_fpr_QT0(QFPREG(rs1
));
2384 gen_op_load_fpr_QT1(QFPREG(rs2
));
2385 gen_clear_float_exceptions();
2386 tcg_gen_helper_0_0(helper_faddq
);
2387 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2388 gen_op_store_QT0_fpr(QFPREG(rd
));
2390 case 0x45: /* fsubs */
2391 gen_clear_float_exceptions();
2392 tcg_gen_helper_1_2(helper_fsubs
, cpu_tmp32
,
2393 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2394 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2395 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2398 gen_op_load_fpr_DT0(DFPREG(rs1
));
2399 gen_op_load_fpr_DT1(DFPREG(rs2
));
2400 gen_clear_float_exceptions();
2401 tcg_gen_helper_0_0(helper_fsubd
);
2402 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2403 gen_op_store_DT0_fpr(DFPREG(rd
));
2405 case 0x47: /* fsubq */
2406 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2407 gen_op_load_fpr_QT0(QFPREG(rs1
));
2408 gen_op_load_fpr_QT1(QFPREG(rs2
));
2409 gen_clear_float_exceptions();
2410 tcg_gen_helper_0_0(helper_fsubq
);
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2412 gen_op_store_QT0_fpr(QFPREG(rd
));
2414 case 0x49: /* fmuls */
2415 CHECK_FPU_FEATURE(dc
, FMUL
);
2416 gen_clear_float_exceptions();
2417 tcg_gen_helper_1_2(helper_fmuls
, cpu_tmp32
,
2418 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2419 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2420 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2422 case 0x4a: /* fmuld */
2423 CHECK_FPU_FEATURE(dc
, FMUL
);
2424 gen_op_load_fpr_DT0(DFPREG(rs1
));
2425 gen_op_load_fpr_DT1(DFPREG(rs2
));
2426 gen_clear_float_exceptions();
2427 tcg_gen_helper_0_0(helper_fmuld
);
2428 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2429 gen_op_store_DT0_fpr(DFPREG(rd
));
2431 case 0x4b: /* fmulq */
2432 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2433 CHECK_FPU_FEATURE(dc
, FMUL
);
2434 gen_op_load_fpr_QT0(QFPREG(rs1
));
2435 gen_op_load_fpr_QT1(QFPREG(rs2
));
2436 gen_clear_float_exceptions();
2437 tcg_gen_helper_0_0(helper_fmulq
);
2438 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2439 gen_op_store_QT0_fpr(QFPREG(rd
));
2441 case 0x4d: /* fdivs */
2442 gen_clear_float_exceptions();
2443 tcg_gen_helper_1_2(helper_fdivs
, cpu_tmp32
,
2444 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2445 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2446 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2449 gen_op_load_fpr_DT0(DFPREG(rs1
));
2450 gen_op_load_fpr_DT1(DFPREG(rs2
));
2451 gen_clear_float_exceptions();
2452 tcg_gen_helper_0_0(helper_fdivd
);
2453 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2454 gen_op_store_DT0_fpr(DFPREG(rd
));
2456 case 0x4f: /* fdivq */
2457 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2458 gen_op_load_fpr_QT0(QFPREG(rs1
));
2459 gen_op_load_fpr_QT1(QFPREG(rs2
));
2460 gen_clear_float_exceptions();
2461 tcg_gen_helper_0_0(helper_fdivq
);
2462 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2463 gen_op_store_QT0_fpr(QFPREG(rd
));
2465 case 0x69: /* fsmuld */
2466 CHECK_FPU_FEATURE(dc
, FSMULD
);
2467 gen_clear_float_exceptions();
2468 tcg_gen_helper_0_2(helper_fsmuld
, cpu_fpr
[rs1
],
2470 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2471 gen_op_store_DT0_fpr(DFPREG(rd
));
2473 case 0x6e: /* fdmulq */
2474 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2475 gen_op_load_fpr_DT0(DFPREG(rs1
));
2476 gen_op_load_fpr_DT1(DFPREG(rs2
));
2477 gen_clear_float_exceptions();
2478 tcg_gen_helper_0_0(helper_fdmulq
);
2479 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2480 gen_op_store_QT0_fpr(QFPREG(rd
));
2482 case 0xc4: /* fitos */
2483 gen_clear_float_exceptions();
2484 tcg_gen_helper_1_1(helper_fitos
, cpu_tmp32
,
2486 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2487 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2489 case 0xc6: /* fdtos */
2490 gen_op_load_fpr_DT1(DFPREG(rs2
));
2491 gen_clear_float_exceptions();
2492 tcg_gen_helper_1_0(helper_fdtos
, cpu_tmp32
);
2493 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2494 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2496 case 0xc7: /* fqtos */
2497 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2498 gen_op_load_fpr_QT1(QFPREG(rs2
));
2499 gen_clear_float_exceptions();
2500 tcg_gen_helper_1_0(helper_fqtos
, cpu_tmp32
);
2501 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2502 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2504 case 0xc8: /* fitod */
2505 tcg_gen_helper_0_1(helper_fitod
, cpu_fpr
[rs2
]);
2506 gen_op_store_DT0_fpr(DFPREG(rd
));
2508 case 0xc9: /* fstod */
2509 tcg_gen_helper_0_1(helper_fstod
, cpu_fpr
[rs2
]);
2510 gen_op_store_DT0_fpr(DFPREG(rd
));
2512 case 0xcb: /* fqtod */
2513 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2514 gen_op_load_fpr_QT1(QFPREG(rs2
));
2515 gen_clear_float_exceptions();
2516 tcg_gen_helper_0_0(helper_fqtod
);
2517 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2518 gen_op_store_DT0_fpr(DFPREG(rd
));
2520 case 0xcc: /* fitoq */
2521 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2522 tcg_gen_helper_0_1(helper_fitoq
, cpu_fpr
[rs2
]);
2523 gen_op_store_QT0_fpr(QFPREG(rd
));
2525 case 0xcd: /* fstoq */
2526 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2527 tcg_gen_helper_0_1(helper_fstoq
, cpu_fpr
[rs2
]);
2528 gen_op_store_QT0_fpr(QFPREG(rd
));
2530 case 0xce: /* fdtoq */
2531 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2532 gen_op_load_fpr_DT1(DFPREG(rs2
));
2533 tcg_gen_helper_0_0(helper_fdtoq
);
2534 gen_op_store_QT0_fpr(QFPREG(rd
));
2536 case 0xd1: /* fstoi */
2537 gen_clear_float_exceptions();
2538 tcg_gen_helper_1_1(helper_fstoi
, cpu_tmp32
,
2540 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2541 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2543 case 0xd2: /* fdtoi */
2544 gen_op_load_fpr_DT1(DFPREG(rs2
));
2545 gen_clear_float_exceptions();
2546 tcg_gen_helper_1_0(helper_fdtoi
, cpu_tmp32
);
2547 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2548 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2550 case 0xd3: /* fqtoi */
2551 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2552 gen_op_load_fpr_QT1(QFPREG(rs2
));
2553 gen_clear_float_exceptions();
2554 tcg_gen_helper_1_0(helper_fqtoi
, cpu_tmp32
);
2555 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2556 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2558 #ifdef TARGET_SPARC64
2559 case 0x2: /* V9 fmovd */
2560 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)],
2561 cpu_fpr
[DFPREG(rs2
)]);
2562 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2563 cpu_fpr
[DFPREG(rs2
) + 1]);
2565 case 0x3: /* V9 fmovq */
2566 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2567 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)],
2568 cpu_fpr
[QFPREG(rs2
)]);
2569 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2570 cpu_fpr
[QFPREG(rs2
) + 1]);
2571 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2572 cpu_fpr
[QFPREG(rs2
) + 2]);
2573 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2574 cpu_fpr
[QFPREG(rs2
) + 3]);
2576 case 0x6: /* V9 fnegd */
2577 gen_op_load_fpr_DT1(DFPREG(rs2
));
2578 tcg_gen_helper_0_0(helper_fnegd
);
2579 gen_op_store_DT0_fpr(DFPREG(rd
));
2581 case 0x7: /* V9 fnegq */
2582 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2583 gen_op_load_fpr_QT1(QFPREG(rs2
));
2584 tcg_gen_helper_0_0(helper_fnegq
);
2585 gen_op_store_QT0_fpr(QFPREG(rd
));
2587 case 0xa: /* V9 fabsd */
2588 gen_op_load_fpr_DT1(DFPREG(rs2
));
2589 tcg_gen_helper_0_0(helper_fabsd
);
2590 gen_op_store_DT0_fpr(DFPREG(rd
));
2592 case 0xb: /* V9 fabsq */
2593 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2594 gen_op_load_fpr_QT1(QFPREG(rs2
));
2595 tcg_gen_helper_0_0(helper_fabsq
);
2596 gen_op_store_QT0_fpr(QFPREG(rd
));
2598 case 0x81: /* V9 fstox */
2599 gen_clear_float_exceptions();
2600 tcg_gen_helper_0_1(helper_fstox
, cpu_fpr
[rs2
]);
2601 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2602 gen_op_store_DT0_fpr(DFPREG(rd
));
2604 case 0x82: /* V9 fdtox */
2605 gen_op_load_fpr_DT1(DFPREG(rs2
));
2606 gen_clear_float_exceptions();
2607 tcg_gen_helper_0_0(helper_fdtox
);
2608 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2609 gen_op_store_DT0_fpr(DFPREG(rd
));
2611 case 0x83: /* V9 fqtox */
2612 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2613 gen_op_load_fpr_QT1(QFPREG(rs2
));
2614 gen_clear_float_exceptions();
2615 tcg_gen_helper_0_0(helper_fqtox
);
2616 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2617 gen_op_store_DT0_fpr(DFPREG(rd
));
2619 case 0x84: /* V9 fxtos */
2620 gen_op_load_fpr_DT1(DFPREG(rs2
));
2621 gen_clear_float_exceptions();
2622 tcg_gen_helper_1_0(helper_fxtos
, cpu_tmp32
);
2623 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2624 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2626 case 0x88: /* V9 fxtod */
2627 gen_op_load_fpr_DT1(DFPREG(rs2
));
2628 gen_clear_float_exceptions();
2629 tcg_gen_helper_0_0(helper_fxtod
);
2630 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2631 gen_op_store_DT0_fpr(DFPREG(rd
));
2633 case 0x8c: /* V9 fxtoq */
2634 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2635 gen_op_load_fpr_DT1(DFPREG(rs2
));
2636 gen_clear_float_exceptions();
2637 tcg_gen_helper_0_0(helper_fxtoq
);
2638 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2639 gen_op_store_QT0_fpr(QFPREG(rd
));
2645 } else if (xop
== 0x35) { /* FPU Operations */
2646 #ifdef TARGET_SPARC64
2649 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2651 gen_op_clear_ieee_excp_and_FTT();
2652 rs1
= GET_FIELD(insn
, 13, 17);
2653 rs2
= GET_FIELD(insn
, 27, 31);
2654 xop
= GET_FIELD(insn
, 18, 26);
2655 #ifdef TARGET_SPARC64
2656 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2659 l1
= gen_new_label();
2660 cond
= GET_FIELD_SP(insn
, 14, 17);
2661 cpu_src1
= get_src1(insn
, cpu_src1
);
2662 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2664 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2667 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2670 l1
= gen_new_label();
2671 cond
= GET_FIELD_SP(insn
, 14, 17);
2672 cpu_src1
= get_src1(insn
, cpu_src1
);
2673 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2675 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2676 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2679 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2682 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2683 l1
= gen_new_label();
2684 cond
= GET_FIELD_SP(insn
, 14, 17);
2685 cpu_src1
= get_src1(insn
, cpu_src1
);
2686 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2688 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2689 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2690 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2691 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2697 #ifdef TARGET_SPARC64
2698 #define FMOVSCC(fcc) \
2703 l1 = gen_new_label(); \
2704 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2705 cond = GET_FIELD_SP(insn, 14, 17); \
2706 gen_fcond(r_cond, fcc, cond); \
2707 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2709 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2710 gen_set_label(l1); \
2711 tcg_temp_free(r_cond); \
2713 #define FMOVDCC(fcc) \
2718 l1 = gen_new_label(); \
2719 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2720 cond = GET_FIELD_SP(insn, 14, 17); \
2721 gen_fcond(r_cond, fcc, cond); \
2722 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2724 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2725 cpu_fpr[DFPREG(rs2)]); \
2726 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2727 cpu_fpr[DFPREG(rs2) + 1]); \
2728 gen_set_label(l1); \
2729 tcg_temp_free(r_cond); \
2731 #define FMOVQCC(fcc) \
2736 l1 = gen_new_label(); \
2737 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2738 cond = GET_FIELD_SP(insn, 14, 17); \
2739 gen_fcond(r_cond, fcc, cond); \
2740 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2742 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2743 cpu_fpr[QFPREG(rs2)]); \
2744 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2745 cpu_fpr[QFPREG(rs2) + 1]); \
2746 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2747 cpu_fpr[QFPREG(rs2) + 2]); \
2748 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2749 cpu_fpr[QFPREG(rs2) + 3]); \
2750 gen_set_label(l1); \
2751 tcg_temp_free(r_cond); \
2753 case 0x001: /* V9 fmovscc %fcc0 */
2756 case 0x002: /* V9 fmovdcc %fcc0 */
2759 case 0x003: /* V9 fmovqcc %fcc0 */
2760 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2763 case 0x041: /* V9 fmovscc %fcc1 */
2766 case 0x042: /* V9 fmovdcc %fcc1 */
2769 case 0x043: /* V9 fmovqcc %fcc1 */
2770 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2773 case 0x081: /* V9 fmovscc %fcc2 */
2776 case 0x082: /* V9 fmovdcc %fcc2 */
2779 case 0x083: /* V9 fmovqcc %fcc2 */
2780 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2783 case 0x0c1: /* V9 fmovscc %fcc3 */
2786 case 0x0c2: /* V9 fmovdcc %fcc3 */
2789 case 0x0c3: /* V9 fmovqcc %fcc3 */
2790 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2796 #define FMOVCC(size_FDQ, icc) \
2801 l1 = gen_new_label(); \
2802 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2803 cond = GET_FIELD_SP(insn, 14, 17); \
2804 gen_cond(r_cond, icc, cond); \
2805 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2807 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2808 (glue(size_FDQ, FPREG(rs2))); \
2809 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2810 (glue(size_FDQ, FPREG(rd))); \
2811 gen_set_label(l1); \
2812 tcg_temp_free(r_cond); \
2814 #define FMOVSCC(icc) \
2819 l1 = gen_new_label(); \
2820 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2821 cond = GET_FIELD_SP(insn, 14, 17); \
2822 gen_cond(r_cond, icc, cond); \
2823 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2825 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2826 gen_set_label(l1); \
2827 tcg_temp_free(r_cond); \
2829 #define FMOVDCC(icc) \
2834 l1 = gen_new_label(); \
2835 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2836 cond = GET_FIELD_SP(insn, 14, 17); \
2837 gen_cond(r_cond, icc, cond); \
2838 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2840 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2841 cpu_fpr[DFPREG(rs2)]); \
2842 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2843 cpu_fpr[DFPREG(rs2) + 1]); \
2844 gen_set_label(l1); \
2845 tcg_temp_free(r_cond); \
2847 #define FMOVQCC(icc) \
2852 l1 = gen_new_label(); \
2853 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2854 cond = GET_FIELD_SP(insn, 14, 17); \
2855 gen_cond(r_cond, icc, cond); \
2856 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2858 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2859 cpu_fpr[QFPREG(rs2)]); \
2860 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2861 cpu_fpr[QFPREG(rs2) + 1]); \
2862 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2863 cpu_fpr[QFPREG(rs2) + 2]); \
2864 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2865 cpu_fpr[QFPREG(rs2) + 3]); \
2866 gen_set_label(l1); \
2867 tcg_temp_free(r_cond); \
2870 case 0x101: /* V9 fmovscc %icc */
2873 case 0x102: /* V9 fmovdcc %icc */
2875 case 0x103: /* V9 fmovqcc %icc */
2876 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2879 case 0x181: /* V9 fmovscc %xcc */
2882 case 0x182: /* V9 fmovdcc %xcc */
2885 case 0x183: /* V9 fmovqcc %xcc */
2886 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2893 case 0x51: /* fcmps, V9 %fcc */
2894 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2896 case 0x52: /* fcmpd, V9 %fcc */
2897 gen_op_load_fpr_DT0(DFPREG(rs1
));
2898 gen_op_load_fpr_DT1(DFPREG(rs2
));
2899 gen_op_fcmpd(rd
& 3);
2901 case 0x53: /* fcmpq, V9 %fcc */
2902 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2903 gen_op_load_fpr_QT0(QFPREG(rs1
));
2904 gen_op_load_fpr_QT1(QFPREG(rs2
));
2905 gen_op_fcmpq(rd
& 3);
2907 case 0x55: /* fcmpes, V9 %fcc */
2908 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2910 case 0x56: /* fcmped, V9 %fcc */
2911 gen_op_load_fpr_DT0(DFPREG(rs1
));
2912 gen_op_load_fpr_DT1(DFPREG(rs2
));
2913 gen_op_fcmped(rd
& 3);
2915 case 0x57: /* fcmpeq, V9 %fcc */
2916 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2917 gen_op_load_fpr_QT0(QFPREG(rs1
));
2918 gen_op_load_fpr_QT1(QFPREG(rs2
));
2919 gen_op_fcmpeq(rd
& 3);
2924 } else if (xop
== 0x2) {
2927 rs1
= GET_FIELD(insn
, 13, 17);
2929 // or %g0, x, y -> mov T0, x; mov y, T0
2930 if (IS_IMM
) { /* immediate */
2933 rs2
= GET_FIELDs(insn
, 19, 31);
2934 r_const
= tcg_const_tl((int)rs2
);
2935 gen_movl_TN_reg(rd
, r_const
);
2936 tcg_temp_free(r_const
);
2937 } else { /* register */
2938 rs2
= GET_FIELD(insn
, 27, 31);
2939 gen_movl_reg_TN(rs2
, cpu_dst
);
2940 gen_movl_TN_reg(rd
, cpu_dst
);
2943 cpu_src1
= get_src1(insn
, cpu_src1
);
2944 if (IS_IMM
) { /* immediate */
2945 rs2
= GET_FIELDs(insn
, 19, 31);
2946 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, (int)rs2
);
2947 gen_movl_TN_reg(rd
, cpu_dst
);
2948 } else { /* register */
2949 // or x, %g0, y -> mov T1, x; mov y, T1
2950 rs2
= GET_FIELD(insn
, 27, 31);
2952 gen_movl_reg_TN(rs2
, cpu_src2
);
2953 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2954 gen_movl_TN_reg(rd
, cpu_dst
);
2956 gen_movl_TN_reg(rd
, cpu_src1
);
2959 #ifdef TARGET_SPARC64
2960 } else if (xop
== 0x25) { /* sll, V9 sllx */
2961 cpu_src1
= get_src1(insn
, cpu_src1
);
2962 if (IS_IMM
) { /* immediate */
2963 rs2
= GET_FIELDs(insn
, 20, 31);
2964 if (insn
& (1 << 12)) {
2965 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
2967 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x1f);
2969 } else { /* register */
2970 rs2
= GET_FIELD(insn
, 27, 31);
2971 gen_movl_reg_TN(rs2
, cpu_src2
);
2972 if (insn
& (1 << 12)) {
2973 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2975 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2977 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2979 gen_movl_TN_reg(rd
, cpu_dst
);
2980 } else if (xop
== 0x26) { /* srl, V9 srlx */
2981 cpu_src1
= get_src1(insn
, cpu_src1
);
2982 if (IS_IMM
) { /* immediate */
2983 rs2
= GET_FIELDs(insn
, 20, 31);
2984 if (insn
& (1 << 12)) {
2985 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
2987 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2988 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
2990 } else { /* register */
2991 rs2
= GET_FIELD(insn
, 27, 31);
2992 gen_movl_reg_TN(rs2
, cpu_src2
);
2993 if (insn
& (1 << 12)) {
2994 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2995 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2997 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2998 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2999 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3002 gen_movl_TN_reg(rd
, cpu_dst
);
3003 } else if (xop
== 0x27) { /* sra, V9 srax */
3004 cpu_src1
= get_src1(insn
, cpu_src1
);
3005 if (IS_IMM
) { /* immediate */
3006 rs2
= GET_FIELDs(insn
, 20, 31);
3007 if (insn
& (1 << 12)) {
3008 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3010 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3011 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3012 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
3014 } else { /* register */
3015 rs2
= GET_FIELD(insn
, 27, 31);
3016 gen_movl_reg_TN(rs2
, cpu_src2
);
3017 if (insn
& (1 << 12)) {
3018 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3019 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3021 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3022 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3023 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3024 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3027 gen_movl_TN_reg(rd
, cpu_dst
);
3029 } else if (xop
< 0x36) {
3030 cpu_src1
= get_src1(insn
, cpu_src1
);
3031 cpu_src2
= get_src2(insn
, cpu_src2
);
3033 switch (xop
& ~0x10) {
3036 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3038 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3041 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3043 gen_op_logic_cc(cpu_dst
);
3046 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3048 gen_op_logic_cc(cpu_dst
);
3051 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3053 gen_op_logic_cc(cpu_dst
);
3057 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3059 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3062 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3063 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3065 gen_op_logic_cc(cpu_dst
);
3068 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3069 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3071 gen_op_logic_cc(cpu_dst
);
3074 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3075 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3077 gen_op_logic_cc(cpu_dst
);
3081 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3083 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3084 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3085 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3088 #ifdef TARGET_SPARC64
3089 case 0x9: /* V9 mulx */
3090 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3094 CHECK_IU_FEATURE(dc
, MUL
);
3095 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3097 gen_op_logic_cc(cpu_dst
);
3100 CHECK_IU_FEATURE(dc
, MUL
);
3101 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3103 gen_op_logic_cc(cpu_dst
);
3107 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3109 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3110 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3111 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3114 #ifdef TARGET_SPARC64
3115 case 0xd: /* V9 udivx */
3116 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3117 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3118 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3119 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3123 CHECK_IU_FEATURE(dc
, DIV
);
3124 tcg_gen_helper_1_2(helper_udiv
, cpu_dst
, cpu_src1
,
3127 gen_op_div_cc(cpu_dst
);
3130 CHECK_IU_FEATURE(dc
, DIV
);
3131 tcg_gen_helper_1_2(helper_sdiv
, cpu_dst
, cpu_src1
,
3134 gen_op_div_cc(cpu_dst
);
3139 gen_movl_TN_reg(rd
, cpu_dst
);
3142 case 0x20: /* taddcc */
3143 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3144 gen_movl_TN_reg(rd
, cpu_dst
);
3146 case 0x21: /* tsubcc */
3147 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3148 gen_movl_TN_reg(rd
, cpu_dst
);
3150 case 0x22: /* taddcctv */
3151 save_state(dc
, cpu_cond
);
3152 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3153 gen_movl_TN_reg(rd
, cpu_dst
);
3155 case 0x23: /* tsubcctv */
3156 save_state(dc
, cpu_cond
);
3157 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3158 gen_movl_TN_reg(rd
, cpu_dst
);
3160 case 0x24: /* mulscc */
3161 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3162 gen_movl_TN_reg(rd
, cpu_dst
);
3164 #ifndef TARGET_SPARC64
3165 case 0x25: /* sll */
3166 if (IS_IMM
) { /* immediate */
3167 rs2
= GET_FIELDs(insn
, 20, 31);
3168 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3169 } else { /* register */
3170 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3171 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3173 gen_movl_TN_reg(rd
, cpu_dst
);
3175 case 0x26: /* srl */
3176 if (IS_IMM
) { /* immediate */
3177 rs2
= GET_FIELDs(insn
, 20, 31);
3178 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3179 } else { /* register */
3180 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3181 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3183 gen_movl_TN_reg(rd
, cpu_dst
);
3185 case 0x27: /* sra */
3186 if (IS_IMM
) { /* immediate */
3187 rs2
= GET_FIELDs(insn
, 20, 31);
3188 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3189 } else { /* register */
3190 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3191 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3193 gen_movl_TN_reg(rd
, cpu_dst
);
3200 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3201 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3203 #ifndef TARGET_SPARC64
3204 case 0x01 ... 0x0f: /* undefined in the
3208 case 0x10 ... 0x1f: /* implementation-dependent
3214 case 0x2: /* V9 wrccr */
3215 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3216 tcg_gen_helper_0_1(helper_wrccr
, cpu_dst
);
3218 case 0x3: /* V9 wrasi */
3219 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3220 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3222 case 0x6: /* V9 wrfprs */
3223 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3224 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3225 save_state(dc
, cpu_cond
);
3230 case 0xf: /* V9 sir, nop if user */
3231 #if !defined(CONFIG_USER_ONLY)
3236 case 0x13: /* Graphics Status */
3237 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3239 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3241 case 0x17: /* Tick compare */
3242 #if !defined(CONFIG_USER_ONLY)
3243 if (!supervisor(dc
))
3249 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3251 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3252 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3253 offsetof(CPUState
, tick
));
3254 tcg_gen_helper_0_2(helper_tick_set_limit
,
3255 r_tickptr
, cpu_tick_cmpr
);
3256 tcg_temp_free(r_tickptr
);
3259 case 0x18: /* System tick */
3260 #if !defined(CONFIG_USER_ONLY)
3261 if (!supervisor(dc
))
3267 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3269 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3270 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3271 offsetof(CPUState
, stick
));
3272 tcg_gen_helper_0_2(helper_tick_set_count
,
3273 r_tickptr
, cpu_dst
);
3274 tcg_temp_free(r_tickptr
);
3277 case 0x19: /* System tick compare */
3278 #if !defined(CONFIG_USER_ONLY)
3279 if (!supervisor(dc
))
3285 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3287 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3288 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3289 offsetof(CPUState
, stick
));
3290 tcg_gen_helper_0_2(helper_tick_set_limit
,
3291 r_tickptr
, cpu_stick_cmpr
);
3292 tcg_temp_free(r_tickptr
);
3296 case 0x10: /* Performance Control */
3297 case 0x11: /* Performance Instrumentation
3299 case 0x12: /* Dispatch Control */
3300 case 0x14: /* Softint set */
3301 case 0x15: /* Softint clear */
3302 case 0x16: /* Softint write */
3309 #if !defined(CONFIG_USER_ONLY)
3310 case 0x31: /* wrpsr, V9 saved, restored */
3312 if (!supervisor(dc
))
3314 #ifdef TARGET_SPARC64
3317 tcg_gen_helper_0_0(helper_saved
);
3320 tcg_gen_helper_0_0(helper_restored
);
3322 case 2: /* UA2005 allclean */
3323 case 3: /* UA2005 otherw */
3324 case 4: /* UA2005 normalw */
3325 case 5: /* UA2005 invalw */
3331 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3332 tcg_gen_helper_0_1(helper_wrpsr
, cpu_dst
);
3333 save_state(dc
, cpu_cond
);
3340 case 0x32: /* wrwim, V9 wrpr */
3342 if (!supervisor(dc
))
3344 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3345 #ifdef TARGET_SPARC64
3351 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3352 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3353 offsetof(CPUState
, tsptr
));
3354 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3355 offsetof(trap_state
, tpc
));
3356 tcg_temp_free(r_tsptr
);
3363 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3364 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3365 offsetof(CPUState
, tsptr
));
3366 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3367 offsetof(trap_state
, tnpc
));
3368 tcg_temp_free(r_tsptr
);
3375 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3376 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3377 offsetof(CPUState
, tsptr
));
3378 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3379 offsetof(trap_state
,
3381 tcg_temp_free(r_tsptr
);
3388 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3389 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3390 offsetof(CPUState
, tsptr
));
3391 tcg_gen_st_i32(cpu_tmp0
, r_tsptr
,
3392 offsetof(trap_state
, tt
));
3393 tcg_temp_free(r_tsptr
);
3400 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3401 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3402 offsetof(CPUState
, tick
));
3403 tcg_gen_helper_0_2(helper_tick_set_count
,
3404 r_tickptr
, cpu_tmp0
);
3405 tcg_temp_free(r_tickptr
);
3409 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3412 save_state(dc
, cpu_cond
);
3413 tcg_gen_helper_0_1(helper_wrpstate
, cpu_tmp0
);
3419 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3420 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3421 offsetof(CPUSPARCState
, tl
));
3424 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3425 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3426 offsetof(CPUSPARCState
,
3430 tcg_gen_helper_0_1(helper_wrcwp
, cpu_tmp0
);
3433 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3434 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3435 offsetof(CPUSPARCState
,
3438 case 11: // canrestore
3439 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3440 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3441 offsetof(CPUSPARCState
,
3444 case 12: // cleanwin
3445 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3446 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3447 offsetof(CPUSPARCState
,
3450 case 13: // otherwin
3451 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3452 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3453 offsetof(CPUSPARCState
,
3457 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3458 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3459 offsetof(CPUSPARCState
,
3462 case 16: // UA2005 gl
3463 CHECK_IU_FEATURE(dc
, GL
);
3464 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3465 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3466 offsetof(CPUSPARCState
, gl
));
3468 case 26: // UA2005 strand status
3469 CHECK_IU_FEATURE(dc
, HYPV
);
3470 if (!hypervisor(dc
))
3472 tcg_gen_trunc_tl_i32(cpu_ssr
, cpu_tmp0
);
3478 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3479 if (dc
->def
->nwindows
!= 32)
3480 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3481 (1 << dc
->def
->nwindows
) - 1);
3482 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3486 case 0x33: /* wrtbr, UA2005 wrhpr */
3488 #ifndef TARGET_SPARC64
3489 if (!supervisor(dc
))
3491 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3493 CHECK_IU_FEATURE(dc
, HYPV
);
3494 if (!hypervisor(dc
))
3496 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3499 // XXX gen_op_wrhpstate();
3500 save_state(dc
, cpu_cond
);
3506 // XXX gen_op_wrhtstate();
3509 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3512 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3514 case 31: // hstick_cmpr
3518 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3519 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3520 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3521 offsetof(CPUState
, hstick
));
3522 tcg_gen_helper_0_2(helper_tick_set_limit
,
3523 r_tickptr
, cpu_hstick_cmpr
);
3524 tcg_temp_free(r_tickptr
);
3527 case 6: // hver readonly
3535 #ifdef TARGET_SPARC64
3536 case 0x2c: /* V9 movcc */
3538 int cc
= GET_FIELD_SP(insn
, 11, 12);
3539 int cond
= GET_FIELD_SP(insn
, 14, 17);
3543 r_cond
= tcg_temp_new(TCG_TYPE_TL
);
3544 if (insn
& (1 << 18)) {
3546 gen_cond(r_cond
, 0, cond
);
3548 gen_cond(r_cond
, 1, cond
);
3552 gen_fcond(r_cond
, cc
, cond
);
3555 l1
= gen_new_label();
3557 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3558 if (IS_IMM
) { /* immediate */
3561 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3562 r_const
= tcg_const_tl((int)rs2
);
3563 gen_movl_TN_reg(rd
, r_const
);
3564 tcg_temp_free(r_const
);
3566 rs2
= GET_FIELD_SP(insn
, 0, 4);
3567 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3568 gen_movl_TN_reg(rd
, cpu_tmp0
);
3571 tcg_temp_free(r_cond
);
3574 case 0x2d: /* V9 sdivx */
3575 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3576 gen_movl_TN_reg(rd
, cpu_dst
);
3578 case 0x2e: /* V9 popc */
3580 cpu_src2
= get_src2(insn
, cpu_src2
);
3581 tcg_gen_helper_1_1(helper_popc
, cpu_dst
,
3583 gen_movl_TN_reg(rd
, cpu_dst
);
3585 case 0x2f: /* V9 movr */
3587 int cond
= GET_FIELD_SP(insn
, 10, 12);
3590 cpu_src1
= get_src1(insn
, cpu_src1
);
3592 l1
= gen_new_label();
3594 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3596 if (IS_IMM
) { /* immediate */
3599 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3600 r_const
= tcg_const_tl((int)rs2
);
3601 gen_movl_TN_reg(rd
, r_const
);
3602 tcg_temp_free(r_const
);
3604 rs2
= GET_FIELD_SP(insn
, 0, 4);
3605 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3606 gen_movl_TN_reg(rd
, cpu_tmp0
);
3616 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3617 #ifdef TARGET_SPARC64
3618 int opf
= GET_FIELD_SP(insn
, 5, 13);
3619 rs1
= GET_FIELD(insn
, 13, 17);
3620 rs2
= GET_FIELD(insn
, 27, 31);
3621 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3625 case 0x000: /* VIS I edge8cc */
3626 case 0x001: /* VIS II edge8n */
3627 case 0x002: /* VIS I edge8lcc */
3628 case 0x003: /* VIS II edge8ln */
3629 case 0x004: /* VIS I edge16cc */
3630 case 0x005: /* VIS II edge16n */
3631 case 0x006: /* VIS I edge16lcc */
3632 case 0x007: /* VIS II edge16ln */
3633 case 0x008: /* VIS I edge32cc */
3634 case 0x009: /* VIS II edge32n */
3635 case 0x00a: /* VIS I edge32lcc */
3636 case 0x00b: /* VIS II edge32ln */
3639 case 0x010: /* VIS I array8 */
3640 CHECK_FPU_FEATURE(dc
, VIS1
);
3641 cpu_src1
= get_src1(insn
, cpu_src1
);
3642 gen_movl_reg_TN(rs2
, cpu_src2
);
3643 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3645 gen_movl_TN_reg(rd
, cpu_dst
);
3647 case 0x012: /* VIS I array16 */
3648 CHECK_FPU_FEATURE(dc
, VIS1
);
3649 cpu_src1
= get_src1(insn
, cpu_src1
);
3650 gen_movl_reg_TN(rs2
, cpu_src2
);
3651 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3653 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3654 gen_movl_TN_reg(rd
, cpu_dst
);
3656 case 0x014: /* VIS I array32 */
3657 CHECK_FPU_FEATURE(dc
, VIS1
);
3658 cpu_src1
= get_src1(insn
, cpu_src1
);
3659 gen_movl_reg_TN(rs2
, cpu_src2
);
3660 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3662 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3663 gen_movl_TN_reg(rd
, cpu_dst
);
3665 case 0x018: /* VIS I alignaddr */
3666 CHECK_FPU_FEATURE(dc
, VIS1
);
3667 cpu_src1
= get_src1(insn
, cpu_src1
);
3668 gen_movl_reg_TN(rs2
, cpu_src2
);
3669 tcg_gen_helper_1_2(helper_alignaddr
, cpu_dst
, cpu_src1
,
3671 gen_movl_TN_reg(rd
, cpu_dst
);
3673 case 0x019: /* VIS II bmask */
3674 case 0x01a: /* VIS I alignaddrl */
3677 case 0x020: /* VIS I fcmple16 */
3678 CHECK_FPU_FEATURE(dc
, VIS1
);
3679 gen_op_load_fpr_DT0(DFPREG(rs1
));
3680 gen_op_load_fpr_DT1(DFPREG(rs2
));
3681 tcg_gen_helper_0_0(helper_fcmple16
);
3682 gen_op_store_DT0_fpr(DFPREG(rd
));
3684 case 0x022: /* VIS I fcmpne16 */
3685 CHECK_FPU_FEATURE(dc
, VIS1
);
3686 gen_op_load_fpr_DT0(DFPREG(rs1
));
3687 gen_op_load_fpr_DT1(DFPREG(rs2
));
3688 tcg_gen_helper_0_0(helper_fcmpne16
);
3689 gen_op_store_DT0_fpr(DFPREG(rd
));
3691 case 0x024: /* VIS I fcmple32 */
3692 CHECK_FPU_FEATURE(dc
, VIS1
);
3693 gen_op_load_fpr_DT0(DFPREG(rs1
));
3694 gen_op_load_fpr_DT1(DFPREG(rs2
));
3695 tcg_gen_helper_0_0(helper_fcmple32
);
3696 gen_op_store_DT0_fpr(DFPREG(rd
));
3698 case 0x026: /* VIS I fcmpne32 */
3699 CHECK_FPU_FEATURE(dc
, VIS1
);
3700 gen_op_load_fpr_DT0(DFPREG(rs1
));
3701 gen_op_load_fpr_DT1(DFPREG(rs2
));
3702 tcg_gen_helper_0_0(helper_fcmpne32
);
3703 gen_op_store_DT0_fpr(DFPREG(rd
));
3705 case 0x028: /* VIS I fcmpgt16 */
3706 CHECK_FPU_FEATURE(dc
, VIS1
);
3707 gen_op_load_fpr_DT0(DFPREG(rs1
));
3708 gen_op_load_fpr_DT1(DFPREG(rs2
));
3709 tcg_gen_helper_0_0(helper_fcmpgt16
);
3710 gen_op_store_DT0_fpr(DFPREG(rd
));
3712 case 0x02a: /* VIS I fcmpeq16 */
3713 CHECK_FPU_FEATURE(dc
, VIS1
);
3714 gen_op_load_fpr_DT0(DFPREG(rs1
));
3715 gen_op_load_fpr_DT1(DFPREG(rs2
));
3716 tcg_gen_helper_0_0(helper_fcmpeq16
);
3717 gen_op_store_DT0_fpr(DFPREG(rd
));
3719 case 0x02c: /* VIS I fcmpgt32 */
3720 CHECK_FPU_FEATURE(dc
, VIS1
);
3721 gen_op_load_fpr_DT0(DFPREG(rs1
));
3722 gen_op_load_fpr_DT1(DFPREG(rs2
));
3723 tcg_gen_helper_0_0(helper_fcmpgt32
);
3724 gen_op_store_DT0_fpr(DFPREG(rd
));
3726 case 0x02e: /* VIS I fcmpeq32 */
3727 CHECK_FPU_FEATURE(dc
, VIS1
);
3728 gen_op_load_fpr_DT0(DFPREG(rs1
));
3729 gen_op_load_fpr_DT1(DFPREG(rs2
));
3730 tcg_gen_helper_0_0(helper_fcmpeq32
);
3731 gen_op_store_DT0_fpr(DFPREG(rd
));
3733 case 0x031: /* VIS I fmul8x16 */
3734 CHECK_FPU_FEATURE(dc
, VIS1
);
3735 gen_op_load_fpr_DT0(DFPREG(rs1
));
3736 gen_op_load_fpr_DT1(DFPREG(rs2
));
3737 tcg_gen_helper_0_0(helper_fmul8x16
);
3738 gen_op_store_DT0_fpr(DFPREG(rd
));
3740 case 0x033: /* VIS I fmul8x16au */
3741 CHECK_FPU_FEATURE(dc
, VIS1
);
3742 gen_op_load_fpr_DT0(DFPREG(rs1
));
3743 gen_op_load_fpr_DT1(DFPREG(rs2
));
3744 tcg_gen_helper_0_0(helper_fmul8x16au
);
3745 gen_op_store_DT0_fpr(DFPREG(rd
));
3747 case 0x035: /* VIS I fmul8x16al */
3748 CHECK_FPU_FEATURE(dc
, VIS1
);
3749 gen_op_load_fpr_DT0(DFPREG(rs1
));
3750 gen_op_load_fpr_DT1(DFPREG(rs2
));
3751 tcg_gen_helper_0_0(helper_fmul8x16al
);
3752 gen_op_store_DT0_fpr(DFPREG(rd
));
3754 case 0x036: /* VIS I fmul8sux16 */
3755 CHECK_FPU_FEATURE(dc
, VIS1
);
3756 gen_op_load_fpr_DT0(DFPREG(rs1
));
3757 gen_op_load_fpr_DT1(DFPREG(rs2
));
3758 tcg_gen_helper_0_0(helper_fmul8sux16
);
3759 gen_op_store_DT0_fpr(DFPREG(rd
));
3761 case 0x037: /* VIS I fmul8ulx16 */
3762 CHECK_FPU_FEATURE(dc
, VIS1
);
3763 gen_op_load_fpr_DT0(DFPREG(rs1
));
3764 gen_op_load_fpr_DT1(DFPREG(rs2
));
3765 tcg_gen_helper_0_0(helper_fmul8ulx16
);
3766 gen_op_store_DT0_fpr(DFPREG(rd
));
3768 case 0x038: /* VIS I fmuld8sux16 */
3769 CHECK_FPU_FEATURE(dc
, VIS1
);
3770 gen_op_load_fpr_DT0(DFPREG(rs1
));
3771 gen_op_load_fpr_DT1(DFPREG(rs2
));
3772 tcg_gen_helper_0_0(helper_fmuld8sux16
);
3773 gen_op_store_DT0_fpr(DFPREG(rd
));
3775 case 0x039: /* VIS I fmuld8ulx16 */
3776 CHECK_FPU_FEATURE(dc
, VIS1
);
3777 gen_op_load_fpr_DT0(DFPREG(rs1
));
3778 gen_op_load_fpr_DT1(DFPREG(rs2
));
3779 tcg_gen_helper_0_0(helper_fmuld8ulx16
);
3780 gen_op_store_DT0_fpr(DFPREG(rd
));
3782 case 0x03a: /* VIS I fpack32 */
3783 case 0x03b: /* VIS I fpack16 */
3784 case 0x03d: /* VIS I fpackfix */
3785 case 0x03e: /* VIS I pdist */
3788 case 0x048: /* VIS I faligndata */
3789 CHECK_FPU_FEATURE(dc
, VIS1
);
3790 gen_op_load_fpr_DT0(DFPREG(rs1
));
3791 gen_op_load_fpr_DT1(DFPREG(rs2
));
3792 tcg_gen_helper_0_0(helper_faligndata
);
3793 gen_op_store_DT0_fpr(DFPREG(rd
));
3795 case 0x04b: /* VIS I fpmerge */
3796 CHECK_FPU_FEATURE(dc
, VIS1
);
3797 gen_op_load_fpr_DT0(DFPREG(rs1
));
3798 gen_op_load_fpr_DT1(DFPREG(rs2
));
3799 tcg_gen_helper_0_0(helper_fpmerge
);
3800 gen_op_store_DT0_fpr(DFPREG(rd
));
3802 case 0x04c: /* VIS II bshuffle */
3805 case 0x04d: /* VIS I fexpand */
3806 CHECK_FPU_FEATURE(dc
, VIS1
);
3807 gen_op_load_fpr_DT0(DFPREG(rs1
));
3808 gen_op_load_fpr_DT1(DFPREG(rs2
));
3809 tcg_gen_helper_0_0(helper_fexpand
);
3810 gen_op_store_DT0_fpr(DFPREG(rd
));
3812 case 0x050: /* VIS I fpadd16 */
3813 CHECK_FPU_FEATURE(dc
, VIS1
);
3814 gen_op_load_fpr_DT0(DFPREG(rs1
));
3815 gen_op_load_fpr_DT1(DFPREG(rs2
));
3816 tcg_gen_helper_0_0(helper_fpadd16
);
3817 gen_op_store_DT0_fpr(DFPREG(rd
));
3819 case 0x051: /* VIS I fpadd16s */
3820 CHECK_FPU_FEATURE(dc
, VIS1
);
3821 tcg_gen_helper_1_2(helper_fpadd16s
, cpu_fpr
[rd
],
3822 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3824 case 0x052: /* VIS I fpadd32 */
3825 CHECK_FPU_FEATURE(dc
, VIS1
);
3826 gen_op_load_fpr_DT0(DFPREG(rs1
));
3827 gen_op_load_fpr_DT1(DFPREG(rs2
));
3828 tcg_gen_helper_0_0(helper_fpadd32
);
3829 gen_op_store_DT0_fpr(DFPREG(rd
));
3831 case 0x053: /* VIS I fpadd32s */
3832 CHECK_FPU_FEATURE(dc
, VIS1
);
3833 tcg_gen_helper_1_2(helper_fpadd32s
, cpu_fpr
[rd
],
3834 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3836 case 0x054: /* VIS I fpsub16 */
3837 CHECK_FPU_FEATURE(dc
, VIS1
);
3838 gen_op_load_fpr_DT0(DFPREG(rs1
));
3839 gen_op_load_fpr_DT1(DFPREG(rs2
));
3840 tcg_gen_helper_0_0(helper_fpsub16
);
3841 gen_op_store_DT0_fpr(DFPREG(rd
));
3843 case 0x055: /* VIS I fpsub16s */
3844 CHECK_FPU_FEATURE(dc
, VIS1
);
3845 tcg_gen_helper_1_2(helper_fpsub16s
, cpu_fpr
[rd
],
3846 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3848 case 0x056: /* VIS I fpsub32 */
3849 CHECK_FPU_FEATURE(dc
, VIS1
);
3850 gen_op_load_fpr_DT0(DFPREG(rs1
));
3851 gen_op_load_fpr_DT1(DFPREG(rs2
));
3852 tcg_gen_helper_0_0(helper_fpsub32
);
3853 gen_op_store_DT0_fpr(DFPREG(rd
));
3855 case 0x057: /* VIS I fpsub32s */
3856 CHECK_FPU_FEATURE(dc
, VIS1
);
3857 tcg_gen_helper_1_2(helper_fpsub32s
, cpu_fpr
[rd
],
3858 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3860 case 0x060: /* VIS I fzero */
3861 CHECK_FPU_FEATURE(dc
, VIS1
);
3862 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3863 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3865 case 0x061: /* VIS I fzeros */
3866 CHECK_FPU_FEATURE(dc
, VIS1
);
3867 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3869 case 0x062: /* VIS I fnor */
3870 CHECK_FPU_FEATURE(dc
, VIS1
);
3871 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3872 cpu_fpr
[DFPREG(rs2
)]);
3873 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3874 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3875 cpu_fpr
[DFPREG(rs2
) + 1]);
3876 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3878 case 0x063: /* VIS I fnors */
3879 CHECK_FPU_FEATURE(dc
, VIS1
);
3880 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3881 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3883 case 0x064: /* VIS I fandnot2 */
3884 CHECK_FPU_FEATURE(dc
, VIS1
);
3885 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
3886 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3887 cpu_fpr
[DFPREG(rs2
)]);
3888 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
3889 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3890 cpu_fpr
[DFPREG(rs2
) + 1]);
3892 case 0x065: /* VIS I fandnot2s */
3893 CHECK_FPU_FEATURE(dc
, VIS1
);
3894 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
3895 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
3897 case 0x066: /* VIS I fnot2 */
3898 CHECK_FPU_FEATURE(dc
, VIS1
);
3899 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3901 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3902 cpu_fpr
[DFPREG(rs2
) + 1], -1);
3904 case 0x067: /* VIS I fnot2s */
3905 CHECK_FPU_FEATURE(dc
, VIS1
);
3906 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], -1);
3908 case 0x068: /* VIS I fandnot1 */
3909 CHECK_FPU_FEATURE(dc
, VIS1
);
3910 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3911 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3912 cpu_fpr
[DFPREG(rs1
)]);
3913 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3914 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3915 cpu_fpr
[DFPREG(rs1
) + 1]);
3917 case 0x069: /* VIS I fandnot1s */
3918 CHECK_FPU_FEATURE(dc
, VIS1
);
3919 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3920 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3922 case 0x06a: /* VIS I fnot1 */
3923 CHECK_FPU_FEATURE(dc
, VIS1
);
3924 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3926 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3927 cpu_fpr
[DFPREG(rs1
) + 1], -1);
3929 case 0x06b: /* VIS I fnot1s */
3930 CHECK_FPU_FEATURE(dc
, VIS1
);
3931 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], -1);
3933 case 0x06c: /* VIS I fxor */
3934 CHECK_FPU_FEATURE(dc
, VIS1
);
3935 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3936 cpu_fpr
[DFPREG(rs2
)]);
3937 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3938 cpu_fpr
[DFPREG(rs1
) + 1],
3939 cpu_fpr
[DFPREG(rs2
) + 1]);
3941 case 0x06d: /* VIS I fxors */
3942 CHECK_FPU_FEATURE(dc
, VIS1
);
3943 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3945 case 0x06e: /* VIS I fnand */
3946 CHECK_FPU_FEATURE(dc
, VIS1
);
3947 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3948 cpu_fpr
[DFPREG(rs2
)]);
3949 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3950 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3951 cpu_fpr
[DFPREG(rs2
) + 1]);
3952 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3954 case 0x06f: /* VIS I fnands */
3955 CHECK_FPU_FEATURE(dc
, VIS1
);
3956 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3957 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3959 case 0x070: /* VIS I fand */
3960 CHECK_FPU_FEATURE(dc
, VIS1
);
3961 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3962 cpu_fpr
[DFPREG(rs2
)]);
3963 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3964 cpu_fpr
[DFPREG(rs1
) + 1],
3965 cpu_fpr
[DFPREG(rs2
) + 1]);
3967 case 0x071: /* VIS I fands */
3968 CHECK_FPU_FEATURE(dc
, VIS1
);
3969 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3971 case 0x072: /* VIS I fxnor */
3972 CHECK_FPU_FEATURE(dc
, VIS1
);
3973 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3974 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3975 cpu_fpr
[DFPREG(rs1
)]);
3976 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
3977 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3978 cpu_fpr
[DFPREG(rs1
) + 1]);
3980 case 0x073: /* VIS I fxnors */
3981 CHECK_FPU_FEATURE(dc
, VIS1
);
3982 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3983 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3985 case 0x074: /* VIS I fsrc1 */
3986 CHECK_FPU_FEATURE(dc
, VIS1
);
3987 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3988 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
3989 cpu_fpr
[DFPREG(rs1
) + 1]);
3991 case 0x075: /* VIS I fsrc1s */
3992 CHECK_FPU_FEATURE(dc
, VIS1
);
3993 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3995 case 0x076: /* VIS I fornot2 */
3996 CHECK_FPU_FEATURE(dc
, VIS1
);
3997 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
3998 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3999 cpu_fpr
[DFPREG(rs2
)]);
4000 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
4001 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4002 cpu_fpr
[DFPREG(rs2
) + 1]);
4004 case 0x077: /* VIS I fornot2s */
4005 CHECK_FPU_FEATURE(dc
, VIS1
);
4006 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
4007 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
4009 case 0x078: /* VIS I fsrc2 */
4010 CHECK_FPU_FEATURE(dc
, VIS1
);
4011 gen_op_load_fpr_DT0(DFPREG(rs2
));
4012 gen_op_store_DT0_fpr(DFPREG(rd
));
4014 case 0x079: /* VIS I fsrc2s */
4015 CHECK_FPU_FEATURE(dc
, VIS1
);
4016 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4018 case 0x07a: /* VIS I fornot1 */
4019 CHECK_FPU_FEATURE(dc
, VIS1
);
4020 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4021 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4022 cpu_fpr
[DFPREG(rs1
)]);
4023 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4024 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4025 cpu_fpr
[DFPREG(rs1
) + 1]);
4027 case 0x07b: /* VIS I fornot1s */
4028 CHECK_FPU_FEATURE(dc
, VIS1
);
4029 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4030 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4032 case 0x07c: /* VIS I for */
4033 CHECK_FPU_FEATURE(dc
, VIS1
);
4034 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4035 cpu_fpr
[DFPREG(rs2
)]);
4036 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4037 cpu_fpr
[DFPREG(rs1
) + 1],
4038 cpu_fpr
[DFPREG(rs2
) + 1]);
4040 case 0x07d: /* VIS I fors */
4041 CHECK_FPU_FEATURE(dc
, VIS1
);
4042 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4044 case 0x07e: /* VIS I fone */
4045 CHECK_FPU_FEATURE(dc
, VIS1
);
4046 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4047 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4049 case 0x07f: /* VIS I fones */
4050 CHECK_FPU_FEATURE(dc
, VIS1
);
4051 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4053 case 0x080: /* VIS I shutdown */
4054 case 0x081: /* VIS II siam */
4063 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4064 #ifdef TARGET_SPARC64
4069 #ifdef TARGET_SPARC64
4070 } else if (xop
== 0x39) { /* V9 return */
4073 save_state(dc
, cpu_cond
);
4074 cpu_src1
= get_src1(insn
, cpu_src1
);
4075 if (IS_IMM
) { /* immediate */
4076 rs2
= GET_FIELDs(insn
, 19, 31);
4077 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4078 } else { /* register */
4079 rs2
= GET_FIELD(insn
, 27, 31);
4081 gen_movl_reg_TN(rs2
, cpu_src2
);
4082 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4084 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4086 tcg_gen_helper_0_0(helper_restore
);
4087 gen_mov_pc_npc(dc
, cpu_cond
);
4088 r_const
= tcg_const_i32(3);
4089 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
, r_const
);
4090 tcg_temp_free(r_const
);
4091 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4092 dc
->npc
= DYNAMIC_PC
;
4096 cpu_src1
= get_src1(insn
, cpu_src1
);
4097 if (IS_IMM
) { /* immediate */
4098 rs2
= GET_FIELDs(insn
, 19, 31);
4099 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4100 } else { /* register */
4101 rs2
= GET_FIELD(insn
, 27, 31);
4103 gen_movl_reg_TN(rs2
, cpu_src2
);
4104 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4106 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4109 case 0x38: /* jmpl */
4113 r_const
= tcg_const_tl(dc
->pc
);
4114 gen_movl_TN_reg(rd
, r_const
);
4115 tcg_temp_free(r_const
);
4116 gen_mov_pc_npc(dc
, cpu_cond
);
4117 r_const
= tcg_const_i32(3);
4118 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4120 tcg_temp_free(r_const
);
4121 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4122 dc
->npc
= DYNAMIC_PC
;
4125 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4126 case 0x39: /* rett, V9 return */
4130 if (!supervisor(dc
))
4132 gen_mov_pc_npc(dc
, cpu_cond
);
4133 r_const
= tcg_const_i32(3);
4134 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4136 tcg_temp_free(r_const
);
4137 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4138 dc
->npc
= DYNAMIC_PC
;
4139 tcg_gen_helper_0_0(helper_rett
);
4143 case 0x3b: /* flush */
4144 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4146 tcg_gen_helper_0_1(helper_flush
, cpu_dst
);
4148 case 0x3c: /* save */
4149 save_state(dc
, cpu_cond
);
4150 tcg_gen_helper_0_0(helper_save
);
4151 gen_movl_TN_reg(rd
, cpu_dst
);
4153 case 0x3d: /* restore */
4154 save_state(dc
, cpu_cond
);
4155 tcg_gen_helper_0_0(helper_restore
);
4156 gen_movl_TN_reg(rd
, cpu_dst
);
4158 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4159 case 0x3e: /* V9 done/retry */
4163 if (!supervisor(dc
))
4165 dc
->npc
= DYNAMIC_PC
;
4166 dc
->pc
= DYNAMIC_PC
;
4167 tcg_gen_helper_0_0(helper_done
);
4170 if (!supervisor(dc
))
4172 dc
->npc
= DYNAMIC_PC
;
4173 dc
->pc
= DYNAMIC_PC
;
4174 tcg_gen_helper_0_0(helper_retry
);
4189 case 3: /* load/store instructions */
4191 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4193 cpu_src1
= get_src1(insn
, cpu_src1
);
4194 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4195 rs2
= GET_FIELD(insn
, 27, 31);
4196 gen_movl_reg_TN(rs2
, cpu_src2
);
4197 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4198 } else if (IS_IMM
) { /* immediate */
4199 rs2
= GET_FIELDs(insn
, 19, 31);
4200 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, (int)rs2
);
4201 } else { /* register */
4202 rs2
= GET_FIELD(insn
, 27, 31);
4204 gen_movl_reg_TN(rs2
, cpu_src2
);
4205 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4207 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4209 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4210 (xop
> 0x17 && xop
<= 0x1d ) ||
4211 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4213 case 0x0: /* load unsigned word */
4214 gen_address_mask(dc
, cpu_addr
);
4215 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4217 case 0x1: /* load unsigned byte */
4218 gen_address_mask(dc
, cpu_addr
);
4219 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4221 case 0x2: /* load unsigned halfword */
4222 gen_address_mask(dc
, cpu_addr
);
4223 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4225 case 0x3: /* load double word */
4231 save_state(dc
, cpu_cond
);
4232 r_const
= tcg_const_i32(7);
4233 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4234 r_const
); // XXX remove
4235 tcg_temp_free(r_const
);
4236 gen_address_mask(dc
, cpu_addr
);
4237 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4238 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4239 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4240 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4241 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4242 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4243 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4246 case 0x9: /* load signed byte */
4247 gen_address_mask(dc
, cpu_addr
);
4248 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4250 case 0xa: /* load signed halfword */
4251 gen_address_mask(dc
, cpu_addr
);
4252 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4254 case 0xd: /* ldstub -- XXX: should be atomically */
4258 gen_address_mask(dc
, cpu_addr
);
4259 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4260 r_const
= tcg_const_tl(0xff);
4261 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4262 tcg_temp_free(r_const
);
4265 case 0x0f: /* swap register with memory. Also
4267 CHECK_IU_FEATURE(dc
, SWAP
);
4268 gen_movl_reg_TN(rd
, cpu_val
);
4269 gen_address_mask(dc
, cpu_addr
);
4270 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4271 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4272 tcg_gen_extu_i32_tl(cpu_val
, cpu_tmp32
);
4274 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4275 case 0x10: /* load word alternate */
4276 #ifndef TARGET_SPARC64
4279 if (!supervisor(dc
))
4282 save_state(dc
, cpu_cond
);
4283 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4285 case 0x11: /* load unsigned byte alternate */
4286 #ifndef TARGET_SPARC64
4289 if (!supervisor(dc
))
4292 save_state(dc
, cpu_cond
);
4293 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4295 case 0x12: /* load unsigned halfword alternate */
4296 #ifndef TARGET_SPARC64
4299 if (!supervisor(dc
))
4302 save_state(dc
, cpu_cond
);
4303 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4305 case 0x13: /* load double word alternate */
4306 #ifndef TARGET_SPARC64
4309 if (!supervisor(dc
))
4314 save_state(dc
, cpu_cond
);
4315 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4317 case 0x19: /* load signed byte alternate */
4318 #ifndef TARGET_SPARC64
4321 if (!supervisor(dc
))
4324 save_state(dc
, cpu_cond
);
4325 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4327 case 0x1a: /* load signed halfword alternate */
4328 #ifndef TARGET_SPARC64
4331 if (!supervisor(dc
))
4334 save_state(dc
, cpu_cond
);
4335 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4337 case 0x1d: /* ldstuba -- XXX: should be atomically */
4338 #ifndef TARGET_SPARC64
4341 if (!supervisor(dc
))
4344 save_state(dc
, cpu_cond
);
4345 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4347 case 0x1f: /* swap reg with alt. memory. Also
4349 CHECK_IU_FEATURE(dc
, SWAP
);
4350 #ifndef TARGET_SPARC64
4353 if (!supervisor(dc
))
4356 save_state(dc
, cpu_cond
);
4357 gen_movl_reg_TN(rd
, cpu_val
);
4358 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4361 #ifndef TARGET_SPARC64
4362 case 0x30: /* ldc */
4363 case 0x31: /* ldcsr */
4364 case 0x33: /* lddc */
4368 #ifdef TARGET_SPARC64
4369 case 0x08: /* V9 ldsw */
4370 gen_address_mask(dc
, cpu_addr
);
4371 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4373 case 0x0b: /* V9 ldx */
4374 gen_address_mask(dc
, cpu_addr
);
4375 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4377 case 0x18: /* V9 ldswa */
4378 save_state(dc
, cpu_cond
);
4379 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4381 case 0x1b: /* V9 ldxa */
4382 save_state(dc
, cpu_cond
);
4383 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4385 case 0x2d: /* V9 prefetch, no effect */
4387 case 0x30: /* V9 ldfa */
4388 save_state(dc
, cpu_cond
);
4389 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4391 case 0x33: /* V9 lddfa */
4392 save_state(dc
, cpu_cond
);
4393 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4395 case 0x3d: /* V9 prefetcha, no effect */
4397 case 0x32: /* V9 ldqfa */
4398 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4399 save_state(dc
, cpu_cond
);
4400 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4406 gen_movl_TN_reg(rd
, cpu_val
);
4407 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4410 } else if (xop
>= 0x20 && xop
< 0x24) {
4411 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4413 save_state(dc
, cpu_cond
);
4415 case 0x20: /* load fpreg */
4416 gen_address_mask(dc
, cpu_addr
);
4417 tcg_gen_qemu_ld32u(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4419 case 0x21: /* ldfsr, V9 ldxfsr */
4420 #ifdef TARGET_SPARC64
4421 gen_address_mask(dc
, cpu_addr
);
4423 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4424 tcg_gen_helper_0_1(helper_ldxfsr
, cpu_tmp64
);
4428 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4429 tcg_gen_helper_0_1(helper_ldfsr
, cpu_tmp32
);
4433 case 0x22: /* load quad fpreg */
4437 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4438 r_const
= tcg_const_i32(dc
->mem_idx
);
4439 tcg_gen_helper_0_2(helper_ldqf
, cpu_addr
, r_const
);
4440 tcg_temp_free(r_const
);
4441 gen_op_store_QT0_fpr(QFPREG(rd
));
4444 case 0x23: /* load double fpreg */
4448 r_const
= tcg_const_i32(dc
->mem_idx
);
4449 tcg_gen_helper_0_2(helper_lddf
, cpu_addr
, r_const
);
4450 tcg_temp_free(r_const
);
4451 gen_op_store_DT0_fpr(DFPREG(rd
));
4457 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4458 xop
== 0xe || xop
== 0x1e) {
4459 gen_movl_reg_TN(rd
, cpu_val
);
4461 case 0x4: /* store word */
4462 gen_address_mask(dc
, cpu_addr
);
4463 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4465 case 0x5: /* store byte */
4466 gen_address_mask(dc
, cpu_addr
);
4467 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4469 case 0x6: /* store halfword */
4470 gen_address_mask(dc
, cpu_addr
);
4471 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4473 case 0x7: /* store double word */
4477 TCGv r_low
, r_const
;
4479 save_state(dc
, cpu_cond
);
4480 gen_address_mask(dc
, cpu_addr
);
4481 r_const
= tcg_const_i32(7);
4482 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4483 r_const
); // XXX remove
4484 tcg_temp_free(r_const
);
4485 r_low
= tcg_temp_new(TCG_TYPE_TL
);
4486 gen_movl_reg_TN(rd
+ 1, r_low
);
4487 tcg_gen_helper_1_2(helper_pack64
, cpu_tmp64
, cpu_val
,
4489 tcg_temp_free(r_low
);
4490 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4493 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4494 case 0x14: /* store word alternate */
4495 #ifndef TARGET_SPARC64
4498 if (!supervisor(dc
))
4501 save_state(dc
, cpu_cond
);
4502 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4504 case 0x15: /* store byte alternate */
4505 #ifndef TARGET_SPARC64
4508 if (!supervisor(dc
))
4511 save_state(dc
, cpu_cond
);
4512 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4514 case 0x16: /* store halfword alternate */
4515 #ifndef TARGET_SPARC64
4518 if (!supervisor(dc
))
4521 save_state(dc
, cpu_cond
);
4522 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4524 case 0x17: /* store double word alternate */
4525 #ifndef TARGET_SPARC64
4528 if (!supervisor(dc
))
4534 save_state(dc
, cpu_cond
);
4535 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4539 #ifdef TARGET_SPARC64
4540 case 0x0e: /* V9 stx */
4541 gen_address_mask(dc
, cpu_addr
);
4542 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4544 case 0x1e: /* V9 stxa */
4545 save_state(dc
, cpu_cond
);
4546 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4552 } else if (xop
> 0x23 && xop
< 0x28) {
4553 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4555 save_state(dc
, cpu_cond
);
4557 case 0x24: /* store fpreg */
4558 gen_address_mask(dc
, cpu_addr
);
4559 tcg_gen_qemu_st32(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4561 case 0x25: /* stfsr, V9 stxfsr */
4562 #ifdef TARGET_SPARC64
4563 gen_address_mask(dc
, cpu_addr
);
4564 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4566 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4568 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp64
);
4569 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4572 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4573 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4577 #ifdef TARGET_SPARC64
4578 /* V9 stqf, store quad fpreg */
4582 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4583 gen_op_load_fpr_QT0(QFPREG(rd
));
4584 r_const
= tcg_const_i32(dc
->mem_idx
);
4585 tcg_gen_helper_0_2(helper_stqf
, cpu_addr
, r_const
);
4586 tcg_temp_free(r_const
);
4589 #else /* !TARGET_SPARC64 */
4590 /* stdfq, store floating point queue */
4591 #if defined(CONFIG_USER_ONLY)
4594 if (!supervisor(dc
))
4596 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4601 case 0x27: /* store double fpreg */
4605 gen_op_load_fpr_DT0(DFPREG(rd
));
4606 r_const
= tcg_const_i32(dc
->mem_idx
);
4607 tcg_gen_helper_0_2(helper_stdf
, cpu_addr
, r_const
);
4608 tcg_temp_free(r_const
);
4614 } else if (xop
> 0x33 && xop
< 0x3f) {
4615 save_state(dc
, cpu_cond
);
4617 #ifdef TARGET_SPARC64
4618 case 0x34: /* V9 stfa */
4619 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4621 case 0x36: /* V9 stqfa */
4625 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4626 r_const
= tcg_const_i32(7);
4627 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4629 tcg_temp_free(r_const
);
4630 gen_op_load_fpr_QT0(QFPREG(rd
));
4631 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4634 case 0x37: /* V9 stdfa */
4635 gen_op_load_fpr_DT0(DFPREG(rd
));
4636 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4638 case 0x3c: /* V9 casa */
4639 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4640 gen_movl_TN_reg(rd
, cpu_val
);
4642 case 0x3e: /* V9 casxa */
4643 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4644 gen_movl_TN_reg(rd
, cpu_val
);
4647 case 0x34: /* stc */
4648 case 0x35: /* stcsr */
4649 case 0x36: /* stdcq */
4650 case 0x37: /* stdc */
4662 /* default case for non jump instructions */
4663 if (dc
->npc
== DYNAMIC_PC
) {
4664 dc
->pc
= DYNAMIC_PC
;
4666 } else if (dc
->npc
== JUMP_PC
) {
4667 /* we can do a static jump */
4668 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4672 dc
->npc
= dc
->npc
+ 4;
4680 save_state(dc
, cpu_cond
);
4681 r_const
= tcg_const_i32(TT_ILL_INSN
);
4682 tcg_gen_helper_0_1(raise_exception
, r_const
);
4683 tcg_temp_free(r_const
);
4691 save_state(dc
, cpu_cond
);
4692 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4693 tcg_gen_helper_0_1(raise_exception
, r_const
);
4694 tcg_temp_free(r_const
);
4698 #if !defined(CONFIG_USER_ONLY)
4703 save_state(dc
, cpu_cond
);
4704 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4705 tcg_gen_helper_0_1(raise_exception
, r_const
);
4706 tcg_temp_free(r_const
);
4712 save_state(dc
, cpu_cond
);
4713 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4716 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4718 save_state(dc
, cpu_cond
);
4719 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4723 #ifndef TARGET_SPARC64
4728 save_state(dc
, cpu_cond
);
4729 r_const
= tcg_const_i32(TT_NCP_INSN
);
4730 tcg_gen_helper_0_1(raise_exception
, r_const
);
4731 tcg_temp_free(r_const
);
4738 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4739 int spc
, CPUSPARCState
*env
)
4741 target_ulong pc_start
, last_pc
;
4742 uint16_t *gen_opc_end
;
4743 DisasContext dc1
, *dc
= &dc1
;
4748 memset(dc
, 0, sizeof(DisasContext
));
4753 dc
->npc
= (target_ulong
) tb
->cs_base
;
4754 dc
->mem_idx
= cpu_mmu_index(env
);
4756 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4757 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4759 dc
->fpu_enabled
= 0;
4760 #ifdef TARGET_SPARC64
4761 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4763 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4765 cpu_tmp0
= tcg_temp_new(TCG_TYPE_TL
);
4766 cpu_tmp32
= tcg_temp_new(TCG_TYPE_I32
);
4767 cpu_tmp64
= tcg_temp_new(TCG_TYPE_I64
);
4769 cpu_dst
= tcg_temp_local_new(TCG_TYPE_TL
);
4772 cpu_val
= tcg_temp_local_new(TCG_TYPE_TL
);
4773 cpu_addr
= tcg_temp_local_new(TCG_TYPE_TL
);
4776 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4778 max_insns
= CF_COUNT_MASK
;
4781 if (env
->nb_breakpoints
> 0) {
4782 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4783 if (env
->breakpoints
[j
] == dc
->pc
) {
4784 if (dc
->pc
!= pc_start
)
4785 save_state(dc
, cpu_cond
);
4786 tcg_gen_helper_0_0(helper_debug
);
4795 fprintf(logfile
, "Search PC...\n");
4796 j
= gen_opc_ptr
- gen_opc_buf
;
4800 gen_opc_instr_start
[lj
++] = 0;
4801 gen_opc_pc
[lj
] = dc
->pc
;
4802 gen_opc_npc
[lj
] = dc
->npc
;
4803 gen_opc_instr_start
[lj
] = 1;
4804 gen_opc_icount
[lj
] = num_insns
;
4807 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4810 disas_sparc_insn(dc
);
4815 /* if the next PC is different, we abort now */
4816 if (dc
->pc
!= (last_pc
+ 4))
4818 /* if we reach a page boundary, we stop generation so that the
4819 PC of a TT_TFAULT exception is always in the right page */
4820 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4822 /* if single step mode, we generate only one instruction and
4823 generate an exception */
4824 if (env
->singlestep_enabled
) {
4825 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4829 } while ((gen_opc_ptr
< gen_opc_end
) &&
4830 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4831 num_insns
< max_insns
);
4834 tcg_temp_free(cpu_addr
);
4835 tcg_temp_free(cpu_val
);
4836 tcg_temp_free(cpu_dst
);
4837 tcg_temp_free(cpu_tmp64
);
4838 tcg_temp_free(cpu_tmp32
);
4839 tcg_temp_free(cpu_tmp0
);
4840 if (tb
->cflags
& CF_LAST_IO
)
4843 if (dc
->pc
!= DYNAMIC_PC
&&
4844 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4845 /* static PC and NPC: we can use direct chaining */
4846 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4848 if (dc
->pc
!= DYNAMIC_PC
)
4849 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4850 save_npc(dc
, cpu_cond
);
4854 gen_icount_end(tb
, num_insns
);
4855 *gen_opc_ptr
= INDEX_op_end
;
4857 j
= gen_opc_ptr
- gen_opc_buf
;
4860 gen_opc_instr_start
[lj
++] = 0;
4866 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4867 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4869 tb
->size
= last_pc
+ 4 - pc_start
;
4870 tb
->icount
= num_insns
;
4873 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4874 fprintf(logfile
, "--------------\n");
4875 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4876 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4877 fprintf(logfile
, "\n");
4882 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4884 gen_intermediate_code_internal(tb
, 0, env
);
4887 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4889 gen_intermediate_code_internal(tb
, 1, env
);
4892 void gen_intermediate_code_init(CPUSPARCState
*env
)
4896 static const char * const gregnames
[8] = {
4897 NULL
, // g0 not used
4906 static const char * const fregnames
[64] = {
4907 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4908 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4909 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4910 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4911 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4912 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4913 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4914 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4917 /* init various static tables */
4921 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
4922 cpu_regwptr
= tcg_global_mem_new(TCG_TYPE_PTR
, TCG_AREG0
,
4923 offsetof(CPUState
, regwptr
),
4925 #ifdef TARGET_SPARC64
4926 cpu_xcc
= tcg_global_mem_new(TCG_TYPE_I32
,
4927 TCG_AREG0
, offsetof(CPUState
, xcc
),
4929 cpu_asi
= tcg_global_mem_new(TCG_TYPE_I32
,
4930 TCG_AREG0
, offsetof(CPUState
, asi
),
4932 cpu_fprs
= tcg_global_mem_new(TCG_TYPE_I32
,
4933 TCG_AREG0
, offsetof(CPUState
, fprs
),
4935 cpu_gsr
= tcg_global_mem_new(TCG_TYPE_TL
,
4936 TCG_AREG0
, offsetof(CPUState
, gsr
),
4938 cpu_tick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4940 offsetof(CPUState
, tick_cmpr
),
4942 cpu_stick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4944 offsetof(CPUState
, stick_cmpr
),
4946 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4948 offsetof(CPUState
, hstick_cmpr
),
4950 cpu_hintp
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4951 offsetof(CPUState
, hintp
),
4953 cpu_htba
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4954 offsetof(CPUState
, htba
),
4956 cpu_hver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4957 offsetof(CPUState
, hver
),
4959 cpu_ssr
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4960 offsetof(CPUState
, ssr
), "ssr");
4961 cpu_ver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4962 offsetof(CPUState
, version
), "ver");
4964 cpu_wim
= tcg_global_mem_new(TCG_TYPE_I32
,
4965 TCG_AREG0
, offsetof(CPUState
, wim
),
4968 cpu_cond
= tcg_global_mem_new(TCG_TYPE_TL
,
4969 TCG_AREG0
, offsetof(CPUState
, cond
),
4971 cpu_cc_src
= tcg_global_mem_new(TCG_TYPE_TL
,
4972 TCG_AREG0
, offsetof(CPUState
, cc_src
),
4974 cpu_cc_src2
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4975 offsetof(CPUState
, cc_src2
),
4977 cpu_cc_dst
= tcg_global_mem_new(TCG_TYPE_TL
,
4978 TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4980 cpu_psr
= tcg_global_mem_new(TCG_TYPE_I32
,
4981 TCG_AREG0
, offsetof(CPUState
, psr
),
4983 cpu_fsr
= tcg_global_mem_new(TCG_TYPE_TL
,
4984 TCG_AREG0
, offsetof(CPUState
, fsr
),
4986 cpu_pc
= tcg_global_mem_new(TCG_TYPE_TL
,
4987 TCG_AREG0
, offsetof(CPUState
, pc
),
4989 cpu_npc
= tcg_global_mem_new(TCG_TYPE_TL
,
4990 TCG_AREG0
, offsetof(CPUState
, npc
),
4992 cpu_y
= tcg_global_mem_new(TCG_TYPE_TL
,
4993 TCG_AREG0
, offsetof(CPUState
, y
), "y");
4994 #ifndef CONFIG_USER_ONLY
4995 cpu_tbr
= tcg_global_mem_new(TCG_TYPE_TL
,
4996 TCG_AREG0
, offsetof(CPUState
, tbr
),
4999 for (i
= 1; i
< 8; i
++)
5000 cpu_gregs
[i
] = tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
5001 offsetof(CPUState
, gregs
[i
]),
5003 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5004 cpu_fpr
[i
] = tcg_global_mem_new(TCG_TYPE_I32
, TCG_AREG0
,
5005 offsetof(CPUState
, fpr
[i
]),
5008 /* register helpers */
5011 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5016 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
5017 unsigned long searched_pc
, int pc_pos
, void *puc
)
5020 env
->pc
= gen_opc_pc
[pc_pos
];
5021 npc
= gen_opc_npc
[pc_pos
];
5023 /* dynamic NPC: already stored */
5024 } else if (npc
== 2) {
5025 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
5026 /* jump PC: use T2 and the jump targets of the translation */
5028 env
->npc
= gen_opc_jump_pc
[0];
5030 env
->npc
= gen_opc_jump_pc
[1];