4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env
, cpu_regwptr
;
42 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
43 static TCGv cpu_psr
, cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
45 #ifndef CONFIG_USER_ONLY
48 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
50 static TCGv cpu_xcc
, cpu_asi
, cpu_fprs
, cpu_gsr
;
51 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
52 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0
, cpu_tmp32
, cpu_tmp64
;
58 /* Floating point registers */
59 static TCGv cpu_fpr
[TARGET_FPREGS
];
61 #include "gen-icount.h"
63 typedef struct DisasContext
{
64 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
70 int address_mask_32bit
;
71 struct TranslationBlock
*tb
;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
/* Sign-extend the low "len" bits of "x" (field widths come from
   GET_FIELDs/GET_FIELD_SPs, so 1 <= len <= 32).
   Note: the extraction lost the width normalization; shifting by "len"
   itself would discard the wrong bits. We shift the field up to the top
   of the word and arithmetic-shift it back down. The left shift is done
   on an unsigned value to avoid signed-overflow undefined behavior. */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;   /* number of unused high-order bits */

    return ((int)((unsigned int)x << shift)) >> shift;
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_DT0(unsigned int src
)
107 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
108 offsetof(CPU_DoubleU
, l
.upper
));
109 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
110 offsetof(CPU_DoubleU
, l
.lower
));
113 static void gen_op_load_fpr_DT1(unsigned int src
)
115 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
116 offsetof(CPU_DoubleU
, l
.upper
));
117 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
118 offsetof(CPU_DoubleU
, l
.lower
));
121 static void gen_op_store_DT0_fpr(unsigned int dst
)
123 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
124 offsetof(CPU_DoubleU
, l
.upper
));
125 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
126 offsetof(CPU_DoubleU
, l
.lower
));
129 static void gen_op_load_fpr_QT0(unsigned int src
)
131 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
132 offsetof(CPU_QuadU
, l
.upmost
));
133 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
134 offsetof(CPU_QuadU
, l
.upper
));
135 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
136 offsetof(CPU_QuadU
, l
.lower
));
137 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
138 offsetof(CPU_QuadU
, l
.lowest
));
141 static void gen_op_load_fpr_QT1(unsigned int src
)
143 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
144 offsetof(CPU_QuadU
, l
.upmost
));
145 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
146 offsetof(CPU_QuadU
, l
.upper
));
147 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
148 offsetof(CPU_QuadU
, l
.lower
));
149 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
150 offsetof(CPU_QuadU
, l
.lowest
));
153 static void gen_op_store_QT0_fpr(unsigned int dst
)
155 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
156 offsetof(CPU_QuadU
, l
.upmost
));
157 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
158 offsetof(CPU_QuadU
, l
.upper
));
159 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
160 offsetof(CPU_QuadU
, l
.lower
));
161 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
162 offsetof(CPU_QuadU
, l
.lowest
));
166 #ifdef CONFIG_USER_ONLY
167 #define supervisor(dc) 0
168 #ifdef TARGET_SPARC64
169 #define hypervisor(dc) 0
172 #define supervisor(dc) (dc->mem_idx >= 1)
173 #ifdef TARGET_SPARC64
174 #define hypervisor(dc) (dc->mem_idx == 2)
179 #ifdef TARGET_SPARC64
181 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
183 #define AM_CHECK(dc) (1)
187 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
189 #ifdef TARGET_SPARC64
191 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
195 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
198 tcg_gen_movi_tl(tn
, 0);
200 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
202 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
206 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
211 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
213 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
217 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
218 target_ulong pc
, target_ulong npc
)
220 TranslationBlock
*tb
;
223 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
224 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num
);
227 tcg_gen_movi_tl(cpu_pc
, pc
);
228 tcg_gen_movi_tl(cpu_npc
, npc
);
229 tcg_gen_exit_tb((long)tb
+ tb_num
);
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc
, pc
);
233 tcg_gen_movi_tl(cpu_npc
, npc
);
239 static inline void gen_mov_reg_N(TCGv reg
, TCGv src
)
241 tcg_gen_extu_i32_tl(reg
, src
);
242 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
243 tcg_gen_andi_tl(reg
, reg
, 0x1);
246 static inline void gen_mov_reg_Z(TCGv reg
, TCGv src
)
248 tcg_gen_extu_i32_tl(reg
, src
);
249 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
250 tcg_gen_andi_tl(reg
, reg
, 0x1);
253 static inline void gen_mov_reg_V(TCGv reg
, TCGv src
)
255 tcg_gen_extu_i32_tl(reg
, src
);
256 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
257 tcg_gen_andi_tl(reg
, reg
, 0x1);
260 static inline void gen_mov_reg_C(TCGv reg
, TCGv src
)
262 tcg_gen_extu_i32_tl(reg
, src
);
263 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
264 tcg_gen_andi_tl(reg
, reg
, 0x1);
267 static inline void gen_cc_clear_icc(void)
269 tcg_gen_movi_i32(cpu_psr
, 0);
272 #ifdef TARGET_SPARC64
273 static inline void gen_cc_clear_xcc(void)
275 tcg_gen_movi_i32(cpu_xcc
, 0);
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
285 static inline void gen_cc_NZ_icc(TCGv dst
)
290 l1
= gen_new_label();
291 l2
= gen_new_label();
292 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
293 tcg_gen_andi_tl(r_temp
, dst
, 0xffffffffULL
);
294 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
295 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
297 tcg_gen_ext_i32_tl(r_temp
, dst
);
298 tcg_gen_brcondi_tl(TCG_COND_GE
, r_temp
, 0, l2
);
299 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
301 tcg_temp_free(r_temp
);
304 #ifdef TARGET_SPARC64
305 static inline void gen_cc_NZ_xcc(TCGv dst
)
309 l1
= gen_new_label();
310 l2
= gen_new_label();
311 tcg_gen_brcondi_tl(TCG_COND_NE
, dst
, 0, l1
);
312 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
314 tcg_gen_brcondi_tl(TCG_COND_GE
, dst
, 0, l2
);
315 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
322 env->psr |= PSR_CARRY;
324 static inline void gen_cc_C_add_icc(TCGv dst
, TCGv src1
)
326 TCGv r_temp1
, r_temp2
;
329 l1
= gen_new_label();
330 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
331 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
332 tcg_gen_andi_tl(r_temp1
, dst
, 0xffffffffULL
);
333 tcg_gen_andi_tl(r_temp2
, src1
, 0xffffffffULL
);
334 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
335 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
337 tcg_temp_free(r_temp1
);
338 tcg_temp_free(r_temp2
);
341 #ifdef TARGET_SPARC64
342 static inline void gen_cc_C_add_xcc(TCGv dst
, TCGv src1
)
346 l1
= gen_new_label();
347 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l1
);
348 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
357 static inline void gen_cc_V_add_icc(TCGv dst
, TCGv src1
, TCGv src2
)
361 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
362 tcg_gen_xor_tl(r_temp
, src1
, src2
);
363 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
364 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
365 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
366 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
367 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
368 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
369 tcg_temp_free(r_temp
);
370 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
373 #ifdef TARGET_SPARC64
374 static inline void gen_cc_V_add_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
378 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
379 tcg_gen_xor_tl(r_temp
, src1
, src2
);
380 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
381 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
382 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
383 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
384 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
385 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
386 tcg_temp_free(r_temp
);
387 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
391 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
393 TCGv r_temp
, r_const
;
396 l1
= gen_new_label();
398 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
399 tcg_gen_xor_tl(r_temp
, src1
, src2
);
400 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
401 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
402 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
403 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
404 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
405 r_const
= tcg_const_i32(TT_TOVF
);
406 tcg_gen_helper_0_1(raise_exception
, r_const
);
407 tcg_temp_free(r_const
);
409 tcg_temp_free(r_temp
);
412 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
416 l1
= gen_new_label();
417 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
418 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
419 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
420 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
424 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
429 l1
= gen_new_label();
430 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
431 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
432 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
433 r_const
= tcg_const_i32(TT_TOVF
);
434 tcg_gen_helper_0_1(raise_exception
, r_const
);
435 tcg_temp_free(r_const
);
439 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
441 tcg_gen_mov_tl(cpu_cc_src
, src1
);
442 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
443 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
445 gen_cc_NZ_icc(cpu_cc_dst
);
446 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
447 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
448 #ifdef TARGET_SPARC64
450 gen_cc_NZ_xcc(cpu_cc_dst
);
451 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
452 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
454 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
457 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
459 tcg_gen_mov_tl(cpu_cc_src
, src1
);
460 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
461 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
462 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
464 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
465 #ifdef TARGET_SPARC64
467 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
469 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
470 gen_cc_NZ_icc(cpu_cc_dst
);
471 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
472 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
473 #ifdef TARGET_SPARC64
474 gen_cc_NZ_xcc(cpu_cc_dst
);
475 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
476 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
478 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
481 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
483 tcg_gen_mov_tl(cpu_cc_src
, src1
);
484 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
485 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
487 gen_cc_NZ_icc(cpu_cc_dst
);
488 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
489 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
490 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
491 #ifdef TARGET_SPARC64
493 gen_cc_NZ_xcc(cpu_cc_dst
);
494 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
495 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
497 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
500 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
502 tcg_gen_mov_tl(cpu_cc_src
, src1
);
503 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
504 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
505 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
506 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
508 gen_cc_NZ_icc(cpu_cc_dst
);
509 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
510 #ifdef TARGET_SPARC64
512 gen_cc_NZ_xcc(cpu_cc_dst
);
513 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
514 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
516 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
521 env->psr |= PSR_CARRY;
523 static inline void gen_cc_C_sub_icc(TCGv src1
, TCGv src2
)
525 TCGv r_temp1
, r_temp2
;
528 l1
= gen_new_label();
529 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
530 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
531 tcg_gen_andi_tl(r_temp1
, src1
, 0xffffffffULL
);
532 tcg_gen_andi_tl(r_temp2
, src2
, 0xffffffffULL
);
533 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
534 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
536 tcg_temp_free(r_temp1
);
537 tcg_temp_free(r_temp2
);
540 #ifdef TARGET_SPARC64
541 static inline void gen_cc_C_sub_xcc(TCGv src1
, TCGv src2
)
545 l1
= gen_new_label();
546 tcg_gen_brcond_tl(TCG_COND_GEU
, src1
, src2
, l1
);
547 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
556 static inline void gen_cc_V_sub_icc(TCGv dst
, TCGv src1
, TCGv src2
)
560 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
561 tcg_gen_xor_tl(r_temp
, src1
, src2
);
562 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
563 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
564 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
565 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
566 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
567 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
568 tcg_temp_free(r_temp
);
571 #ifdef TARGET_SPARC64
572 static inline void gen_cc_V_sub_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
576 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
577 tcg_gen_xor_tl(r_temp
, src1
, src2
);
578 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
579 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
580 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
581 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
582 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
583 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
584 tcg_temp_free(r_temp
);
588 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
590 TCGv r_temp
, r_const
;
593 l1
= gen_new_label();
595 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
596 tcg_gen_xor_tl(r_temp
, src1
, src2
);
597 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
598 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
599 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
600 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
601 r_const
= tcg_const_i32(TT_TOVF
);
602 tcg_gen_helper_0_1(raise_exception
, r_const
);
603 tcg_temp_free(r_const
);
605 tcg_temp_free(r_temp
);
608 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
610 tcg_gen_mov_tl(cpu_cc_src
, src1
);
611 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
612 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
614 gen_cc_NZ_icc(cpu_cc_dst
);
615 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
616 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
617 #ifdef TARGET_SPARC64
619 gen_cc_NZ_xcc(cpu_cc_dst
);
620 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
621 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
623 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
626 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
628 tcg_gen_mov_tl(cpu_cc_src
, src1
);
629 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
630 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
631 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
633 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
634 #ifdef TARGET_SPARC64
636 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
638 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
639 gen_cc_NZ_icc(cpu_cc_dst
);
640 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
641 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
642 #ifdef TARGET_SPARC64
643 gen_cc_NZ_xcc(cpu_cc_dst
);
644 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
645 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
647 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
650 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
652 tcg_gen_mov_tl(cpu_cc_src
, src1
);
653 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
654 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
656 gen_cc_NZ_icc(cpu_cc_dst
);
657 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
658 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
659 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
660 #ifdef TARGET_SPARC64
662 gen_cc_NZ_xcc(cpu_cc_dst
);
663 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
664 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
666 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
669 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
671 tcg_gen_mov_tl(cpu_cc_src
, src1
);
672 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
673 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
674 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
675 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
677 gen_cc_NZ_icc(cpu_cc_dst
);
678 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
679 #ifdef TARGET_SPARC64
681 gen_cc_NZ_xcc(cpu_cc_dst
);
682 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
683 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
685 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
688 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
693 l1
= gen_new_label();
694 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
700 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
701 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
702 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
703 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
704 tcg_gen_movi_tl(cpu_cc_src2
, 0);
708 // env->y = (b2 << 31) | (env->y >> 1);
709 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
710 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
711 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
712 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
713 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
714 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
717 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
718 gen_mov_reg_V(r_temp
, cpu_psr
);
719 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
720 tcg_temp_free(r_temp
);
722 // T0 = (b1 << 31) | (T0 >> 1);
724 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
725 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
726 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
728 /* do addition and update flags */
729 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
732 gen_cc_NZ_icc(cpu_cc_dst
);
733 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
734 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
735 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
738 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
740 TCGv r_temp
, r_temp2
;
742 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
743 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
745 tcg_gen_extu_i32_i64(r_temp
, src2
);
746 tcg_gen_extu_i32_i64(r_temp2
, src1
);
747 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
749 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
750 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
751 tcg_temp_free(r_temp
);
752 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
753 #ifdef TARGET_SPARC64
754 tcg_gen_mov_i64(dst
, r_temp2
);
756 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
758 tcg_temp_free(r_temp2
);
761 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
763 TCGv r_temp
, r_temp2
;
765 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
766 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
768 tcg_gen_ext_i32_i64(r_temp
, src2
);
769 tcg_gen_ext_i32_i64(r_temp2
, src1
);
770 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
772 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
773 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
774 tcg_temp_free(r_temp
);
775 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
776 #ifdef TARGET_SPARC64
777 tcg_gen_mov_i64(dst
, r_temp2
);
779 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
781 tcg_temp_free(r_temp2
);
784 #ifdef TARGET_SPARC64
785 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
790 l1
= gen_new_label();
791 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
792 r_const
= tcg_const_i32(TT_DIV_ZERO
);
793 tcg_gen_helper_0_1(raise_exception
, r_const
);
794 tcg_temp_free(r_const
);
798 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
802 l1
= gen_new_label();
803 l2
= gen_new_label();
804 tcg_gen_mov_tl(cpu_cc_src
, src1
);
805 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
806 gen_trap_ifdivzero_tl(cpu_cc_src2
);
807 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
808 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
809 tcg_gen_movi_i64(dst
, INT64_MIN
);
812 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
817 static inline void gen_op_div_cc(TCGv dst
)
821 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
823 gen_cc_NZ_icc(cpu_cc_dst
);
824 l1
= gen_new_label();
825 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cc_src2
, 0, l1
);
826 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
830 static inline void gen_op_logic_cc(TCGv dst
)
832 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
835 gen_cc_NZ_icc(cpu_cc_dst
);
836 #ifdef TARGET_SPARC64
838 gen_cc_NZ_xcc(cpu_cc_dst
);
843 static inline void gen_op_eval_ba(TCGv dst
)
845 tcg_gen_movi_tl(dst
, 1);
849 static inline void gen_op_eval_be(TCGv dst
, TCGv src
)
851 gen_mov_reg_Z(dst
, src
);
855 static inline void gen_op_eval_ble(TCGv dst
, TCGv src
)
857 gen_mov_reg_N(cpu_tmp0
, src
);
858 gen_mov_reg_V(dst
, src
);
859 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
860 gen_mov_reg_Z(cpu_tmp0
, src
);
861 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
865 static inline void gen_op_eval_bl(TCGv dst
, TCGv src
)
867 gen_mov_reg_V(cpu_tmp0
, src
);
868 gen_mov_reg_N(dst
, src
);
869 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
873 static inline void gen_op_eval_bleu(TCGv dst
, TCGv src
)
875 gen_mov_reg_Z(cpu_tmp0
, src
);
876 gen_mov_reg_C(dst
, src
);
877 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
881 static inline void gen_op_eval_bcs(TCGv dst
, TCGv src
)
883 gen_mov_reg_C(dst
, src
);
887 static inline void gen_op_eval_bvs(TCGv dst
, TCGv src
)
889 gen_mov_reg_V(dst
, src
);
893 static inline void gen_op_eval_bn(TCGv dst
)
895 tcg_gen_movi_tl(dst
, 0);
899 static inline void gen_op_eval_bneg(TCGv dst
, TCGv src
)
901 gen_mov_reg_N(dst
, src
);
905 static inline void gen_op_eval_bne(TCGv dst
, TCGv src
)
907 gen_mov_reg_Z(dst
, src
);
908 tcg_gen_xori_tl(dst
, dst
, 0x1);
912 static inline void gen_op_eval_bg(TCGv dst
, TCGv src
)
914 gen_mov_reg_N(cpu_tmp0
, src
);
915 gen_mov_reg_V(dst
, src
);
916 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
917 gen_mov_reg_Z(cpu_tmp0
, src
);
918 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
919 tcg_gen_xori_tl(dst
, dst
, 0x1);
923 static inline void gen_op_eval_bge(TCGv dst
, TCGv src
)
925 gen_mov_reg_V(cpu_tmp0
, src
);
926 gen_mov_reg_N(dst
, src
);
927 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
928 tcg_gen_xori_tl(dst
, dst
, 0x1);
932 static inline void gen_op_eval_bgu(TCGv dst
, TCGv src
)
934 gen_mov_reg_Z(cpu_tmp0
, src
);
935 gen_mov_reg_C(dst
, src
);
936 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
937 tcg_gen_xori_tl(dst
, dst
, 0x1);
941 static inline void gen_op_eval_bcc(TCGv dst
, TCGv src
)
943 gen_mov_reg_C(dst
, src
);
944 tcg_gen_xori_tl(dst
, dst
, 0x1);
948 static inline void gen_op_eval_bpos(TCGv dst
, TCGv src
)
950 gen_mov_reg_N(dst
, src
);
951 tcg_gen_xori_tl(dst
, dst
, 0x1);
955 static inline void gen_op_eval_bvc(TCGv dst
, TCGv src
)
957 gen_mov_reg_V(dst
, src
);
958 tcg_gen_xori_tl(dst
, dst
, 0x1);
962 FPSR bit field FCC1 | FCC0:
968 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
969 unsigned int fcc_offset
)
971 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
972 tcg_gen_andi_tl(reg
, reg
, 0x1);
975 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
976 unsigned int fcc_offset
)
978 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
979 tcg_gen_andi_tl(reg
, reg
, 0x1);
983 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
984 unsigned int fcc_offset
)
986 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
987 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
988 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
991 // 1 or 2: FCC0 ^ FCC1
992 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
993 unsigned int fcc_offset
)
995 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
996 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
997 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1001 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
1002 unsigned int fcc_offset
)
1004 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1008 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
1009 unsigned int fcc_offset
)
1011 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1012 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1013 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1014 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1018 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
1019 unsigned int fcc_offset
)
1021 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1025 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
1026 unsigned int fcc_offset
)
1028 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1029 tcg_gen_xori_tl(dst
, dst
, 0x1);
1030 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1031 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1035 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
1036 unsigned int fcc_offset
)
1038 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1039 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1040 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1043 // 0: !(FCC0 | FCC1)
1044 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1045 unsigned int fcc_offset
)
1047 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1048 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1049 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1050 tcg_gen_xori_tl(dst
, dst
, 0x1);
1053 // 0 or 3: !(FCC0 ^ FCC1)
1054 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1055 unsigned int fcc_offset
)
1057 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1058 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1059 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1060 tcg_gen_xori_tl(dst
, dst
, 0x1);
1064 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1065 unsigned int fcc_offset
)
1067 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1068 tcg_gen_xori_tl(dst
, dst
, 0x1);
1071 // !1: !(FCC0 & !FCC1)
1072 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1073 unsigned int fcc_offset
)
1075 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1076 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1077 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1078 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1079 tcg_gen_xori_tl(dst
, dst
, 0x1);
1083 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1084 unsigned int fcc_offset
)
1086 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1087 tcg_gen_xori_tl(dst
, dst
, 0x1);
1090 // !2: !(!FCC0 & FCC1)
1091 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1092 unsigned int fcc_offset
)
1094 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1095 tcg_gen_xori_tl(dst
, dst
, 0x1);
1096 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1097 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1098 tcg_gen_xori_tl(dst
, dst
, 0x1);
1101 // !3: !(FCC0 & FCC1)
1102 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1103 unsigned int fcc_offset
)
1105 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1106 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1107 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1108 tcg_gen_xori_tl(dst
, dst
, 0x1);
1111 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1112 target_ulong pc2
, TCGv r_cond
)
1116 l1
= gen_new_label();
1118 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1120 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1123 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1126 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1127 target_ulong pc2
, TCGv r_cond
)
1131 l1
= gen_new_label();
1133 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1135 gen_goto_tb(dc
, 0, pc2
, pc1
);
1138 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1141 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1146 l1
= gen_new_label();
1147 l2
= gen_new_label();
1149 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1151 tcg_gen_movi_tl(cpu_npc
, npc1
);
1155 tcg_gen_movi_tl(cpu_npc
, npc2
);
1159 /* call this function before using the condition register as it may
1160 have been set for a jump */
1161 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1163 if (dc
->npc
== JUMP_PC
) {
1164 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1165 dc
->npc
= DYNAMIC_PC
;
1169 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1171 if (dc
->npc
== JUMP_PC
) {
1172 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1173 dc
->npc
= DYNAMIC_PC
;
1174 } else if (dc
->npc
!= DYNAMIC_PC
) {
1175 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1179 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1181 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1185 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1187 if (dc
->npc
== JUMP_PC
) {
1188 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1189 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1190 dc
->pc
= DYNAMIC_PC
;
1191 } else if (dc
->npc
== DYNAMIC_PC
) {
1192 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1193 dc
->pc
= DYNAMIC_PC
;
1199 static inline void gen_op_next_insn(void)
1201 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1202 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1205 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1209 #ifdef TARGET_SPARC64
1219 gen_op_eval_bn(r_dst
);
1222 gen_op_eval_be(r_dst
, r_src
);
1225 gen_op_eval_ble(r_dst
, r_src
);
1228 gen_op_eval_bl(r_dst
, r_src
);
1231 gen_op_eval_bleu(r_dst
, r_src
);
1234 gen_op_eval_bcs(r_dst
, r_src
);
1237 gen_op_eval_bneg(r_dst
, r_src
);
1240 gen_op_eval_bvs(r_dst
, r_src
);
1243 gen_op_eval_ba(r_dst
);
1246 gen_op_eval_bne(r_dst
, r_src
);
1249 gen_op_eval_bg(r_dst
, r_src
);
1252 gen_op_eval_bge(r_dst
, r_src
);
1255 gen_op_eval_bgu(r_dst
, r_src
);
1258 gen_op_eval_bcc(r_dst
, r_src
);
1261 gen_op_eval_bpos(r_dst
, r_src
);
1264 gen_op_eval_bvc(r_dst
, r_src
);
1269 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1271 unsigned int offset
;
1291 gen_op_eval_bn(r_dst
);
1294 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1297 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1300 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1303 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1306 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1309 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1312 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1315 gen_op_eval_ba(r_dst
);
1318 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1321 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1324 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1327 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1330 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1333 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1336 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1341 #ifdef TARGET_SPARC64
1343 static const int gen_tcg_cond_reg
[8] = {
1354 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1358 l1
= gen_new_label();
1359 tcg_gen_movi_tl(r_dst
, 0);
1360 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1361 tcg_gen_movi_tl(r_dst
, 1);
1366 /* XXX: potentially incorrect if dynamic npc */
1367 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1370 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1371 target_ulong target
= dc
->pc
+ offset
;
1374 /* unconditional not taken */
1376 dc
->pc
= dc
->npc
+ 4;
1377 dc
->npc
= dc
->pc
+ 4;
1380 dc
->npc
= dc
->pc
+ 4;
1382 } else if (cond
== 0x8) {
1383 /* unconditional taken */
1386 dc
->npc
= dc
->pc
+ 4;
1392 flush_cond(dc
, r_cond
);
1393 gen_cond(r_cond
, cc
, cond
);
1395 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1399 dc
->jump_pc
[0] = target
;
1400 dc
->jump_pc
[1] = dc
->npc
+ 4;
1406 /* XXX: potentially incorrect if dynamic npc */
1407 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1410 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1411 target_ulong target
= dc
->pc
+ offset
;
1414 /* unconditional not taken */
1416 dc
->pc
= dc
->npc
+ 4;
1417 dc
->npc
= dc
->pc
+ 4;
1420 dc
->npc
= dc
->pc
+ 4;
1422 } else if (cond
== 0x8) {
1423 /* unconditional taken */
1426 dc
->npc
= dc
->pc
+ 4;
1432 flush_cond(dc
, r_cond
);
1433 gen_fcond(r_cond
, cc
, cond
);
1435 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1439 dc
->jump_pc
[0] = target
;
1440 dc
->jump_pc
[1] = dc
->npc
+ 4;
1446 #ifdef TARGET_SPARC64
1447 /* XXX: potentially incorrect if dynamic npc */
1448 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1449 TCGv r_cond
, TCGv r_reg
)
1451 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1452 target_ulong target
= dc
->pc
+ offset
;
1454 flush_cond(dc
, r_cond
);
1455 gen_cond_reg(r_cond
, cond
, r_reg
);
1457 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1461 dc
->jump_pc
[0] = target
;
1462 dc
->jump_pc
[1] = dc
->npc
+ 4;
1467 static GenOpFunc
* const gen_fcmpd
[4] = {
1474 static GenOpFunc
* const gen_fcmpq
[4] = {
1481 static GenOpFunc
* const gen_fcmped
[4] = {
1488 static GenOpFunc
* const gen_fcmpeq
[4] = {
1495 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1499 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1502 tcg_gen_helper_0_2(helper_fcmps_fcc1
, r_rs1
, r_rs2
);
1505 tcg_gen_helper_0_2(helper_fcmps_fcc2
, r_rs1
, r_rs2
);
1508 tcg_gen_helper_0_2(helper_fcmps_fcc3
, r_rs1
, r_rs2
);
1513 static inline void gen_op_fcmpd(int fccno
)
1515 tcg_gen_helper_0_0(gen_fcmpd
[fccno
]);
1518 static inline void gen_op_fcmpq(int fccno
)
1520 tcg_gen_helper_0_0(gen_fcmpq
[fccno
]);
1523 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1527 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1530 tcg_gen_helper_0_2(helper_fcmpes_fcc1
, r_rs1
, r_rs2
);
1533 tcg_gen_helper_0_2(helper_fcmpes_fcc2
, r_rs1
, r_rs2
);
1536 tcg_gen_helper_0_2(helper_fcmpes_fcc3
, r_rs1
, r_rs2
);
1541 static inline void gen_op_fcmped(int fccno
)
1543 tcg_gen_helper_0_0(gen_fcmped
[fccno
]);
1546 static inline void gen_op_fcmpeq(int fccno
)
1548 tcg_gen_helper_0_0(gen_fcmpeq
[fccno
]);
1553 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1555 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1558 static inline void gen_op_fcmpd(int fccno
)
1560 tcg_gen_helper_0_0(helper_fcmpd
);
1563 static inline void gen_op_fcmpq(int fccno
)
1565 tcg_gen_helper_0_0(helper_fcmpq
);
1568 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1570 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1573 static inline void gen_op_fcmped(int fccno
)
1575 tcg_gen_helper_0_0(helper_fcmped
);
1578 static inline void gen_op_fcmpeq(int fccno
)
1580 tcg_gen_helper_0_0(helper_fcmpeq
);
1584 static inline void gen_op_fpexception_im(int fsr_flags
)
1588 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1589 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1590 r_const
= tcg_const_i32(TT_FP_EXCP
);
1591 tcg_gen_helper_0_1(raise_exception
, r_const
);
1592 tcg_temp_free(r_const
);
1595 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1597 #if !defined(CONFIG_USER_ONLY)
1598 if (!dc
->fpu_enabled
) {
1601 save_state(dc
, r_cond
);
1602 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1603 tcg_gen_helper_0_1(raise_exception
, r_const
);
1604 tcg_temp_free(r_const
);
1612 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1614 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
1617 static inline void gen_clear_float_exceptions(void)
1619 tcg_gen_helper_0_0(helper_clear_float_exceptions
);
1623 #ifdef TARGET_SPARC64
1624 static inline TCGv
gen_get_asi(int insn
, TCGv r_addr
)
1630 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1631 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1633 asi
= GET_FIELD(insn
, 19, 26);
1634 r_asi
= tcg_const_i32(asi
);
1639 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1642 TCGv r_asi
, r_size
, r_sign
;
1644 r_asi
= gen_get_asi(insn
, addr
);
1645 r_size
= tcg_const_i32(size
);
1646 r_sign
= tcg_const_i32(sign
);
1647 tcg_gen_helper_1_4(helper_ld_asi
, dst
, addr
, r_asi
, r_size
, r_sign
);
1648 tcg_temp_free(r_sign
);
1649 tcg_temp_free(r_size
);
1650 tcg_temp_free(r_asi
);
1653 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1657 r_asi
= gen_get_asi(insn
, addr
);
1658 r_size
= tcg_const_i32(size
);
1659 tcg_gen_helper_0_4(helper_st_asi
, addr
, src
, r_asi
, r_size
);
1660 tcg_temp_free(r_size
);
1661 tcg_temp_free(r_asi
);
1664 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1666 TCGv r_asi
, r_size
, r_rd
;
1668 r_asi
= gen_get_asi(insn
, addr
);
1669 r_size
= tcg_const_i32(size
);
1670 r_rd
= tcg_const_i32(rd
);
1671 tcg_gen_helper_0_4(helper_ldf_asi
, addr
, r_asi
, r_size
, r_rd
);
1672 tcg_temp_free(r_rd
);
1673 tcg_temp_free(r_size
);
1674 tcg_temp_free(r_asi
);
1677 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1679 TCGv r_asi
, r_size
, r_rd
;
1681 r_asi
= gen_get_asi(insn
, addr
);
1682 r_size
= tcg_const_i32(size
);
1683 r_rd
= tcg_const_i32(rd
);
1684 tcg_gen_helper_0_4(helper_stf_asi
, addr
, r_asi
, r_size
, r_rd
);
1685 tcg_temp_free(r_rd
);
1686 tcg_temp_free(r_size
);
1687 tcg_temp_free(r_asi
);
1690 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1692 TCGv r_asi
, r_size
, r_sign
;
1694 r_asi
= gen_get_asi(insn
, addr
);
1695 r_size
= tcg_const_i32(4);
1696 r_sign
= tcg_const_i32(0);
1697 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1698 tcg_temp_free(r_sign
);
1699 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1700 tcg_temp_free(r_size
);
1701 tcg_temp_free(r_asi
);
1702 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1705 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1709 r_asi
= gen_get_asi(insn
, addr
);
1710 r_rd
= tcg_const_i32(rd
);
1711 tcg_gen_helper_0_3(helper_ldda_asi
, addr
, r_asi
, r_rd
);
1712 tcg_temp_free(r_rd
);
1713 tcg_temp_free(r_asi
);
1716 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1720 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1721 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1722 r_asi
= gen_get_asi(insn
, addr
);
1723 r_size
= tcg_const_i32(8);
1724 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1725 tcg_temp_free(r_size
);
1726 tcg_temp_free(r_asi
);
1729 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1734 r_val1
= tcg_temp_new(TCG_TYPE_TL
);
1735 gen_movl_reg_TN(rd
, r_val1
);
1736 r_asi
= gen_get_asi(insn
, addr
);
1737 tcg_gen_helper_1_4(helper_cas_asi
, dst
, addr
, r_val1
, val2
, r_asi
);
1738 tcg_temp_free(r_asi
);
1739 tcg_temp_free(r_val1
);
1742 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1747 gen_movl_reg_TN(rd
, cpu_tmp64
);
1748 r_asi
= gen_get_asi(insn
, addr
);
1749 tcg_gen_helper_1_4(helper_casx_asi
, dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1750 tcg_temp_free(r_asi
);
1753 #elif !defined(CONFIG_USER_ONLY)
1755 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1758 TCGv r_asi
, r_size
, r_sign
;
1760 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1761 r_size
= tcg_const_i32(size
);
1762 r_sign
= tcg_const_i32(sign
);
1763 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1764 tcg_temp_free(r_sign
);
1765 tcg_temp_free(r_size
);
1766 tcg_temp_free(r_asi
);
1767 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1770 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1774 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1775 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1776 r_size
= tcg_const_i32(size
);
1777 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1778 tcg_temp_free(r_size
);
1779 tcg_temp_free(r_asi
);
1782 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1784 TCGv r_asi
, r_size
, r_sign
;
1786 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1787 r_size
= tcg_const_i32(4);
1788 r_sign
= tcg_const_i32(0);
1789 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1790 tcg_temp_free(r_sign
);
1791 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1792 tcg_temp_free(r_size
);
1793 tcg_temp_free(r_asi
);
1794 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1797 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1799 TCGv r_asi
, r_size
, r_sign
;
1801 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1802 r_size
= tcg_const_i32(8);
1803 r_sign
= tcg_const_i32(0);
1804 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1805 tcg_temp_free(r_sign
);
1806 tcg_temp_free(r_size
);
1807 tcg_temp_free(r_asi
);
1808 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1809 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1810 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1811 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1812 gen_movl_TN_reg(rd
, hi
);
1815 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1819 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1820 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1821 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1822 r_size
= tcg_const_i32(8);
1823 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1824 tcg_temp_free(r_size
);
1825 tcg_temp_free(r_asi
);
1829 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1830 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1832 TCGv r_val
, r_asi
, r_size
;
1834 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1836 r_val
= tcg_const_i64(0xffULL
);
1837 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1838 r_size
= tcg_const_i32(1);
1839 tcg_gen_helper_0_4(helper_st_asi
, addr
, r_val
, r_asi
, r_size
);
1840 tcg_temp_free(r_size
);
1841 tcg_temp_free(r_asi
);
1842 tcg_temp_free(r_val
);
1846 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1851 rs1
= GET_FIELD(insn
, 13, 17);
1853 r_rs1
= tcg_const_tl(0); // XXX how to free?
1855 r_rs1
= cpu_gregs
[rs1
];
1857 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1861 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1866 if (IS_IMM
) { /* immediate */
1867 rs2
= GET_FIELDs(insn
, 19, 31);
1868 r_rs2
= tcg_const_tl((int)rs2
); // XXX how to free?
1869 } else { /* register */
1870 rs2
= GET_FIELD(insn
, 27, 31);
1872 r_rs2
= tcg_const_tl(0); // XXX how to free?
1874 r_rs2
= cpu_gregs
[rs2
];
1876 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
/* Abort decoding of the current instruction if the CPU model lacks the
   given feature.  NOTE(review): the goto targets (illegal_insn /
   nfpu_insn labels in disas_sparc_insn) are reconstructed — verify. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1888 /* before an instruction, dc->pc must be static */
1889 static void disas_sparc_insn(DisasContext
* dc
)
1891 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1893 if (unlikely(loglevel
& CPU_LOG_TB_OP
))
1894 tcg_gen_debug_insn_start(dc
->pc
);
1895 insn
= ldl_code(dc
->pc
);
1896 opc
= GET_FIELD(insn
, 0, 1);
1898 rd
= GET_FIELD(insn
, 2, 6);
1900 cpu_src1
= tcg_temp_new(TCG_TYPE_TL
); // const
1901 cpu_src2
= tcg_temp_new(TCG_TYPE_TL
); // const
1904 case 0: /* branches/sethi */
1906 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1909 #ifdef TARGET_SPARC64
1910 case 0x1: /* V9 BPcc */
1914 target
= GET_FIELD_SP(insn
, 0, 18);
1915 target
= sign_extend(target
, 18);
1917 cc
= GET_FIELD_SP(insn
, 20, 21);
1919 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1921 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1926 case 0x3: /* V9 BPr */
1928 target
= GET_FIELD_SP(insn
, 0, 13) |
1929 (GET_FIELD_SP(insn
, 20, 21) << 14);
1930 target
= sign_extend(target
, 16);
1932 cpu_src1
= get_src1(insn
, cpu_src1
);
1933 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1936 case 0x5: /* V9 FBPcc */
1938 int cc
= GET_FIELD_SP(insn
, 20, 21);
1939 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1941 target
= GET_FIELD_SP(insn
, 0, 18);
1942 target
= sign_extend(target
, 19);
1944 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1948 case 0x7: /* CBN+x */
1953 case 0x2: /* BN+x */
1955 target
= GET_FIELD(insn
, 10, 31);
1956 target
= sign_extend(target
, 22);
1958 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1961 case 0x6: /* FBN+x */
1963 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1965 target
= GET_FIELD(insn
, 10, 31);
1966 target
= sign_extend(target
, 22);
1968 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1971 case 0x4: /* SETHI */
1973 uint32_t value
= GET_FIELD(insn
, 10, 31);
1976 r_const
= tcg_const_tl(value
<< 10);
1977 gen_movl_TN_reg(rd
, r_const
);
1978 tcg_temp_free(r_const
);
1981 case 0x0: /* UNIMPL */
1990 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1993 r_const
= tcg_const_tl(dc
->pc
);
1994 gen_movl_TN_reg(15, r_const
);
1995 tcg_temp_free(r_const
);
1997 gen_mov_pc_npc(dc
, cpu_cond
);
2001 case 2: /* FPU & Logical Operations */
2003 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2004 if (xop
== 0x3a) { /* generate trap */
2007 cpu_src1
= get_src1(insn
, cpu_src1
);
2009 rs2
= GET_FIELD(insn
, 25, 31);
2010 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2012 rs2
= GET_FIELD(insn
, 27, 31);
2014 gen_movl_reg_TN(rs2
, cpu_src2
);
2015 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2017 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2019 cond
= GET_FIELD(insn
, 3, 6);
2021 save_state(dc
, cpu_cond
);
2022 tcg_gen_helper_0_1(helper_trap
, cpu_dst
);
2023 } else if (cond
!= 0) {
2024 TCGv r_cond
= tcg_temp_new(TCG_TYPE_TL
);
2025 #ifdef TARGET_SPARC64
2027 int cc
= GET_FIELD_SP(insn
, 11, 12);
2029 save_state(dc
, cpu_cond
);
2031 gen_cond(r_cond
, 0, cond
);
2033 gen_cond(r_cond
, 1, cond
);
2037 save_state(dc
, cpu_cond
);
2038 gen_cond(r_cond
, 0, cond
);
2040 tcg_gen_helper_0_2(helper_trapcc
, cpu_dst
, r_cond
);
2041 tcg_temp_free(r_cond
);
2047 } else if (xop
== 0x28) {
2048 rs1
= GET_FIELD(insn
, 13, 17);
2051 #ifndef TARGET_SPARC64
2052 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2053 manual, rdy on the microSPARC
2055 case 0x0f: /* stbar in the SPARCv8 manual,
2056 rdy on the microSPARC II */
2057 case 0x10 ... 0x1f: /* implementation-dependent in the
2058 SPARCv8 manual, rdy on the
2061 gen_movl_TN_reg(rd
, cpu_y
);
2063 #ifdef TARGET_SPARC64
2064 case 0x2: /* V9 rdccr */
2065 tcg_gen_helper_1_0(helper_rdccr
, cpu_dst
);
2066 gen_movl_TN_reg(rd
, cpu_dst
);
2068 case 0x3: /* V9 rdasi */
2069 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2070 gen_movl_TN_reg(rd
, cpu_dst
);
2072 case 0x4: /* V9 rdtick */
2076 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2077 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2078 offsetof(CPUState
, tick
));
2079 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2081 tcg_temp_free(r_tickptr
);
2082 gen_movl_TN_reg(rd
, cpu_dst
);
2085 case 0x5: /* V9 rdpc */
2089 r_const
= tcg_const_tl(dc
->pc
);
2090 gen_movl_TN_reg(rd
, r_const
);
2091 tcg_temp_free(r_const
);
2094 case 0x6: /* V9 rdfprs */
2095 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2096 gen_movl_TN_reg(rd
, cpu_dst
);
2098 case 0xf: /* V9 membar */
2099 break; /* no effect */
2100 case 0x13: /* Graphics Status */
2101 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2103 gen_movl_TN_reg(rd
, cpu_gsr
);
2105 case 0x17: /* Tick compare */
2106 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2108 case 0x18: /* System tick */
2112 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2113 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2114 offsetof(CPUState
, stick
));
2115 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2117 tcg_temp_free(r_tickptr
);
2118 gen_movl_TN_reg(rd
, cpu_dst
);
2121 case 0x19: /* System tick compare */
2122 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2124 case 0x10: /* Performance Control */
2125 case 0x11: /* Performance Instrumentation Counter */
2126 case 0x12: /* Dispatch Control */
2127 case 0x14: /* Softint set, WO */
2128 case 0x15: /* Softint clear, WO */
2129 case 0x16: /* Softint write */
2134 #if !defined(CONFIG_USER_ONLY)
2135 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2136 #ifndef TARGET_SPARC64
2137 if (!supervisor(dc
))
2139 tcg_gen_helper_1_0(helper_rdpsr
, cpu_dst
);
2141 CHECK_IU_FEATURE(dc
, HYPV
);
2142 if (!hypervisor(dc
))
2144 rs1
= GET_FIELD(insn
, 13, 17);
2147 // gen_op_rdhpstate();
2150 // gen_op_rdhtstate();
2153 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2156 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2159 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2161 case 31: // hstick_cmpr
2162 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2168 gen_movl_TN_reg(rd
, cpu_dst
);
2170 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2171 if (!supervisor(dc
))
2173 #ifdef TARGET_SPARC64
2174 rs1
= GET_FIELD(insn
, 13, 17);
2180 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2181 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2182 offsetof(CPUState
, tsptr
));
2183 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2184 offsetof(trap_state
, tpc
));
2185 tcg_temp_free(r_tsptr
);
2192 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2193 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2194 offsetof(CPUState
, tsptr
));
2195 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2196 offsetof(trap_state
, tnpc
));
2197 tcg_temp_free(r_tsptr
);
2204 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2205 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2206 offsetof(CPUState
, tsptr
));
2207 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2208 offsetof(trap_state
, tstate
));
2209 tcg_temp_free(r_tsptr
);
2216 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2217 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2218 offsetof(CPUState
, tsptr
));
2219 tcg_gen_ld_i32(cpu_tmp0
, r_tsptr
,
2220 offsetof(trap_state
, tt
));
2221 tcg_temp_free(r_tsptr
);
2228 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2229 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2230 offsetof(CPUState
, tick
));
2231 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_tmp0
,
2233 gen_movl_TN_reg(rd
, cpu_tmp0
);
2234 tcg_temp_free(r_tickptr
);
2238 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2241 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2242 offsetof(CPUSPARCState
, pstate
));
2243 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2246 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2247 offsetof(CPUSPARCState
, tl
));
2248 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2251 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2252 offsetof(CPUSPARCState
, psrpil
));
2253 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2256 tcg_gen_helper_1_0(helper_rdcwp
, cpu_tmp0
);
2259 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2260 offsetof(CPUSPARCState
, cansave
));
2261 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2263 case 11: // canrestore
2264 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2265 offsetof(CPUSPARCState
, canrestore
));
2266 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2268 case 12: // cleanwin
2269 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2270 offsetof(CPUSPARCState
, cleanwin
));
2271 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2273 case 13: // otherwin
2274 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2275 offsetof(CPUSPARCState
, otherwin
));
2276 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2279 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2280 offsetof(CPUSPARCState
, wstate
));
2281 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2283 case 16: // UA2005 gl
2284 CHECK_IU_FEATURE(dc
, GL
);
2285 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2286 offsetof(CPUSPARCState
, gl
));
2287 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2289 case 26: // UA2005 strand status
2290 CHECK_IU_FEATURE(dc
, HYPV
);
2291 if (!hypervisor(dc
))
2293 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_ssr
);
2296 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2303 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2305 gen_movl_TN_reg(rd
, cpu_tmp0
);
2307 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2308 #ifdef TARGET_SPARC64
2309 save_state(dc
, cpu_cond
);
2310 tcg_gen_helper_0_0(helper_flushw
);
2312 if (!supervisor(dc
))
2314 gen_movl_TN_reg(rd
, cpu_tbr
);
2318 } else if (xop
== 0x34) { /* FPU Operations */
2319 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2321 gen_op_clear_ieee_excp_and_FTT();
2322 rs1
= GET_FIELD(insn
, 13, 17);
2323 rs2
= GET_FIELD(insn
, 27, 31);
2324 xop
= GET_FIELD(insn
, 18, 26);
2326 case 0x1: /* fmovs */
2327 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2329 case 0x5: /* fnegs */
2330 tcg_gen_helper_1_1(helper_fnegs
, cpu_fpr
[rd
],
2333 case 0x9: /* fabss */
2334 tcg_gen_helper_1_1(helper_fabss
, cpu_fpr
[rd
],
2337 case 0x29: /* fsqrts */
2338 CHECK_FPU_FEATURE(dc
, FSQRT
);
2339 gen_clear_float_exceptions();
2340 tcg_gen_helper_1_1(helper_fsqrts
, cpu_tmp32
,
2342 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2343 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2345 case 0x2a: /* fsqrtd */
2346 CHECK_FPU_FEATURE(dc
, FSQRT
);
2347 gen_op_load_fpr_DT1(DFPREG(rs2
));
2348 gen_clear_float_exceptions();
2349 tcg_gen_helper_0_0(helper_fsqrtd
);
2350 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2351 gen_op_store_DT0_fpr(DFPREG(rd
));
2353 case 0x2b: /* fsqrtq */
2354 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2355 gen_op_load_fpr_QT1(QFPREG(rs2
));
2356 gen_clear_float_exceptions();
2357 tcg_gen_helper_0_0(helper_fsqrtq
);
2358 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2359 gen_op_store_QT0_fpr(QFPREG(rd
));
2361 case 0x41: /* fadds */
2362 gen_clear_float_exceptions();
2363 tcg_gen_helper_1_2(helper_fadds
, cpu_tmp32
,
2364 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2365 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2366 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2369 gen_op_load_fpr_DT0(DFPREG(rs1
));
2370 gen_op_load_fpr_DT1(DFPREG(rs2
));
2371 gen_clear_float_exceptions();
2372 tcg_gen_helper_0_0(helper_faddd
);
2373 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2374 gen_op_store_DT0_fpr(DFPREG(rd
));
2376 case 0x43: /* faddq */
2377 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2378 gen_op_load_fpr_QT0(QFPREG(rs1
));
2379 gen_op_load_fpr_QT1(QFPREG(rs2
));
2380 gen_clear_float_exceptions();
2381 tcg_gen_helper_0_0(helper_faddq
);
2382 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2383 gen_op_store_QT0_fpr(QFPREG(rd
));
2385 case 0x45: /* fsubs */
2386 gen_clear_float_exceptions();
2387 tcg_gen_helper_1_2(helper_fsubs
, cpu_tmp32
,
2388 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2389 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2390 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2393 gen_op_load_fpr_DT0(DFPREG(rs1
));
2394 gen_op_load_fpr_DT1(DFPREG(rs2
));
2395 gen_clear_float_exceptions();
2396 tcg_gen_helper_0_0(helper_fsubd
);
2397 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2398 gen_op_store_DT0_fpr(DFPREG(rd
));
2400 case 0x47: /* fsubq */
2401 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2402 gen_op_load_fpr_QT0(QFPREG(rs1
));
2403 gen_op_load_fpr_QT1(QFPREG(rs2
));
2404 gen_clear_float_exceptions();
2405 tcg_gen_helper_0_0(helper_fsubq
);
2406 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2407 gen_op_store_QT0_fpr(QFPREG(rd
));
2409 case 0x49: /* fmuls */
2410 CHECK_FPU_FEATURE(dc
, FMUL
);
2411 gen_clear_float_exceptions();
2412 tcg_gen_helper_1_2(helper_fmuls
, cpu_tmp32
,
2413 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2414 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2415 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2417 case 0x4a: /* fmuld */
2418 CHECK_FPU_FEATURE(dc
, FMUL
);
2419 gen_op_load_fpr_DT0(DFPREG(rs1
));
2420 gen_op_load_fpr_DT1(DFPREG(rs2
));
2421 gen_clear_float_exceptions();
2422 tcg_gen_helper_0_0(helper_fmuld
);
2423 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2424 gen_op_store_DT0_fpr(DFPREG(rd
));
2426 case 0x4b: /* fmulq */
2427 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2428 CHECK_FPU_FEATURE(dc
, FMUL
);
2429 gen_op_load_fpr_QT0(QFPREG(rs1
));
2430 gen_op_load_fpr_QT1(QFPREG(rs2
));
2431 gen_clear_float_exceptions();
2432 tcg_gen_helper_0_0(helper_fmulq
);
2433 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2434 gen_op_store_QT0_fpr(QFPREG(rd
));
2436 case 0x4d: /* fdivs */
2437 gen_clear_float_exceptions();
2438 tcg_gen_helper_1_2(helper_fdivs
, cpu_tmp32
,
2439 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2440 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2441 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2444 gen_op_load_fpr_DT0(DFPREG(rs1
));
2445 gen_op_load_fpr_DT1(DFPREG(rs2
));
2446 gen_clear_float_exceptions();
2447 tcg_gen_helper_0_0(helper_fdivd
);
2448 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2449 gen_op_store_DT0_fpr(DFPREG(rd
));
2451 case 0x4f: /* fdivq */
2452 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2453 gen_op_load_fpr_QT0(QFPREG(rs1
));
2454 gen_op_load_fpr_QT1(QFPREG(rs2
));
2455 gen_clear_float_exceptions();
2456 tcg_gen_helper_0_0(helper_fdivq
);
2457 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2458 gen_op_store_QT0_fpr(QFPREG(rd
));
2460 case 0x69: /* fsmuld */
2461 CHECK_FPU_FEATURE(dc
, FSMULD
);
2462 gen_clear_float_exceptions();
2463 tcg_gen_helper_0_2(helper_fsmuld
, cpu_fpr
[rs1
],
2465 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2466 gen_op_store_DT0_fpr(DFPREG(rd
));
2468 case 0x6e: /* fdmulq */
2469 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2470 gen_op_load_fpr_DT0(DFPREG(rs1
));
2471 gen_op_load_fpr_DT1(DFPREG(rs2
));
2472 gen_clear_float_exceptions();
2473 tcg_gen_helper_0_0(helper_fdmulq
);
2474 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2475 gen_op_store_QT0_fpr(QFPREG(rd
));
2477 case 0xc4: /* fitos */
2478 gen_clear_float_exceptions();
2479 tcg_gen_helper_1_1(helper_fitos
, cpu_tmp32
,
2481 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2482 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2484 case 0xc6: /* fdtos */
2485 gen_op_load_fpr_DT1(DFPREG(rs2
));
2486 gen_clear_float_exceptions();
2487 tcg_gen_helper_1_0(helper_fdtos
, cpu_tmp32
);
2488 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2489 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2491 case 0xc7: /* fqtos */
2492 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2493 gen_op_load_fpr_QT1(QFPREG(rs2
));
2494 gen_clear_float_exceptions();
2495 tcg_gen_helper_1_0(helper_fqtos
, cpu_tmp32
);
2496 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2497 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2499 case 0xc8: /* fitod */
2500 tcg_gen_helper_0_1(helper_fitod
, cpu_fpr
[rs2
]);
2501 gen_op_store_DT0_fpr(DFPREG(rd
));
2503 case 0xc9: /* fstod */
2504 tcg_gen_helper_0_1(helper_fstod
, cpu_fpr
[rs2
]);
2505 gen_op_store_DT0_fpr(DFPREG(rd
));
2507 case 0xcb: /* fqtod */
2508 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2509 gen_op_load_fpr_QT1(QFPREG(rs2
));
2510 gen_clear_float_exceptions();
2511 tcg_gen_helper_0_0(helper_fqtod
);
2512 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2513 gen_op_store_DT0_fpr(DFPREG(rd
));
2515 case 0xcc: /* fitoq */
2516 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2517 tcg_gen_helper_0_1(helper_fitoq
, cpu_fpr
[rs2
]);
2518 gen_op_store_QT0_fpr(QFPREG(rd
));
2520 case 0xcd: /* fstoq */
2521 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2522 tcg_gen_helper_0_1(helper_fstoq
, cpu_fpr
[rs2
]);
2523 gen_op_store_QT0_fpr(QFPREG(rd
));
2525 case 0xce: /* fdtoq */
2526 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2527 gen_op_load_fpr_DT1(DFPREG(rs2
));
2528 tcg_gen_helper_0_0(helper_fdtoq
);
2529 gen_op_store_QT0_fpr(QFPREG(rd
));
2531 case 0xd1: /* fstoi */
2532 gen_clear_float_exceptions();
2533 tcg_gen_helper_1_1(helper_fstoi
, cpu_tmp32
,
2535 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2536 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2538 case 0xd2: /* fdtoi */
2539 gen_op_load_fpr_DT1(DFPREG(rs2
));
2540 gen_clear_float_exceptions();
2541 tcg_gen_helper_1_0(helper_fdtoi
, cpu_tmp32
);
2542 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2543 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2545 case 0xd3: /* fqtoi */
2546 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2547 gen_op_load_fpr_QT1(QFPREG(rs2
));
2548 gen_clear_float_exceptions();
2549 tcg_gen_helper_1_0(helper_fqtoi
, cpu_tmp32
);
2550 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2551 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2553 #ifdef TARGET_SPARC64
2554 case 0x2: /* V9 fmovd */
2555 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)],
2556 cpu_fpr
[DFPREG(rs2
)]);
2557 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2558 cpu_fpr
[DFPREG(rs2
) + 1]);
2560 case 0x3: /* V9 fmovq */
2561 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2562 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)],
2563 cpu_fpr
[QFPREG(rs2
)]);
2564 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2565 cpu_fpr
[QFPREG(rs2
) + 1]);
2566 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2567 cpu_fpr
[QFPREG(rs2
) + 2]);
2568 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2569 cpu_fpr
[QFPREG(rs2
) + 3]);
2571 case 0x6: /* V9 fnegd */
2572 gen_op_load_fpr_DT1(DFPREG(rs2
));
2573 tcg_gen_helper_0_0(helper_fnegd
);
2574 gen_op_store_DT0_fpr(DFPREG(rd
));
2576 case 0x7: /* V9 fnegq */
2577 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2578 gen_op_load_fpr_QT1(QFPREG(rs2
));
2579 tcg_gen_helper_0_0(helper_fnegq
);
2580 gen_op_store_QT0_fpr(QFPREG(rd
));
2582 case 0xa: /* V9 fabsd */
2583 gen_op_load_fpr_DT1(DFPREG(rs2
));
2584 tcg_gen_helper_0_0(helper_fabsd
);
2585 gen_op_store_DT0_fpr(DFPREG(rd
));
2587 case 0xb: /* V9 fabsq */
2588 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2589 gen_op_load_fpr_QT1(QFPREG(rs2
));
2590 tcg_gen_helper_0_0(helper_fabsq
);
2591 gen_op_store_QT0_fpr(QFPREG(rd
));
2593 case 0x81: /* V9 fstox */
2594 gen_clear_float_exceptions();
2595 tcg_gen_helper_0_1(helper_fstox
, cpu_fpr
[rs2
]);
2596 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2597 gen_op_store_DT0_fpr(DFPREG(rd
));
2599 case 0x82: /* V9 fdtox */
2600 gen_op_load_fpr_DT1(DFPREG(rs2
));
2601 gen_clear_float_exceptions();
2602 tcg_gen_helper_0_0(helper_fdtox
);
2603 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2604 gen_op_store_DT0_fpr(DFPREG(rd
));
2606 case 0x83: /* V9 fqtox */
2607 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2608 gen_op_load_fpr_QT1(QFPREG(rs2
));
2609 gen_clear_float_exceptions();
2610 tcg_gen_helper_0_0(helper_fqtox
);
2611 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2612 gen_op_store_DT0_fpr(DFPREG(rd
));
2614 case 0x84: /* V9 fxtos */
2615 gen_op_load_fpr_DT1(DFPREG(rs2
));
2616 gen_clear_float_exceptions();
2617 tcg_gen_helper_1_0(helper_fxtos
, cpu_tmp32
);
2618 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2619 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2621 case 0x88: /* V9 fxtod */
2622 gen_op_load_fpr_DT1(DFPREG(rs2
));
2623 gen_clear_float_exceptions();
2624 tcg_gen_helper_0_0(helper_fxtod
);
2625 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2626 gen_op_store_DT0_fpr(DFPREG(rd
));
2628 case 0x8c: /* V9 fxtoq */
2629 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2630 gen_op_load_fpr_DT1(DFPREG(rs2
));
2631 gen_clear_float_exceptions();
2632 tcg_gen_helper_0_0(helper_fxtoq
);
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2634 gen_op_store_QT0_fpr(QFPREG(rd
));
2640 } else if (xop
== 0x35) { /* FPU Operations */
2641 #ifdef TARGET_SPARC64
2644 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2646 gen_op_clear_ieee_excp_and_FTT();
2647 rs1
= GET_FIELD(insn
, 13, 17);
2648 rs2
= GET_FIELD(insn
, 27, 31);
2649 xop
= GET_FIELD(insn
, 18, 26);
2650 #ifdef TARGET_SPARC64
2651 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2654 l1
= gen_new_label();
2655 cond
= GET_FIELD_SP(insn
, 14, 17);
2656 cpu_src1
= get_src1(insn
, cpu_src1
);
2657 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2659 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2662 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2665 l1
= gen_new_label();
2666 cond
= GET_FIELD_SP(insn
, 14, 17);
2667 cpu_src1
= get_src1(insn
, cpu_src1
);
2668 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2670 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2671 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2674 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2677 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2678 l1
= gen_new_label();
2679 cond
= GET_FIELD_SP(insn
, 14, 17);
2680 cpu_src1
= get_src1(insn
, cpu_src1
);
2681 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2683 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2684 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2685 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2686 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2692 #ifdef TARGET_SPARC64
2693 #define FMOVSCC(fcc) \
2698 l1 = gen_new_label(); \
2699 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2700 cond = GET_FIELD_SP(insn, 14, 17); \
2701 gen_fcond(r_cond, fcc, cond); \
2702 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2704 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2705 gen_set_label(l1); \
2706 tcg_temp_free(r_cond); \
2708 #define FMOVDCC(fcc) \
2713 l1 = gen_new_label(); \
2714 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2715 cond = GET_FIELD_SP(insn, 14, 17); \
2716 gen_fcond(r_cond, fcc, cond); \
2717 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2719 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2720 cpu_fpr[DFPREG(rs2)]); \
2721 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2722 cpu_fpr[DFPREG(rs2) + 1]); \
2723 gen_set_label(l1); \
2724 tcg_temp_free(r_cond); \
2726 #define FMOVQCC(fcc) \
2731 l1 = gen_new_label(); \
2732 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2733 cond = GET_FIELD_SP(insn, 14, 17); \
2734 gen_fcond(r_cond, fcc, cond); \
2735 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2737 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2738 cpu_fpr[QFPREG(rs2)]); \
2739 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2740 cpu_fpr[QFPREG(rs2) + 1]); \
2741 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2742 cpu_fpr[QFPREG(rs2) + 2]); \
2743 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2744 cpu_fpr[QFPREG(rs2) + 3]); \
2745 gen_set_label(l1); \
2746 tcg_temp_free(r_cond); \
2748 case 0x001: /* V9 fmovscc %fcc0 */
2751 case 0x002: /* V9 fmovdcc %fcc0 */
2754 case 0x003: /* V9 fmovqcc %fcc0 */
2755 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2758 case 0x041: /* V9 fmovscc %fcc1 */
2761 case 0x042: /* V9 fmovdcc %fcc1 */
2764 case 0x043: /* V9 fmovqcc %fcc1 */
2765 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2768 case 0x081: /* V9 fmovscc %fcc2 */
2771 case 0x082: /* V9 fmovdcc %fcc2 */
2774 case 0x083: /* V9 fmovqcc %fcc2 */
2775 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2778 case 0x0c1: /* V9 fmovscc %fcc3 */
2781 case 0x0c2: /* V9 fmovdcc %fcc3 */
2784 case 0x0c3: /* V9 fmovqcc %fcc3 */
2785 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2791 #define FMOVCC(size_FDQ, icc) \
2796 l1 = gen_new_label(); \
2797 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2798 cond = GET_FIELD_SP(insn, 14, 17); \
2799 gen_cond(r_cond, icc, cond); \
2800 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2802 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2803 (glue(size_FDQ, FPREG(rs2))); \
2804 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2805 (glue(size_FDQ, FPREG(rd))); \
2806 gen_set_label(l1); \
2807 tcg_temp_free(r_cond); \
2809 #define FMOVSCC(icc) \
2814 l1 = gen_new_label(); \
2815 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2816 cond = GET_FIELD_SP(insn, 14, 17); \
2817 gen_cond(r_cond, icc, cond); \
2818 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2820 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2821 gen_set_label(l1); \
2822 tcg_temp_free(r_cond); \
2824 #define FMOVDCC(icc) \
2829 l1 = gen_new_label(); \
2830 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2831 cond = GET_FIELD_SP(insn, 14, 17); \
2832 gen_cond(r_cond, icc, cond); \
2833 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2835 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2836 cpu_fpr[DFPREG(rs2)]); \
2837 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2838 cpu_fpr[DFPREG(rs2) + 1]); \
2839 gen_set_label(l1); \
2840 tcg_temp_free(r_cond); \
2842 #define FMOVQCC(icc) \
2847 l1 = gen_new_label(); \
2848 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2849 cond = GET_FIELD_SP(insn, 14, 17); \
2850 gen_cond(r_cond, icc, cond); \
2851 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2853 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2854 cpu_fpr[QFPREG(rs2)]); \
2855 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2856 cpu_fpr[QFPREG(rs2) + 1]); \
2857 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2858 cpu_fpr[QFPREG(rs2) + 2]); \
2859 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2860 cpu_fpr[QFPREG(rs2) + 3]); \
2861 gen_set_label(l1); \
2862 tcg_temp_free(r_cond); \
2865 case 0x101: /* V9 fmovscc %icc */
2868 case 0x102: /* V9 fmovdcc %icc */
2870 case 0x103: /* V9 fmovqcc %icc */
2871 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2874 case 0x181: /* V9 fmovscc %xcc */
2877 case 0x182: /* V9 fmovdcc %xcc */
2880 case 0x183: /* V9 fmovqcc %xcc */
2881 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2888 case 0x51: /* fcmps, V9 %fcc */
2889 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2891 case 0x52: /* fcmpd, V9 %fcc */
2892 gen_op_load_fpr_DT0(DFPREG(rs1
));
2893 gen_op_load_fpr_DT1(DFPREG(rs2
));
2894 gen_op_fcmpd(rd
& 3);
2896 case 0x53: /* fcmpq, V9 %fcc */
2897 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2898 gen_op_load_fpr_QT0(QFPREG(rs1
));
2899 gen_op_load_fpr_QT1(QFPREG(rs2
));
2900 gen_op_fcmpq(rd
& 3);
2902 case 0x55: /* fcmpes, V9 %fcc */
2903 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2905 case 0x56: /* fcmped, V9 %fcc */
2906 gen_op_load_fpr_DT0(DFPREG(rs1
));
2907 gen_op_load_fpr_DT1(DFPREG(rs2
));
2908 gen_op_fcmped(rd
& 3);
2910 case 0x57: /* fcmpeq, V9 %fcc */
2911 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2912 gen_op_load_fpr_QT0(QFPREG(rs1
));
2913 gen_op_load_fpr_QT1(QFPREG(rs2
));
2914 gen_op_fcmpeq(rd
& 3);
2919 } else if (xop
== 0x2) {
2922 rs1
= GET_FIELD(insn
, 13, 17);
2924 // or %g0, x, y -> mov T0, x; mov y, T0
2925 if (IS_IMM
) { /* immediate */
2928 rs2
= GET_FIELDs(insn
, 19, 31);
2929 r_const
= tcg_const_tl((int)rs2
);
2930 gen_movl_TN_reg(rd
, r_const
);
2931 tcg_temp_free(r_const
);
2932 } else { /* register */
2933 rs2
= GET_FIELD(insn
, 27, 31);
2934 gen_movl_reg_TN(rs2
, cpu_dst
);
2935 gen_movl_TN_reg(rd
, cpu_dst
);
2938 cpu_src1
= get_src1(insn
, cpu_src1
);
2939 if (IS_IMM
) { /* immediate */
2940 rs2
= GET_FIELDs(insn
, 19, 31);
2941 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, (int)rs2
);
2942 gen_movl_TN_reg(rd
, cpu_dst
);
2943 } else { /* register */
2944 // or x, %g0, y -> mov T1, x; mov y, T1
2945 rs2
= GET_FIELD(insn
, 27, 31);
2947 gen_movl_reg_TN(rs2
, cpu_src2
);
2948 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2949 gen_movl_TN_reg(rd
, cpu_dst
);
2951 gen_movl_TN_reg(rd
, cpu_src1
);
2954 #ifdef TARGET_SPARC64
2955 } else if (xop
== 0x25) { /* sll, V9 sllx */
2956 cpu_src1
= get_src1(insn
, cpu_src1
);
2957 if (IS_IMM
) { /* immediate */
2958 rs2
= GET_FIELDs(insn
, 20, 31);
2959 if (insn
& (1 << 12)) {
2960 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
2962 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x1f);
2964 } else { /* register */
2965 rs2
= GET_FIELD(insn
, 27, 31);
2966 gen_movl_reg_TN(rs2
, cpu_src2
);
2967 if (insn
& (1 << 12)) {
2968 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2970 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2972 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2974 gen_movl_TN_reg(rd
, cpu_dst
);
2975 } else if (xop
== 0x26) { /* srl, V9 srlx */
2976 cpu_src1
= get_src1(insn
, cpu_src1
);
2977 if (IS_IMM
) { /* immediate */
2978 rs2
= GET_FIELDs(insn
, 20, 31);
2979 if (insn
& (1 << 12)) {
2980 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
2982 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2983 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
2985 } else { /* register */
2986 rs2
= GET_FIELD(insn
, 27, 31);
2987 gen_movl_reg_TN(rs2
, cpu_src2
);
2988 if (insn
& (1 << 12)) {
2989 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2990 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2992 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2993 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2994 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2997 gen_movl_TN_reg(rd
, cpu_dst
);
2998 } else if (xop
== 0x27) { /* sra, V9 srax */
2999 cpu_src1
= get_src1(insn
, cpu_src1
);
3000 if (IS_IMM
) { /* immediate */
3001 rs2
= GET_FIELDs(insn
, 20, 31);
3002 if (insn
& (1 << 12)) {
3003 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3005 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3006 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3007 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
3009 } else { /* register */
3010 rs2
= GET_FIELD(insn
, 27, 31);
3011 gen_movl_reg_TN(rs2
, cpu_src2
);
3012 if (insn
& (1 << 12)) {
3013 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3014 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3016 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3017 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3018 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3019 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3022 gen_movl_TN_reg(rd
, cpu_dst
);
3024 } else if (xop
< 0x36) {
3025 cpu_src1
= get_src1(insn
, cpu_src1
);
3026 cpu_src2
= get_src2(insn
, cpu_src2
);
3028 switch (xop
& ~0x10) {
3031 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3033 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3036 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3038 gen_op_logic_cc(cpu_dst
);
3041 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3043 gen_op_logic_cc(cpu_dst
);
3046 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3048 gen_op_logic_cc(cpu_dst
);
3052 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3054 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3057 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3058 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3060 gen_op_logic_cc(cpu_dst
);
3063 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3064 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3066 gen_op_logic_cc(cpu_dst
);
3069 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3070 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3072 gen_op_logic_cc(cpu_dst
);
3076 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3078 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3079 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3080 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3083 #ifdef TARGET_SPARC64
3084 case 0x9: /* V9 mulx */
3085 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3089 CHECK_IU_FEATURE(dc
, MUL
);
3090 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3092 gen_op_logic_cc(cpu_dst
);
3095 CHECK_IU_FEATURE(dc
, MUL
);
3096 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3098 gen_op_logic_cc(cpu_dst
);
3102 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3104 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3105 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3106 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3109 #ifdef TARGET_SPARC64
3110 case 0xd: /* V9 udivx */
3111 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3112 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3113 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3114 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3118 CHECK_IU_FEATURE(dc
, DIV
);
3119 tcg_gen_helper_1_2(helper_udiv
, cpu_dst
, cpu_src1
,
3122 gen_op_div_cc(cpu_dst
);
3125 CHECK_IU_FEATURE(dc
, DIV
);
3126 tcg_gen_helper_1_2(helper_sdiv
, cpu_dst
, cpu_src1
,
3129 gen_op_div_cc(cpu_dst
);
3134 gen_movl_TN_reg(rd
, cpu_dst
);
3137 case 0x20: /* taddcc */
3138 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3139 gen_movl_TN_reg(rd
, cpu_dst
);
3141 case 0x21: /* tsubcc */
3142 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3143 gen_movl_TN_reg(rd
, cpu_dst
);
3145 case 0x22: /* taddcctv */
3146 save_state(dc
, cpu_cond
);
3147 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3148 gen_movl_TN_reg(rd
, cpu_dst
);
3150 case 0x23: /* tsubcctv */
3151 save_state(dc
, cpu_cond
);
3152 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3153 gen_movl_TN_reg(rd
, cpu_dst
);
3155 case 0x24: /* mulscc */
3156 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3157 gen_movl_TN_reg(rd
, cpu_dst
);
3159 #ifndef TARGET_SPARC64
3160 case 0x25: /* sll */
3161 if (IS_IMM
) { /* immediate */
3162 rs2
= GET_FIELDs(insn
, 20, 31);
3163 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3164 } else { /* register */
3165 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3166 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3168 gen_movl_TN_reg(rd
, cpu_dst
);
3170 case 0x26: /* srl */
3171 if (IS_IMM
) { /* immediate */
3172 rs2
= GET_FIELDs(insn
, 20, 31);
3173 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3174 } else { /* register */
3175 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3176 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3178 gen_movl_TN_reg(rd
, cpu_dst
);
3180 case 0x27: /* sra */
3181 if (IS_IMM
) { /* immediate */
3182 rs2
= GET_FIELDs(insn
, 20, 31);
3183 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3184 } else { /* register */
3185 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3186 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3188 gen_movl_TN_reg(rd
, cpu_dst
);
3195 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3196 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3198 #ifndef TARGET_SPARC64
3199 case 0x01 ... 0x0f: /* undefined in the
3203 case 0x10 ... 0x1f: /* implementation-dependent
3209 case 0x2: /* V9 wrccr */
3210 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3211 tcg_gen_helper_0_1(helper_wrccr
, cpu_dst
);
3213 case 0x3: /* V9 wrasi */
3214 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3215 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3217 case 0x6: /* V9 wrfprs */
3218 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3219 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3220 save_state(dc
, cpu_cond
);
3225 case 0xf: /* V9 sir, nop if user */
3226 #if !defined(CONFIG_USER_ONLY)
3231 case 0x13: /* Graphics Status */
3232 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3234 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3236 case 0x17: /* Tick compare */
3237 #if !defined(CONFIG_USER_ONLY)
3238 if (!supervisor(dc
))
3244 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3246 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3247 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3248 offsetof(CPUState
, tick
));
3249 tcg_gen_helper_0_2(helper_tick_set_limit
,
3250 r_tickptr
, cpu_tick_cmpr
);
3251 tcg_temp_free(r_tickptr
);
3254 case 0x18: /* System tick */
3255 #if !defined(CONFIG_USER_ONLY)
3256 if (!supervisor(dc
))
3262 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3264 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3265 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3266 offsetof(CPUState
, stick
));
3267 tcg_gen_helper_0_2(helper_tick_set_count
,
3268 r_tickptr
, cpu_dst
);
3269 tcg_temp_free(r_tickptr
);
3272 case 0x19: /* System tick compare */
3273 #if !defined(CONFIG_USER_ONLY)
3274 if (!supervisor(dc
))
3280 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3282 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3283 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3284 offsetof(CPUState
, stick
));
3285 tcg_gen_helper_0_2(helper_tick_set_limit
,
3286 r_tickptr
, cpu_stick_cmpr
);
3287 tcg_temp_free(r_tickptr
);
3291 case 0x10: /* Performance Control */
3292 case 0x11: /* Performance Instrumentation
3294 case 0x12: /* Dispatch Control */
3295 case 0x14: /* Softint set */
3296 case 0x15: /* Softint clear */
3297 case 0x16: /* Softint write */
3304 #if !defined(CONFIG_USER_ONLY)
3305 case 0x31: /* wrpsr, V9 saved, restored */
3307 if (!supervisor(dc
))
3309 #ifdef TARGET_SPARC64
3312 tcg_gen_helper_0_0(helper_saved
);
3315 tcg_gen_helper_0_0(helper_restored
);
3317 case 2: /* UA2005 allclean */
3318 case 3: /* UA2005 otherw */
3319 case 4: /* UA2005 normalw */
3320 case 5: /* UA2005 invalw */
3326 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3327 tcg_gen_helper_0_1(helper_wrpsr
, cpu_dst
);
3328 save_state(dc
, cpu_cond
);
3335 case 0x32: /* wrwim, V9 wrpr */
3337 if (!supervisor(dc
))
3339 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3340 #ifdef TARGET_SPARC64
3346 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3347 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3348 offsetof(CPUState
, tsptr
));
3349 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3350 offsetof(trap_state
, tpc
));
3351 tcg_temp_free(r_tsptr
);
3358 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3359 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3360 offsetof(CPUState
, tsptr
));
3361 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3362 offsetof(trap_state
, tnpc
));
3363 tcg_temp_free(r_tsptr
);
3370 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3371 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3372 offsetof(CPUState
, tsptr
));
3373 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3374 offsetof(trap_state
,
3376 tcg_temp_free(r_tsptr
);
3383 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3384 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3385 offsetof(CPUState
, tsptr
));
3386 tcg_gen_st_i32(cpu_tmp0
, r_tsptr
,
3387 offsetof(trap_state
, tt
));
3388 tcg_temp_free(r_tsptr
);
3395 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3396 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3397 offsetof(CPUState
, tick
));
3398 tcg_gen_helper_0_2(helper_tick_set_count
,
3399 r_tickptr
, cpu_tmp0
);
3400 tcg_temp_free(r_tickptr
);
3404 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3407 save_state(dc
, cpu_cond
);
3408 tcg_gen_helper_0_1(helper_wrpstate
, cpu_tmp0
);
3414 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3415 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3416 offsetof(CPUSPARCState
, tl
));
3419 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3420 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3421 offsetof(CPUSPARCState
,
3425 tcg_gen_helper_0_1(helper_wrcwp
, cpu_tmp0
);
3428 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3429 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3430 offsetof(CPUSPARCState
,
3433 case 11: // canrestore
3434 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3435 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3436 offsetof(CPUSPARCState
,
3439 case 12: // cleanwin
3440 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3441 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3442 offsetof(CPUSPARCState
,
3445 case 13: // otherwin
3446 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3447 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3448 offsetof(CPUSPARCState
,
3452 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3453 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3454 offsetof(CPUSPARCState
,
3457 case 16: // UA2005 gl
3458 CHECK_IU_FEATURE(dc
, GL
);
3459 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3460 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3461 offsetof(CPUSPARCState
, gl
));
3463 case 26: // UA2005 strand status
3464 CHECK_IU_FEATURE(dc
, HYPV
);
3465 if (!hypervisor(dc
))
3467 tcg_gen_trunc_tl_i32(cpu_ssr
, cpu_tmp0
);
3473 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3474 if (dc
->def
->nwindows
!= 32)
3475 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3476 (1 << dc
->def
->nwindows
) - 1);
3477 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3481 case 0x33: /* wrtbr, UA2005 wrhpr */
3483 #ifndef TARGET_SPARC64
3484 if (!supervisor(dc
))
3486 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3488 CHECK_IU_FEATURE(dc
, HYPV
);
3489 if (!hypervisor(dc
))
3491 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3494 // XXX gen_op_wrhpstate();
3495 save_state(dc
, cpu_cond
);
3501 // XXX gen_op_wrhtstate();
3504 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3507 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3509 case 31: // hstick_cmpr
3513 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3514 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3515 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3516 offsetof(CPUState
, hstick
));
3517 tcg_gen_helper_0_2(helper_tick_set_limit
,
3518 r_tickptr
, cpu_hstick_cmpr
);
3519 tcg_temp_free(r_tickptr
);
3522 case 6: // hver readonly
3530 #ifdef TARGET_SPARC64
3531 case 0x2c: /* V9 movcc */
3533 int cc
= GET_FIELD_SP(insn
, 11, 12);
3534 int cond
= GET_FIELD_SP(insn
, 14, 17);
3538 r_cond
= tcg_temp_new(TCG_TYPE_TL
);
3539 if (insn
& (1 << 18)) {
3541 gen_cond(r_cond
, 0, cond
);
3543 gen_cond(r_cond
, 1, cond
);
3547 gen_fcond(r_cond
, cc
, cond
);
3550 l1
= gen_new_label();
3552 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3553 if (IS_IMM
) { /* immediate */
3556 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3557 r_const
= tcg_const_tl((int)rs2
);
3558 gen_movl_TN_reg(rd
, r_const
);
3559 tcg_temp_free(r_const
);
3561 rs2
= GET_FIELD_SP(insn
, 0, 4);
3562 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3563 gen_movl_TN_reg(rd
, cpu_tmp0
);
3566 tcg_temp_free(r_cond
);
3569 case 0x2d: /* V9 sdivx */
3570 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3571 gen_movl_TN_reg(rd
, cpu_dst
);
3573 case 0x2e: /* V9 popc */
3575 cpu_src2
= get_src2(insn
, cpu_src2
);
3576 tcg_gen_helper_1_1(helper_popc
, cpu_dst
,
3578 gen_movl_TN_reg(rd
, cpu_dst
);
3580 case 0x2f: /* V9 movr */
3582 int cond
= GET_FIELD_SP(insn
, 10, 12);
3585 cpu_src1
= get_src1(insn
, cpu_src1
);
3587 l1
= gen_new_label();
3589 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3591 if (IS_IMM
) { /* immediate */
3594 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3595 r_const
= tcg_const_tl((int)rs2
);
3596 gen_movl_TN_reg(rd
, r_const
);
3597 tcg_temp_free(r_const
);
3599 rs2
= GET_FIELD_SP(insn
, 0, 4);
3600 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3601 gen_movl_TN_reg(rd
, cpu_tmp0
);
3611 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3612 #ifdef TARGET_SPARC64
3613 int opf
= GET_FIELD_SP(insn
, 5, 13);
3614 rs1
= GET_FIELD(insn
, 13, 17);
3615 rs2
= GET_FIELD(insn
, 27, 31);
3616 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3620 case 0x000: /* VIS I edge8cc */
3621 case 0x001: /* VIS II edge8n */
3622 case 0x002: /* VIS I edge8lcc */
3623 case 0x003: /* VIS II edge8ln */
3624 case 0x004: /* VIS I edge16cc */
3625 case 0x005: /* VIS II edge16n */
3626 case 0x006: /* VIS I edge16lcc */
3627 case 0x007: /* VIS II edge16ln */
3628 case 0x008: /* VIS I edge32cc */
3629 case 0x009: /* VIS II edge32n */
3630 case 0x00a: /* VIS I edge32lcc */
3631 case 0x00b: /* VIS II edge32ln */
3634 case 0x010: /* VIS I array8 */
3635 CHECK_FPU_FEATURE(dc
, VIS1
);
3636 cpu_src1
= get_src1(insn
, cpu_src1
);
3637 gen_movl_reg_TN(rs2
, cpu_src2
);
3638 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3640 gen_movl_TN_reg(rd
, cpu_dst
);
3642 case 0x012: /* VIS I array16 */
3643 CHECK_FPU_FEATURE(dc
, VIS1
);
3644 cpu_src1
= get_src1(insn
, cpu_src1
);
3645 gen_movl_reg_TN(rs2
, cpu_src2
);
3646 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3648 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3649 gen_movl_TN_reg(rd
, cpu_dst
);
3651 case 0x014: /* VIS I array32 */
3652 CHECK_FPU_FEATURE(dc
, VIS1
);
3653 cpu_src1
= get_src1(insn
, cpu_src1
);
3654 gen_movl_reg_TN(rs2
, cpu_src2
);
3655 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3657 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3658 gen_movl_TN_reg(rd
, cpu_dst
);
3660 case 0x018: /* VIS I alignaddr */
3661 CHECK_FPU_FEATURE(dc
, VIS1
);
3662 cpu_src1
= get_src1(insn
, cpu_src1
);
3663 gen_movl_reg_TN(rs2
, cpu_src2
);
3664 tcg_gen_helper_1_2(helper_alignaddr
, cpu_dst
, cpu_src1
,
3666 gen_movl_TN_reg(rd
, cpu_dst
);
3668 case 0x019: /* VIS II bmask */
3669 case 0x01a: /* VIS I alignaddrl */
3672 case 0x020: /* VIS I fcmple16 */
3673 CHECK_FPU_FEATURE(dc
, VIS1
);
3674 gen_op_load_fpr_DT0(DFPREG(rs1
));
3675 gen_op_load_fpr_DT1(DFPREG(rs2
));
3676 tcg_gen_helper_0_0(helper_fcmple16
);
3677 gen_op_store_DT0_fpr(DFPREG(rd
));
3679 case 0x022: /* VIS I fcmpne16 */
3680 CHECK_FPU_FEATURE(dc
, VIS1
);
3681 gen_op_load_fpr_DT0(DFPREG(rs1
));
3682 gen_op_load_fpr_DT1(DFPREG(rs2
));
3683 tcg_gen_helper_0_0(helper_fcmpne16
);
3684 gen_op_store_DT0_fpr(DFPREG(rd
));
3686 case 0x024: /* VIS I fcmple32 */
3687 CHECK_FPU_FEATURE(dc
, VIS1
);
3688 gen_op_load_fpr_DT0(DFPREG(rs1
));
3689 gen_op_load_fpr_DT1(DFPREG(rs2
));
3690 tcg_gen_helper_0_0(helper_fcmple32
);
3691 gen_op_store_DT0_fpr(DFPREG(rd
));
3693 case 0x026: /* VIS I fcmpne32 */
3694 CHECK_FPU_FEATURE(dc
, VIS1
);
3695 gen_op_load_fpr_DT0(DFPREG(rs1
));
3696 gen_op_load_fpr_DT1(DFPREG(rs2
));
3697 tcg_gen_helper_0_0(helper_fcmpne32
);
3698 gen_op_store_DT0_fpr(DFPREG(rd
));
3700 case 0x028: /* VIS I fcmpgt16 */
3701 CHECK_FPU_FEATURE(dc
, VIS1
);
3702 gen_op_load_fpr_DT0(DFPREG(rs1
));
3703 gen_op_load_fpr_DT1(DFPREG(rs2
));
3704 tcg_gen_helper_0_0(helper_fcmpgt16
);
3705 gen_op_store_DT0_fpr(DFPREG(rd
));
3707 case 0x02a: /* VIS I fcmpeq16 */
3708 CHECK_FPU_FEATURE(dc
, VIS1
);
3709 gen_op_load_fpr_DT0(DFPREG(rs1
));
3710 gen_op_load_fpr_DT1(DFPREG(rs2
));
3711 tcg_gen_helper_0_0(helper_fcmpeq16
);
3712 gen_op_store_DT0_fpr(DFPREG(rd
));
3714 case 0x02c: /* VIS I fcmpgt32 */
3715 CHECK_FPU_FEATURE(dc
, VIS1
);
3716 gen_op_load_fpr_DT0(DFPREG(rs1
));
3717 gen_op_load_fpr_DT1(DFPREG(rs2
));
3718 tcg_gen_helper_0_0(helper_fcmpgt32
);
3719 gen_op_store_DT0_fpr(DFPREG(rd
));
3721 case 0x02e: /* VIS I fcmpeq32 */
3722 CHECK_FPU_FEATURE(dc
, VIS1
);
3723 gen_op_load_fpr_DT0(DFPREG(rs1
));
3724 gen_op_load_fpr_DT1(DFPREG(rs2
));
3725 tcg_gen_helper_0_0(helper_fcmpeq32
);
3726 gen_op_store_DT0_fpr(DFPREG(rd
));
3728 case 0x031: /* VIS I fmul8x16 */
3729 CHECK_FPU_FEATURE(dc
, VIS1
);
3730 gen_op_load_fpr_DT0(DFPREG(rs1
));
3731 gen_op_load_fpr_DT1(DFPREG(rs2
));
3732 tcg_gen_helper_0_0(helper_fmul8x16
);
3733 gen_op_store_DT0_fpr(DFPREG(rd
));
3735 case 0x033: /* VIS I fmul8x16au */
3736 CHECK_FPU_FEATURE(dc
, VIS1
);
3737 gen_op_load_fpr_DT0(DFPREG(rs1
));
3738 gen_op_load_fpr_DT1(DFPREG(rs2
));
3739 tcg_gen_helper_0_0(helper_fmul8x16au
);
3740 gen_op_store_DT0_fpr(DFPREG(rd
));
3742 case 0x035: /* VIS I fmul8x16al */
3743 CHECK_FPU_FEATURE(dc
, VIS1
);
3744 gen_op_load_fpr_DT0(DFPREG(rs1
));
3745 gen_op_load_fpr_DT1(DFPREG(rs2
));
3746 tcg_gen_helper_0_0(helper_fmul8x16al
);
3747 gen_op_store_DT0_fpr(DFPREG(rd
));
3749 case 0x036: /* VIS I fmul8sux16 */
3750 CHECK_FPU_FEATURE(dc
, VIS1
);
3751 gen_op_load_fpr_DT0(DFPREG(rs1
));
3752 gen_op_load_fpr_DT1(DFPREG(rs2
));
3753 tcg_gen_helper_0_0(helper_fmul8sux16
);
3754 gen_op_store_DT0_fpr(DFPREG(rd
));
3756 case 0x037: /* VIS I fmul8ulx16 */
3757 CHECK_FPU_FEATURE(dc
, VIS1
);
3758 gen_op_load_fpr_DT0(DFPREG(rs1
));
3759 gen_op_load_fpr_DT1(DFPREG(rs2
));
3760 tcg_gen_helper_0_0(helper_fmul8ulx16
);
3761 gen_op_store_DT0_fpr(DFPREG(rd
));
3763 case 0x038: /* VIS I fmuld8sux16 */
3764 CHECK_FPU_FEATURE(dc
, VIS1
);
3765 gen_op_load_fpr_DT0(DFPREG(rs1
));
3766 gen_op_load_fpr_DT1(DFPREG(rs2
));
3767 tcg_gen_helper_0_0(helper_fmuld8sux16
);
3768 gen_op_store_DT0_fpr(DFPREG(rd
));
3770 case 0x039: /* VIS I fmuld8ulx16 */
3771 CHECK_FPU_FEATURE(dc
, VIS1
);
3772 gen_op_load_fpr_DT0(DFPREG(rs1
));
3773 gen_op_load_fpr_DT1(DFPREG(rs2
));
3774 tcg_gen_helper_0_0(helper_fmuld8ulx16
);
3775 gen_op_store_DT0_fpr(DFPREG(rd
));
3777 case 0x03a: /* VIS I fpack32 */
3778 case 0x03b: /* VIS I fpack16 */
3779 case 0x03d: /* VIS I fpackfix */
3780 case 0x03e: /* VIS I pdist */
3783 case 0x048: /* VIS I faligndata */
3784 CHECK_FPU_FEATURE(dc
, VIS1
);
3785 gen_op_load_fpr_DT0(DFPREG(rs1
));
3786 gen_op_load_fpr_DT1(DFPREG(rs2
));
3787 tcg_gen_helper_0_0(helper_faligndata
);
3788 gen_op_store_DT0_fpr(DFPREG(rd
));
3790 case 0x04b: /* VIS I fpmerge */
3791 CHECK_FPU_FEATURE(dc
, VIS1
);
3792 gen_op_load_fpr_DT0(DFPREG(rs1
));
3793 gen_op_load_fpr_DT1(DFPREG(rs2
));
3794 tcg_gen_helper_0_0(helper_fpmerge
);
3795 gen_op_store_DT0_fpr(DFPREG(rd
));
3797 case 0x04c: /* VIS II bshuffle */
3800 case 0x04d: /* VIS I fexpand */
3801 CHECK_FPU_FEATURE(dc
, VIS1
);
3802 gen_op_load_fpr_DT0(DFPREG(rs1
));
3803 gen_op_load_fpr_DT1(DFPREG(rs2
));
3804 tcg_gen_helper_0_0(helper_fexpand
);
3805 gen_op_store_DT0_fpr(DFPREG(rd
));
3807 case 0x050: /* VIS I fpadd16 */
3808 CHECK_FPU_FEATURE(dc
, VIS1
);
3809 gen_op_load_fpr_DT0(DFPREG(rs1
));
3810 gen_op_load_fpr_DT1(DFPREG(rs2
));
3811 tcg_gen_helper_0_0(helper_fpadd16
);
3812 gen_op_store_DT0_fpr(DFPREG(rd
));
3814 case 0x051: /* VIS I fpadd16s */
3815 CHECK_FPU_FEATURE(dc
, VIS1
);
3816 tcg_gen_helper_1_2(helper_fpadd16s
, cpu_fpr
[rd
],
3817 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3819 case 0x052: /* VIS I fpadd32 */
3820 CHECK_FPU_FEATURE(dc
, VIS1
);
3821 gen_op_load_fpr_DT0(DFPREG(rs1
));
3822 gen_op_load_fpr_DT1(DFPREG(rs2
));
3823 tcg_gen_helper_0_0(helper_fpadd32
);
3824 gen_op_store_DT0_fpr(DFPREG(rd
));
3826 case 0x053: /* VIS I fpadd32s */
3827 CHECK_FPU_FEATURE(dc
, VIS1
);
3828 tcg_gen_helper_1_2(helper_fpadd32s
, cpu_fpr
[rd
],
3829 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3831 case 0x054: /* VIS I fpsub16 */
3832 CHECK_FPU_FEATURE(dc
, VIS1
);
3833 gen_op_load_fpr_DT0(DFPREG(rs1
));
3834 gen_op_load_fpr_DT1(DFPREG(rs2
));
3835 tcg_gen_helper_0_0(helper_fpsub16
);
3836 gen_op_store_DT0_fpr(DFPREG(rd
));
3838 case 0x055: /* VIS I fpsub16s */
3839 CHECK_FPU_FEATURE(dc
, VIS1
);
3840 tcg_gen_helper_1_2(helper_fpsub16s
, cpu_fpr
[rd
],
3841 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3843 case 0x056: /* VIS I fpsub32 */
3844 CHECK_FPU_FEATURE(dc
, VIS1
);
3845 gen_op_load_fpr_DT0(DFPREG(rs1
));
3846 gen_op_load_fpr_DT1(DFPREG(rs2
));
3847 tcg_gen_helper_0_0(helper_fpsub32
);
3848 gen_op_store_DT0_fpr(DFPREG(rd
));
3850 case 0x057: /* VIS I fpsub32s */
3851 CHECK_FPU_FEATURE(dc
, VIS1
);
3852 tcg_gen_helper_1_2(helper_fpsub32s
, cpu_fpr
[rd
],
3853 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3855 case 0x060: /* VIS I fzero */
3856 CHECK_FPU_FEATURE(dc
, VIS1
);
3857 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3858 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3860 case 0x061: /* VIS I fzeros */
3861 CHECK_FPU_FEATURE(dc
, VIS1
);
3862 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3864 case 0x062: /* VIS I fnor */
3865 CHECK_FPU_FEATURE(dc
, VIS1
);
3866 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3867 cpu_fpr
[DFPREG(rs2
)]);
3868 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3869 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3870 cpu_fpr
[DFPREG(rs2
) + 1]);
3871 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3873 case 0x063: /* VIS I fnors */
3874 CHECK_FPU_FEATURE(dc
, VIS1
);
3875 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3876 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3878 case 0x064: /* VIS I fandnot2 */
3879 CHECK_FPU_FEATURE(dc
, VIS1
);
3880 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
3881 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3882 cpu_fpr
[DFPREG(rs2
)]);
3883 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
3884 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3885 cpu_fpr
[DFPREG(rs2
) + 1]);
3887 case 0x065: /* VIS I fandnot2s */
3888 CHECK_FPU_FEATURE(dc
, VIS1
);
3889 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
3890 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
3892 case 0x066: /* VIS I fnot2 */
3893 CHECK_FPU_FEATURE(dc
, VIS1
);
3894 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3896 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3897 cpu_fpr
[DFPREG(rs2
) + 1], -1);
3899 case 0x067: /* VIS I fnot2s */
3900 CHECK_FPU_FEATURE(dc
, VIS1
);
3901 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], -1);
3903 case 0x068: /* VIS I fandnot1 */
3904 CHECK_FPU_FEATURE(dc
, VIS1
);
3905 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3906 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3907 cpu_fpr
[DFPREG(rs1
)]);
3908 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3909 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3910 cpu_fpr
[DFPREG(rs1
) + 1]);
3912 case 0x069: /* VIS I fandnot1s */
3913 CHECK_FPU_FEATURE(dc
, VIS1
);
3914 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3915 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3917 case 0x06a: /* VIS I fnot1 */
3918 CHECK_FPU_FEATURE(dc
, VIS1
);
3919 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3921 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3922 cpu_fpr
[DFPREG(rs1
) + 1], -1);
3924 case 0x06b: /* VIS I fnot1s */
3925 CHECK_FPU_FEATURE(dc
, VIS1
);
3926 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], -1);
3928 case 0x06c: /* VIS I fxor */
3929 CHECK_FPU_FEATURE(dc
, VIS1
);
3930 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3931 cpu_fpr
[DFPREG(rs2
)]);
3932 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3933 cpu_fpr
[DFPREG(rs1
) + 1],
3934 cpu_fpr
[DFPREG(rs2
) + 1]);
3936 case 0x06d: /* VIS I fxors */
3937 CHECK_FPU_FEATURE(dc
, VIS1
);
3938 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3940 case 0x06e: /* VIS I fnand */
3941 CHECK_FPU_FEATURE(dc
, VIS1
);
3942 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3943 cpu_fpr
[DFPREG(rs2
)]);
3944 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3945 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3946 cpu_fpr
[DFPREG(rs2
) + 1]);
3947 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3949 case 0x06f: /* VIS I fnands */
3950 CHECK_FPU_FEATURE(dc
, VIS1
);
3951 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3952 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3954 case 0x070: /* VIS I fand */
3955 CHECK_FPU_FEATURE(dc
, VIS1
);
3956 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3957 cpu_fpr
[DFPREG(rs2
)]);
3958 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3959 cpu_fpr
[DFPREG(rs1
) + 1],
3960 cpu_fpr
[DFPREG(rs2
) + 1]);
3962 case 0x071: /* VIS I fands */
3963 CHECK_FPU_FEATURE(dc
, VIS1
);
3964 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3966 case 0x072: /* VIS I fxnor */
3967 CHECK_FPU_FEATURE(dc
, VIS1
);
3968 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3969 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3970 cpu_fpr
[DFPREG(rs1
)]);
3971 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
3972 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3973 cpu_fpr
[DFPREG(rs1
) + 1]);
3975 case 0x073: /* VIS I fxnors */
3976 CHECK_FPU_FEATURE(dc
, VIS1
);
3977 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3978 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3980 case 0x074: /* VIS I fsrc1 */
3981 CHECK_FPU_FEATURE(dc
, VIS1
);
3982 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3983 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
3984 cpu_fpr
[DFPREG(rs1
) + 1]);
3986 case 0x075: /* VIS I fsrc1s */
3987 CHECK_FPU_FEATURE(dc
, VIS1
);
3988 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3990 case 0x076: /* VIS I fornot2 */
3991 CHECK_FPU_FEATURE(dc
, VIS1
);
3992 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
3993 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3994 cpu_fpr
[DFPREG(rs2
)]);
3995 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
3996 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3997 cpu_fpr
[DFPREG(rs2
) + 1]);
3999 case 0x077: /* VIS I fornot2s */
4000 CHECK_FPU_FEATURE(dc
, VIS1
);
4001 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
4002 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
4004 case 0x078: /* VIS I fsrc2 */
4005 CHECK_FPU_FEATURE(dc
, VIS1
);
4006 gen_op_load_fpr_DT0(DFPREG(rs2
));
4007 gen_op_store_DT0_fpr(DFPREG(rd
));
4009 case 0x079: /* VIS I fsrc2s */
4010 CHECK_FPU_FEATURE(dc
, VIS1
);
4011 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4013 case 0x07a: /* VIS I fornot1 */
4014 CHECK_FPU_FEATURE(dc
, VIS1
);
4015 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4016 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4017 cpu_fpr
[DFPREG(rs1
)]);
4018 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4019 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4020 cpu_fpr
[DFPREG(rs1
) + 1]);
4022 case 0x07b: /* VIS I fornot1s */
4023 CHECK_FPU_FEATURE(dc
, VIS1
);
4024 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4025 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4027 case 0x07c: /* VIS I for */
4028 CHECK_FPU_FEATURE(dc
, VIS1
);
4029 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4030 cpu_fpr
[DFPREG(rs2
)]);
4031 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4032 cpu_fpr
[DFPREG(rs1
) + 1],
4033 cpu_fpr
[DFPREG(rs2
) + 1]);
4035 case 0x07d: /* VIS I fors */
4036 CHECK_FPU_FEATURE(dc
, VIS1
);
4037 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4039 case 0x07e: /* VIS I fone */
4040 CHECK_FPU_FEATURE(dc
, VIS1
);
4041 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4042 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4044 case 0x07f: /* VIS I fones */
4045 CHECK_FPU_FEATURE(dc
, VIS1
);
4046 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4048 case 0x080: /* VIS I shutdown */
4049 case 0x081: /* VIS II siam */
4058 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4059 #ifdef TARGET_SPARC64
4064 #ifdef TARGET_SPARC64
4065 } else if (xop
== 0x39) { /* V9 return */
4068 save_state(dc
, cpu_cond
);
4069 cpu_src1
= get_src1(insn
, cpu_src1
);
4070 if (IS_IMM
) { /* immediate */
4071 rs2
= GET_FIELDs(insn
, 19, 31);
4072 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4073 } else { /* register */
4074 rs2
= GET_FIELD(insn
, 27, 31);
4076 gen_movl_reg_TN(rs2
, cpu_src2
);
4077 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4079 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4081 tcg_gen_helper_0_0(helper_restore
);
4082 gen_mov_pc_npc(dc
, cpu_cond
);
4083 r_const
= tcg_const_i32(3);
4084 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
, r_const
);
4085 tcg_temp_free(r_const
);
4086 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4087 dc
->npc
= DYNAMIC_PC
;
4091 cpu_src1
= get_src1(insn
, cpu_src1
);
4092 if (IS_IMM
) { /* immediate */
4093 rs2
= GET_FIELDs(insn
, 19, 31);
4094 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4095 } else { /* register */
4096 rs2
= GET_FIELD(insn
, 27, 31);
4098 gen_movl_reg_TN(rs2
, cpu_src2
);
4099 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4101 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4104 case 0x38: /* jmpl */
4108 r_const
= tcg_const_tl(dc
->pc
);
4109 gen_movl_TN_reg(rd
, r_const
);
4110 tcg_temp_free(r_const
);
4111 gen_mov_pc_npc(dc
, cpu_cond
);
4112 r_const
= tcg_const_i32(3);
4113 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4115 tcg_temp_free(r_const
);
4116 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4117 dc
->npc
= DYNAMIC_PC
;
4120 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4121 case 0x39: /* rett, V9 return */
4125 if (!supervisor(dc
))
4127 gen_mov_pc_npc(dc
, cpu_cond
);
4128 r_const
= tcg_const_i32(3);
4129 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4131 tcg_temp_free(r_const
);
4132 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4133 dc
->npc
= DYNAMIC_PC
;
4134 tcg_gen_helper_0_0(helper_rett
);
4138 case 0x3b: /* flush */
4139 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4141 tcg_gen_helper_0_1(helper_flush
, cpu_dst
);
4143 case 0x3c: /* save */
4144 save_state(dc
, cpu_cond
);
4145 tcg_gen_helper_0_0(helper_save
);
4146 gen_movl_TN_reg(rd
, cpu_dst
);
4148 case 0x3d: /* restore */
4149 save_state(dc
, cpu_cond
);
4150 tcg_gen_helper_0_0(helper_restore
);
4151 gen_movl_TN_reg(rd
, cpu_dst
);
4153 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4154 case 0x3e: /* V9 done/retry */
4158 if (!supervisor(dc
))
4160 dc
->npc
= DYNAMIC_PC
;
4161 dc
->pc
= DYNAMIC_PC
;
4162 tcg_gen_helper_0_0(helper_done
);
4165 if (!supervisor(dc
))
4167 dc
->npc
= DYNAMIC_PC
;
4168 dc
->pc
= DYNAMIC_PC
;
4169 tcg_gen_helper_0_0(helper_retry
);
4184 case 3: /* load/store instructions */
4186 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4188 cpu_src1
= get_src1(insn
, cpu_src1
);
4189 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4190 rs2
= GET_FIELD(insn
, 27, 31);
4191 gen_movl_reg_TN(rs2
, cpu_src2
);
4192 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4193 } else if (IS_IMM
) { /* immediate */
4194 rs2
= GET_FIELDs(insn
, 19, 31);
4195 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, (int)rs2
);
4196 } else { /* register */
4197 rs2
= GET_FIELD(insn
, 27, 31);
4199 gen_movl_reg_TN(rs2
, cpu_src2
);
4200 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4202 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4204 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4205 (xop
> 0x17 && xop
<= 0x1d ) ||
4206 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4208 case 0x0: /* load unsigned word */
4209 gen_address_mask(dc
, cpu_addr
);
4210 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4212 case 0x1: /* load unsigned byte */
4213 gen_address_mask(dc
, cpu_addr
);
4214 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4216 case 0x2: /* load unsigned halfword */
4217 gen_address_mask(dc
, cpu_addr
);
4218 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4220 case 0x3: /* load double word */
4226 save_state(dc
, cpu_cond
);
4227 r_const
= tcg_const_i32(7);
4228 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4229 r_const
); // XXX remove
4230 tcg_temp_free(r_const
);
4231 gen_address_mask(dc
, cpu_addr
);
4232 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4233 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4234 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4235 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4236 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4237 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4238 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4241 case 0x9: /* load signed byte */
4242 gen_address_mask(dc
, cpu_addr
);
4243 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4245 case 0xa: /* load signed halfword */
4246 gen_address_mask(dc
, cpu_addr
);
4247 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4249 case 0xd: /* ldstub -- XXX: should be atomically */
4253 gen_address_mask(dc
, cpu_addr
);
4254 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4255 r_const
= tcg_const_tl(0xff);
4256 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4257 tcg_temp_free(r_const
);
4260 case 0x0f: /* swap register with memory. Also
4262 CHECK_IU_FEATURE(dc
, SWAP
);
4263 gen_movl_reg_TN(rd
, cpu_val
);
4264 gen_address_mask(dc
, cpu_addr
);
4265 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4266 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4267 tcg_gen_extu_i32_tl(cpu_val
, cpu_tmp32
);
4269 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4270 case 0x10: /* load word alternate */
4271 #ifndef TARGET_SPARC64
4274 if (!supervisor(dc
))
4277 save_state(dc
, cpu_cond
);
4278 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4280 case 0x11: /* load unsigned byte alternate */
4281 #ifndef TARGET_SPARC64
4284 if (!supervisor(dc
))
4287 save_state(dc
, cpu_cond
);
4288 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4290 case 0x12: /* load unsigned halfword alternate */
4291 #ifndef TARGET_SPARC64
4294 if (!supervisor(dc
))
4297 save_state(dc
, cpu_cond
);
4298 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4300 case 0x13: /* load double word alternate */
4301 #ifndef TARGET_SPARC64
4304 if (!supervisor(dc
))
4309 save_state(dc
, cpu_cond
);
4310 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4312 case 0x19: /* load signed byte alternate */
4313 #ifndef TARGET_SPARC64
4316 if (!supervisor(dc
))
4319 save_state(dc
, cpu_cond
);
4320 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4322 case 0x1a: /* load signed halfword alternate */
4323 #ifndef TARGET_SPARC64
4326 if (!supervisor(dc
))
4329 save_state(dc
, cpu_cond
);
4330 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4332 case 0x1d: /* ldstuba -- XXX: should be atomically */
4333 #ifndef TARGET_SPARC64
4336 if (!supervisor(dc
))
4339 save_state(dc
, cpu_cond
);
4340 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4342 case 0x1f: /* swap reg with alt. memory. Also
4344 CHECK_IU_FEATURE(dc
, SWAP
);
4345 #ifndef TARGET_SPARC64
4348 if (!supervisor(dc
))
4351 save_state(dc
, cpu_cond
);
4352 gen_movl_reg_TN(rd
, cpu_val
);
4353 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4356 #ifndef TARGET_SPARC64
4357 case 0x30: /* ldc */
4358 case 0x31: /* ldcsr */
4359 case 0x33: /* lddc */
4363 #ifdef TARGET_SPARC64
4364 case 0x08: /* V9 ldsw */
4365 gen_address_mask(dc
, cpu_addr
);
4366 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4368 case 0x0b: /* V9 ldx */
4369 gen_address_mask(dc
, cpu_addr
);
4370 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4372 case 0x18: /* V9 ldswa */
4373 save_state(dc
, cpu_cond
);
4374 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4376 case 0x1b: /* V9 ldxa */
4377 save_state(dc
, cpu_cond
);
4378 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4380 case 0x2d: /* V9 prefetch, no effect */
4382 case 0x30: /* V9 ldfa */
4383 save_state(dc
, cpu_cond
);
4384 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4386 case 0x33: /* V9 lddfa */
4387 save_state(dc
, cpu_cond
);
4388 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4390 case 0x3d: /* V9 prefetcha, no effect */
4392 case 0x32: /* V9 ldqfa */
4393 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4394 save_state(dc
, cpu_cond
);
4395 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4401 gen_movl_TN_reg(rd
, cpu_val
);
4402 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4405 } else if (xop
>= 0x20 && xop
< 0x24) {
4406 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4408 save_state(dc
, cpu_cond
);
4410 case 0x20: /* load fpreg */
4411 gen_address_mask(dc
, cpu_addr
);
4412 tcg_gen_qemu_ld32u(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4414 case 0x21: /* ldfsr, V9 ldxfsr */
4415 #ifdef TARGET_SPARC64
4416 gen_address_mask(dc
, cpu_addr
);
4418 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4419 tcg_gen_helper_0_1(helper_ldxfsr
, cpu_tmp64
);
4423 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4424 tcg_gen_helper_0_1(helper_ldfsr
, cpu_tmp32
);
4428 case 0x22: /* load quad fpreg */
4432 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4433 r_const
= tcg_const_i32(dc
->mem_idx
);
4434 tcg_gen_helper_0_2(helper_ldqf
, cpu_addr
, r_const
);
4435 tcg_temp_free(r_const
);
4436 gen_op_store_QT0_fpr(QFPREG(rd
));
4439 case 0x23: /* load double fpreg */
4443 r_const
= tcg_const_i32(dc
->mem_idx
);
4444 tcg_gen_helper_0_2(helper_lddf
, cpu_addr
, r_const
);
4445 tcg_temp_free(r_const
);
4446 gen_op_store_DT0_fpr(DFPREG(rd
));
4452 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4453 xop
== 0xe || xop
== 0x1e) {
4454 gen_movl_reg_TN(rd
, cpu_val
);
4456 case 0x4: /* store word */
4457 gen_address_mask(dc
, cpu_addr
);
4458 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4460 case 0x5: /* store byte */
4461 gen_address_mask(dc
, cpu_addr
);
4462 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4464 case 0x6: /* store halfword */
4465 gen_address_mask(dc
, cpu_addr
);
4466 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4468 case 0x7: /* store double word */
4474 save_state(dc
, cpu_cond
);
4475 gen_address_mask(dc
, cpu_addr
);
4476 r_const
= tcg_const_i32(7);
4477 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4478 r_const
); // XXX remove
4479 tcg_temp_free(r_const
);
4480 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4481 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4482 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4485 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4486 case 0x14: /* store word alternate */
4487 #ifndef TARGET_SPARC64
4490 if (!supervisor(dc
))
4493 save_state(dc
, cpu_cond
);
4494 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4496 case 0x15: /* store byte alternate */
4497 #ifndef TARGET_SPARC64
4500 if (!supervisor(dc
))
4503 save_state(dc
, cpu_cond
);
4504 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4506 case 0x16: /* store halfword alternate */
4507 #ifndef TARGET_SPARC64
4510 if (!supervisor(dc
))
4513 save_state(dc
, cpu_cond
);
4514 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4516 case 0x17: /* store double word alternate */
4517 #ifndef TARGET_SPARC64
4520 if (!supervisor(dc
))
4526 save_state(dc
, cpu_cond
);
4527 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4531 #ifdef TARGET_SPARC64
4532 case 0x0e: /* V9 stx */
4533 gen_address_mask(dc
, cpu_addr
);
4534 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4536 case 0x1e: /* V9 stxa */
4537 save_state(dc
, cpu_cond
);
4538 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4544 } else if (xop
> 0x23 && xop
< 0x28) {
4545 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4547 save_state(dc
, cpu_cond
);
4549 case 0x24: /* store fpreg */
4550 gen_address_mask(dc
, cpu_addr
);
4551 tcg_gen_qemu_st32(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4553 case 0x25: /* stfsr, V9 stxfsr */
4554 #ifdef TARGET_SPARC64
4555 gen_address_mask(dc
, cpu_addr
);
4556 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4558 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4560 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp64
);
4561 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4564 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4565 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4569 #ifdef TARGET_SPARC64
4570 /* V9 stqf, store quad fpreg */
4574 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4575 gen_op_load_fpr_QT0(QFPREG(rd
));
4576 r_const
= tcg_const_i32(dc
->mem_idx
);
4577 tcg_gen_helper_0_2(helper_stqf
, cpu_addr
, r_const
);
4578 tcg_temp_free(r_const
);
4581 #else /* !TARGET_SPARC64 */
4582 /* stdfq, store floating point queue */
4583 #if defined(CONFIG_USER_ONLY)
4586 if (!supervisor(dc
))
4588 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4593 case 0x27: /* store double fpreg */
4597 gen_op_load_fpr_DT0(DFPREG(rd
));
4598 r_const
= tcg_const_i32(dc
->mem_idx
);
4599 tcg_gen_helper_0_2(helper_stdf
, cpu_addr
, r_const
);
4600 tcg_temp_free(r_const
);
4606 } else if (xop
> 0x33 && xop
< 0x3f) {
4607 save_state(dc
, cpu_cond
);
4609 #ifdef TARGET_SPARC64
4610 case 0x34: /* V9 stfa */
4611 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4613 case 0x36: /* V9 stqfa */
4617 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4618 r_const
= tcg_const_i32(7);
4619 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4621 tcg_temp_free(r_const
);
4622 gen_op_load_fpr_QT0(QFPREG(rd
));
4623 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4626 case 0x37: /* V9 stdfa */
4627 gen_op_load_fpr_DT0(DFPREG(rd
));
4628 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4630 case 0x3c: /* V9 casa */
4631 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4632 gen_movl_TN_reg(rd
, cpu_val
);
4634 case 0x3e: /* V9 casxa */
4635 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4636 gen_movl_TN_reg(rd
, cpu_val
);
4639 case 0x34: /* stc */
4640 case 0x35: /* stcsr */
4641 case 0x36: /* stdcq */
4642 case 0x37: /* stdc */
4654 /* default case for non jump instructions */
4655 if (dc
->npc
== DYNAMIC_PC
) {
4656 dc
->pc
= DYNAMIC_PC
;
4658 } else if (dc
->npc
== JUMP_PC
) {
4659 /* we can do a static jump */
4660 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4664 dc
->npc
= dc
->npc
+ 4;
4672 save_state(dc
, cpu_cond
);
4673 r_const
= tcg_const_i32(TT_ILL_INSN
);
4674 tcg_gen_helper_0_1(raise_exception
, r_const
);
4675 tcg_temp_free(r_const
);
4683 save_state(dc
, cpu_cond
);
4684 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4685 tcg_gen_helper_0_1(raise_exception
, r_const
);
4686 tcg_temp_free(r_const
);
4690 #if !defined(CONFIG_USER_ONLY)
4695 save_state(dc
, cpu_cond
);
4696 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4697 tcg_gen_helper_0_1(raise_exception
, r_const
);
4698 tcg_temp_free(r_const
);
4704 save_state(dc
, cpu_cond
);
4705 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4708 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4710 save_state(dc
, cpu_cond
);
4711 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4715 #ifndef TARGET_SPARC64
4720 save_state(dc
, cpu_cond
);
4721 r_const
= tcg_const_i32(TT_NCP_INSN
);
4722 tcg_gen_helper_0_1(raise_exception
, r_const
);
4723 tcg_temp_free(r_const
);
4730 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4731 int spc
, CPUSPARCState
*env
)
4733 target_ulong pc_start
, last_pc
;
4734 uint16_t *gen_opc_end
;
4735 DisasContext dc1
, *dc
= &dc1
;
4740 memset(dc
, 0, sizeof(DisasContext
));
4745 dc
->npc
= (target_ulong
) tb
->cs_base
;
4746 dc
->mem_idx
= cpu_mmu_index(env
);
4748 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4749 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4751 dc
->fpu_enabled
= 0;
4752 #ifdef TARGET_SPARC64
4753 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4755 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4757 cpu_tmp0
= tcg_temp_new(TCG_TYPE_TL
);
4758 cpu_tmp32
= tcg_temp_new(TCG_TYPE_I32
);
4759 cpu_tmp64
= tcg_temp_new(TCG_TYPE_I64
);
4761 cpu_dst
= tcg_temp_local_new(TCG_TYPE_TL
);
4764 cpu_val
= tcg_temp_local_new(TCG_TYPE_TL
);
4765 cpu_addr
= tcg_temp_local_new(TCG_TYPE_TL
);
4768 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4770 max_insns
= CF_COUNT_MASK
;
4773 if (env
->nb_breakpoints
> 0) {
4774 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4775 if (env
->breakpoints
[j
] == dc
->pc
) {
4776 if (dc
->pc
!= pc_start
)
4777 save_state(dc
, cpu_cond
);
4778 tcg_gen_helper_0_0(helper_debug
);
4787 fprintf(logfile
, "Search PC...\n");
4788 j
= gen_opc_ptr
- gen_opc_buf
;
4792 gen_opc_instr_start
[lj
++] = 0;
4793 gen_opc_pc
[lj
] = dc
->pc
;
4794 gen_opc_npc
[lj
] = dc
->npc
;
4795 gen_opc_instr_start
[lj
] = 1;
4796 gen_opc_icount
[lj
] = num_insns
;
4799 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4802 disas_sparc_insn(dc
);
4807 /* if the next PC is different, we abort now */
4808 if (dc
->pc
!= (last_pc
+ 4))
4810 /* if we reach a page boundary, we stop generation so that the
4811 PC of a TT_TFAULT exception is always in the right page */
4812 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4814 /* if single step mode, we generate only one instruction and
4815 generate an exception */
4816 if (env
->singlestep_enabled
) {
4817 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4821 } while ((gen_opc_ptr
< gen_opc_end
) &&
4822 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4823 num_insns
< max_insns
);
4826 tcg_temp_free(cpu_addr
);
4827 tcg_temp_free(cpu_val
);
4828 tcg_temp_free(cpu_dst
);
4829 tcg_temp_free(cpu_tmp64
);
4830 tcg_temp_free(cpu_tmp32
);
4831 tcg_temp_free(cpu_tmp0
);
4832 if (tb
->cflags
& CF_LAST_IO
)
4835 if (dc
->pc
!= DYNAMIC_PC
&&
4836 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4837 /* static PC and NPC: we can use direct chaining */
4838 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4840 if (dc
->pc
!= DYNAMIC_PC
)
4841 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4842 save_npc(dc
, cpu_cond
);
4846 gen_icount_end(tb
, num_insns
);
4847 *gen_opc_ptr
= INDEX_op_end
;
4849 j
= gen_opc_ptr
- gen_opc_buf
;
4852 gen_opc_instr_start
[lj
++] = 0;
4858 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4859 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4861 tb
->size
= last_pc
+ 4 - pc_start
;
4862 tb
->icount
= num_insns
;
4865 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4866 fprintf(logfile
, "--------------\n");
4867 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4868 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4869 fprintf(logfile
, "\n");
4874 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4876 gen_intermediate_code_internal(tb
, 0, env
);
4879 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4881 gen_intermediate_code_internal(tb
, 1, env
);
4884 void gen_intermediate_code_init(CPUSPARCState
*env
)
4888 static const char * const gregnames
[8] = {
4889 NULL
, // g0 not used
4898 static const char * const fregnames
[64] = {
4899 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4900 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4901 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4902 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4903 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4904 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4905 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4906 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4909 /* init various static tables */
4913 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
4914 cpu_regwptr
= tcg_global_mem_new(TCG_TYPE_PTR
, TCG_AREG0
,
4915 offsetof(CPUState
, regwptr
),
4917 #ifdef TARGET_SPARC64
4918 cpu_xcc
= tcg_global_mem_new(TCG_TYPE_I32
,
4919 TCG_AREG0
, offsetof(CPUState
, xcc
),
4921 cpu_asi
= tcg_global_mem_new(TCG_TYPE_I32
,
4922 TCG_AREG0
, offsetof(CPUState
, asi
),
4924 cpu_fprs
= tcg_global_mem_new(TCG_TYPE_I32
,
4925 TCG_AREG0
, offsetof(CPUState
, fprs
),
4927 cpu_gsr
= tcg_global_mem_new(TCG_TYPE_TL
,
4928 TCG_AREG0
, offsetof(CPUState
, gsr
),
4930 cpu_tick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4932 offsetof(CPUState
, tick_cmpr
),
4934 cpu_stick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4936 offsetof(CPUState
, stick_cmpr
),
4938 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4940 offsetof(CPUState
, hstick_cmpr
),
4942 cpu_hintp
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4943 offsetof(CPUState
, hintp
),
4945 cpu_htba
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4946 offsetof(CPUState
, htba
),
4948 cpu_hver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4949 offsetof(CPUState
, hver
),
4951 cpu_ssr
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4952 offsetof(CPUState
, ssr
), "ssr");
4953 cpu_ver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4954 offsetof(CPUState
, version
), "ver");
4956 cpu_wim
= tcg_global_mem_new(TCG_TYPE_I32
,
4957 TCG_AREG0
, offsetof(CPUState
, wim
),
4960 cpu_cond
= tcg_global_mem_new(TCG_TYPE_TL
,
4961 TCG_AREG0
, offsetof(CPUState
, cond
),
4963 cpu_cc_src
= tcg_global_mem_new(TCG_TYPE_TL
,
4964 TCG_AREG0
, offsetof(CPUState
, cc_src
),
4966 cpu_cc_src2
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4967 offsetof(CPUState
, cc_src2
),
4969 cpu_cc_dst
= tcg_global_mem_new(TCG_TYPE_TL
,
4970 TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4972 cpu_psr
= tcg_global_mem_new(TCG_TYPE_I32
,
4973 TCG_AREG0
, offsetof(CPUState
, psr
),
4975 cpu_fsr
= tcg_global_mem_new(TCG_TYPE_TL
,
4976 TCG_AREG0
, offsetof(CPUState
, fsr
),
4978 cpu_pc
= tcg_global_mem_new(TCG_TYPE_TL
,
4979 TCG_AREG0
, offsetof(CPUState
, pc
),
4981 cpu_npc
= tcg_global_mem_new(TCG_TYPE_TL
,
4982 TCG_AREG0
, offsetof(CPUState
, npc
),
4984 cpu_y
= tcg_global_mem_new(TCG_TYPE_TL
,
4985 TCG_AREG0
, offsetof(CPUState
, y
), "y");
4986 #ifndef CONFIG_USER_ONLY
4987 cpu_tbr
= tcg_global_mem_new(TCG_TYPE_TL
,
4988 TCG_AREG0
, offsetof(CPUState
, tbr
),
4991 for (i
= 1; i
< 8; i
++)
4992 cpu_gregs
[i
] = tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4993 offsetof(CPUState
, gregs
[i
]),
4995 for (i
= 0; i
< TARGET_FPREGS
; i
++)
4996 cpu_fpr
[i
] = tcg_global_mem_new(TCG_TYPE_I32
, TCG_AREG0
,
4997 offsetof(CPUState
, fpr
[i
]),
5000 /* register helpers */
5003 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5008 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
5009 unsigned long searched_pc
, int pc_pos
, void *puc
)
5012 env
->pc
= gen_opc_pc
[pc_pos
];
5013 npc
= gen_opc_npc
[pc_pos
];
5015 /* dynamic NPC: already stored */
5016 } else if (npc
== 2) {
5017 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
5018 /* jump PC: use T2 and the jump targets of the translation */
5020 env
->npc
= gen_opc_jump_pc
[0];
5022 env
->npc
= gen_opc_jump_pc
[1];