4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env
, cpu_regwptr
;
42 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
43 static TCGv cpu_psr
, cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
45 #ifndef CONFIG_USER_ONLY
48 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
50 static TCGv cpu_xcc
, cpu_asi
, cpu_fprs
, cpu_gsr
;
51 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
52 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0
, cpu_tmp32
, cpu_tmp64
;
58 /* Floating point registers */
59 static TCGv cpu_fpr
[TARGET_FPREGS
];
61 #include "gen-icount.h"
63 typedef struct DisasContext
{
64 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
70 int address_mask_32bit
;
71 struct TranslationBlock
*tb
;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
/* Sign-extend the low 'len' bits of x to a full int (used by the
   GET_FIELDs/GET_FIELD_SPs immediate-field macros).  The left shift is
   performed on an unsigned value to avoid undefined behavior when the
   sign bit would be shifted into/out of a signed int; the final
   arithmetic right shift relies on the implementation-defined (but
   universal on QEMU hosts) behavior of >> on negative signed values. */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;

    return (int)((unsigned int)x << shift) >> shift;
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_DT0(unsigned int src
)
107 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
108 offsetof(CPU_DoubleU
, l
.upper
));
109 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
110 offsetof(CPU_DoubleU
, l
.lower
));
113 static void gen_op_load_fpr_DT1(unsigned int src
)
115 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
116 offsetof(CPU_DoubleU
, l
.upper
));
117 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
118 offsetof(CPU_DoubleU
, l
.lower
));
121 static void gen_op_store_DT0_fpr(unsigned int dst
)
123 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
124 offsetof(CPU_DoubleU
, l
.upper
));
125 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
126 offsetof(CPU_DoubleU
, l
.lower
));
129 static void gen_op_load_fpr_QT0(unsigned int src
)
131 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
132 offsetof(CPU_QuadU
, l
.upmost
));
133 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
134 offsetof(CPU_QuadU
, l
.upper
));
135 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
136 offsetof(CPU_QuadU
, l
.lower
));
137 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
138 offsetof(CPU_QuadU
, l
.lowest
));
141 static void gen_op_load_fpr_QT1(unsigned int src
)
143 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
144 offsetof(CPU_QuadU
, l
.upmost
));
145 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
146 offsetof(CPU_QuadU
, l
.upper
));
147 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
148 offsetof(CPU_QuadU
, l
.lower
));
149 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
150 offsetof(CPU_QuadU
, l
.lowest
));
153 static void gen_op_store_QT0_fpr(unsigned int dst
)
155 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
156 offsetof(CPU_QuadU
, l
.upmost
));
157 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
158 offsetof(CPU_QuadU
, l
.upper
));
159 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
160 offsetof(CPU_QuadU
, l
.lower
));
161 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
162 offsetof(CPU_QuadU
, l
.lowest
));
166 #ifdef CONFIG_USER_ONLY
167 #define supervisor(dc) 0
168 #ifdef TARGET_SPARC64
169 #define hypervisor(dc) 0
172 #define supervisor(dc) (dc->mem_idx >= 1)
173 #ifdef TARGET_SPARC64
174 #define hypervisor(dc) (dc->mem_idx == 2)
179 #ifdef TARGET_SPARC64
181 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
183 #define AM_CHECK(dc) (1)
187 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
189 #ifdef TARGET_SPARC64
191 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
195 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
198 tcg_gen_movi_tl(tn
, 0);
200 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
202 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
206 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
211 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
213 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
217 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
218 target_ulong pc
, target_ulong npc
)
220 TranslationBlock
*tb
;
223 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
224 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num
);
227 tcg_gen_movi_tl(cpu_pc
, pc
);
228 tcg_gen_movi_tl(cpu_npc
, npc
);
229 tcg_gen_exit_tb((long)tb
+ tb_num
);
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc
, pc
);
233 tcg_gen_movi_tl(cpu_npc
, npc
);
239 static inline void gen_mov_reg_N(TCGv reg
, TCGv src
)
241 tcg_gen_extu_i32_tl(reg
, src
);
242 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
243 tcg_gen_andi_tl(reg
, reg
, 0x1);
246 static inline void gen_mov_reg_Z(TCGv reg
, TCGv src
)
248 tcg_gen_extu_i32_tl(reg
, src
);
249 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
250 tcg_gen_andi_tl(reg
, reg
, 0x1);
253 static inline void gen_mov_reg_V(TCGv reg
, TCGv src
)
255 tcg_gen_extu_i32_tl(reg
, src
);
256 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
257 tcg_gen_andi_tl(reg
, reg
, 0x1);
260 static inline void gen_mov_reg_C(TCGv reg
, TCGv src
)
262 tcg_gen_extu_i32_tl(reg
, src
);
263 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
264 tcg_gen_andi_tl(reg
, reg
, 0x1);
267 static inline void gen_cc_clear_icc(void)
269 tcg_gen_movi_i32(cpu_psr
, 0);
272 #ifdef TARGET_SPARC64
273 static inline void gen_cc_clear_xcc(void)
275 tcg_gen_movi_i32(cpu_xcc
, 0);
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
285 static inline void gen_cc_NZ_icc(TCGv dst
)
290 l1
= gen_new_label();
291 l2
= gen_new_label();
292 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
293 tcg_gen_andi_tl(r_temp
, dst
, 0xffffffffULL
);
294 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
295 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
297 tcg_gen_ext_i32_tl(r_temp
, dst
);
298 tcg_gen_brcondi_tl(TCG_COND_GE
, r_temp
, 0, l2
);
299 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
301 tcg_temp_free(r_temp
);
304 #ifdef TARGET_SPARC64
305 static inline void gen_cc_NZ_xcc(TCGv dst
)
309 l1
= gen_new_label();
310 l2
= gen_new_label();
311 tcg_gen_brcondi_tl(TCG_COND_NE
, dst
, 0, l1
);
312 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
314 tcg_gen_brcondi_tl(TCG_COND_GE
, dst
, 0, l2
);
315 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
322 env->psr |= PSR_CARRY;
324 static inline void gen_cc_C_add_icc(TCGv dst
, TCGv src1
)
326 TCGv r_temp1
, r_temp2
;
329 l1
= gen_new_label();
330 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
331 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
332 tcg_gen_andi_tl(r_temp1
, dst
, 0xffffffffULL
);
333 tcg_gen_andi_tl(r_temp2
, src1
, 0xffffffffULL
);
334 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
335 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
337 tcg_temp_free(r_temp1
);
338 tcg_temp_free(r_temp2
);
341 #ifdef TARGET_SPARC64
342 static inline void gen_cc_C_add_xcc(TCGv dst
, TCGv src1
)
346 l1
= gen_new_label();
347 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l1
);
348 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
357 static inline void gen_cc_V_add_icc(TCGv dst
, TCGv src1
, TCGv src2
)
361 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
362 tcg_gen_xor_tl(r_temp
, src1
, src2
);
363 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
364 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
365 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
366 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
367 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
368 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
369 tcg_temp_free(r_temp
);
370 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
373 #ifdef TARGET_SPARC64
374 static inline void gen_cc_V_add_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
378 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
379 tcg_gen_xor_tl(r_temp
, src1
, src2
);
380 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
381 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
382 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
383 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
384 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
385 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
386 tcg_temp_free(r_temp
);
387 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
391 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
393 TCGv r_temp
, r_const
;
396 l1
= gen_new_label();
398 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
399 tcg_gen_xor_tl(r_temp
, src1
, src2
);
400 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
401 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
402 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
403 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
404 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
405 r_const
= tcg_const_i32(TT_TOVF
);
406 tcg_gen_helper_0_1(raise_exception
, r_const
);
407 tcg_temp_free(r_const
);
409 tcg_temp_free(r_temp
);
412 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
416 l1
= gen_new_label();
417 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
418 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
419 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
420 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
424 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
429 l1
= gen_new_label();
430 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
431 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
432 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
433 r_const
= tcg_const_i32(TT_TOVF
);
434 tcg_gen_helper_0_1(raise_exception
, r_const
);
435 tcg_temp_free(r_const
);
439 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
441 tcg_gen_mov_tl(cpu_cc_src
, src1
);
442 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
443 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
445 gen_cc_NZ_icc(cpu_cc_dst
);
446 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
447 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
448 #ifdef TARGET_SPARC64
450 gen_cc_NZ_xcc(cpu_cc_dst
);
451 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
452 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
454 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
457 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
459 tcg_gen_mov_tl(cpu_cc_src
, src1
);
460 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
461 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
462 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
464 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
465 #ifdef TARGET_SPARC64
467 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
469 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
470 gen_cc_NZ_icc(cpu_cc_dst
);
471 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
472 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
473 #ifdef TARGET_SPARC64
474 gen_cc_NZ_xcc(cpu_cc_dst
);
475 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
476 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
478 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
481 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
483 tcg_gen_mov_tl(cpu_cc_src
, src1
);
484 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
485 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
487 gen_cc_NZ_icc(cpu_cc_dst
);
488 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
489 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
490 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
491 #ifdef TARGET_SPARC64
493 gen_cc_NZ_xcc(cpu_cc_dst
);
494 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
495 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
497 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
500 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
502 tcg_gen_mov_tl(cpu_cc_src
, src1
);
503 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
504 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
505 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
506 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
508 gen_cc_NZ_icc(cpu_cc_dst
);
509 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
510 #ifdef TARGET_SPARC64
512 gen_cc_NZ_xcc(cpu_cc_dst
);
513 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
514 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
516 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
521 env->psr |= PSR_CARRY;
523 static inline void gen_cc_C_sub_icc(TCGv src1
, TCGv src2
)
525 TCGv r_temp1
, r_temp2
;
528 l1
= gen_new_label();
529 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
530 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
531 tcg_gen_andi_tl(r_temp1
, src1
, 0xffffffffULL
);
532 tcg_gen_andi_tl(r_temp2
, src2
, 0xffffffffULL
);
533 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
534 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
536 tcg_temp_free(r_temp1
);
537 tcg_temp_free(r_temp2
);
540 #ifdef TARGET_SPARC64
541 static inline void gen_cc_C_sub_xcc(TCGv src1
, TCGv src2
)
545 l1
= gen_new_label();
546 tcg_gen_brcond_tl(TCG_COND_GEU
, src1
, src2
, l1
);
547 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
556 static inline void gen_cc_V_sub_icc(TCGv dst
, TCGv src1
, TCGv src2
)
560 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
561 tcg_gen_xor_tl(r_temp
, src1
, src2
);
562 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
563 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
564 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
565 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
566 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
567 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
568 tcg_temp_free(r_temp
);
571 #ifdef TARGET_SPARC64
572 static inline void gen_cc_V_sub_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
576 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
577 tcg_gen_xor_tl(r_temp
, src1
, src2
);
578 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
579 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
580 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
581 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
582 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
583 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
584 tcg_temp_free(r_temp
);
588 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
590 TCGv r_temp
, r_const
;
593 l1
= gen_new_label();
595 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
596 tcg_gen_xor_tl(r_temp
, src1
, src2
);
597 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
598 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
599 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
600 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
601 r_const
= tcg_const_i32(TT_TOVF
);
602 tcg_gen_helper_0_1(raise_exception
, r_const
);
603 tcg_temp_free(r_const
);
605 tcg_temp_free(r_temp
);
608 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
610 tcg_gen_mov_tl(cpu_cc_src
, src1
);
611 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
612 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
614 gen_cc_NZ_icc(cpu_cc_dst
);
615 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
616 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
617 #ifdef TARGET_SPARC64
619 gen_cc_NZ_xcc(cpu_cc_dst
);
620 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
621 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
623 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
626 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
628 tcg_gen_mov_tl(cpu_cc_src
, src1
);
629 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
630 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
631 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
633 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
634 #ifdef TARGET_SPARC64
636 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
638 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
639 gen_cc_NZ_icc(cpu_cc_dst
);
640 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
641 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
642 #ifdef TARGET_SPARC64
643 gen_cc_NZ_xcc(cpu_cc_dst
);
644 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
645 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
647 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
650 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
652 tcg_gen_mov_tl(cpu_cc_src
, src1
);
653 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
654 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
656 gen_cc_NZ_icc(cpu_cc_dst
);
657 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
658 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
659 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
660 #ifdef TARGET_SPARC64
662 gen_cc_NZ_xcc(cpu_cc_dst
);
663 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
664 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
666 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
669 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
671 tcg_gen_mov_tl(cpu_cc_src
, src1
);
672 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
673 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
674 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
675 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
677 gen_cc_NZ_icc(cpu_cc_dst
);
678 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
679 #ifdef TARGET_SPARC64
681 gen_cc_NZ_xcc(cpu_cc_dst
);
682 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
683 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
685 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
688 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
693 l1
= gen_new_label();
694 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
700 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
701 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
702 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
703 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
704 tcg_gen_movi_tl(cpu_cc_src2
, 0);
708 // env->y = (b2 << 31) | (env->y >> 1);
709 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
710 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
711 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
712 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
713 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
714 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
717 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
718 gen_mov_reg_V(r_temp
, cpu_psr
);
719 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
720 tcg_temp_free(r_temp
);
722 // T0 = (b1 << 31) | (T0 >> 1);
724 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
725 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
726 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
728 /* do addition and update flags */
729 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
732 gen_cc_NZ_icc(cpu_cc_dst
);
733 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
734 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
735 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
738 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
740 TCGv r_temp
, r_temp2
;
742 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
743 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
745 tcg_gen_extu_i32_i64(r_temp
, src2
);
746 tcg_gen_extu_i32_i64(r_temp2
, src1
);
747 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
749 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
750 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
751 tcg_temp_free(r_temp
);
752 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
753 #ifdef TARGET_SPARC64
754 tcg_gen_mov_i64(dst
, r_temp2
);
756 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
758 tcg_temp_free(r_temp2
);
761 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
763 TCGv r_temp
, r_temp2
;
765 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
766 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
768 tcg_gen_ext_i32_i64(r_temp
, src2
);
769 tcg_gen_ext_i32_i64(r_temp2
, src1
);
770 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
772 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
773 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
774 tcg_temp_free(r_temp
);
775 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
776 #ifdef TARGET_SPARC64
777 tcg_gen_mov_i64(dst
, r_temp2
);
779 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
781 tcg_temp_free(r_temp2
);
784 #ifdef TARGET_SPARC64
785 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
790 l1
= gen_new_label();
791 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
792 r_const
= tcg_const_i32(TT_DIV_ZERO
);
793 tcg_gen_helper_0_1(raise_exception
, r_const
);
794 tcg_temp_free(r_const
);
798 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
802 l1
= gen_new_label();
803 l2
= gen_new_label();
804 tcg_gen_mov_tl(cpu_cc_src
, src1
);
805 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
806 gen_trap_ifdivzero_tl(cpu_cc_src2
);
807 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
808 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
809 tcg_gen_movi_i64(dst
, INT64_MIN
);
812 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
817 static inline void gen_op_div_cc(TCGv dst
)
821 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
823 gen_cc_NZ_icc(cpu_cc_dst
);
824 l1
= gen_new_label();
825 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cc_src2
, 0, l1
);
826 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
830 static inline void gen_op_logic_cc(TCGv dst
)
832 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
835 gen_cc_NZ_icc(cpu_cc_dst
);
836 #ifdef TARGET_SPARC64
838 gen_cc_NZ_xcc(cpu_cc_dst
);
843 static inline void gen_op_eval_ba(TCGv dst
)
845 tcg_gen_movi_tl(dst
, 1);
849 static inline void gen_op_eval_be(TCGv dst
, TCGv src
)
851 gen_mov_reg_Z(dst
, src
);
855 static inline void gen_op_eval_ble(TCGv dst
, TCGv src
)
857 gen_mov_reg_N(cpu_tmp0
, src
);
858 gen_mov_reg_V(dst
, src
);
859 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
860 gen_mov_reg_Z(cpu_tmp0
, src
);
861 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
865 static inline void gen_op_eval_bl(TCGv dst
, TCGv src
)
867 gen_mov_reg_V(cpu_tmp0
, src
);
868 gen_mov_reg_N(dst
, src
);
869 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
873 static inline void gen_op_eval_bleu(TCGv dst
, TCGv src
)
875 gen_mov_reg_Z(cpu_tmp0
, src
);
876 gen_mov_reg_C(dst
, src
);
877 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
881 static inline void gen_op_eval_bcs(TCGv dst
, TCGv src
)
883 gen_mov_reg_C(dst
, src
);
887 static inline void gen_op_eval_bvs(TCGv dst
, TCGv src
)
889 gen_mov_reg_V(dst
, src
);
893 static inline void gen_op_eval_bn(TCGv dst
)
895 tcg_gen_movi_tl(dst
, 0);
899 static inline void gen_op_eval_bneg(TCGv dst
, TCGv src
)
901 gen_mov_reg_N(dst
, src
);
905 static inline void gen_op_eval_bne(TCGv dst
, TCGv src
)
907 gen_mov_reg_Z(dst
, src
);
908 tcg_gen_xori_tl(dst
, dst
, 0x1);
912 static inline void gen_op_eval_bg(TCGv dst
, TCGv src
)
914 gen_mov_reg_N(cpu_tmp0
, src
);
915 gen_mov_reg_V(dst
, src
);
916 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
917 gen_mov_reg_Z(cpu_tmp0
, src
);
918 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
919 tcg_gen_xori_tl(dst
, dst
, 0x1);
923 static inline void gen_op_eval_bge(TCGv dst
, TCGv src
)
925 gen_mov_reg_V(cpu_tmp0
, src
);
926 gen_mov_reg_N(dst
, src
);
927 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
928 tcg_gen_xori_tl(dst
, dst
, 0x1);
932 static inline void gen_op_eval_bgu(TCGv dst
, TCGv src
)
934 gen_mov_reg_Z(cpu_tmp0
, src
);
935 gen_mov_reg_C(dst
, src
);
936 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
937 tcg_gen_xori_tl(dst
, dst
, 0x1);
941 static inline void gen_op_eval_bcc(TCGv dst
, TCGv src
)
943 gen_mov_reg_C(dst
, src
);
944 tcg_gen_xori_tl(dst
, dst
, 0x1);
948 static inline void gen_op_eval_bpos(TCGv dst
, TCGv src
)
950 gen_mov_reg_N(dst
, src
);
951 tcg_gen_xori_tl(dst
, dst
, 0x1);
955 static inline void gen_op_eval_bvc(TCGv dst
, TCGv src
)
957 gen_mov_reg_V(dst
, src
);
958 tcg_gen_xori_tl(dst
, dst
, 0x1);
962 FPSR bit field FCC1 | FCC0:
968 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
969 unsigned int fcc_offset
)
971 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
972 tcg_gen_andi_tl(reg
, reg
, 0x1);
975 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
976 unsigned int fcc_offset
)
978 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
979 tcg_gen_andi_tl(reg
, reg
, 0x1);
983 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
984 unsigned int fcc_offset
)
986 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
987 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
988 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
991 // 1 or 2: FCC0 ^ FCC1
992 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
993 unsigned int fcc_offset
)
995 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
996 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
997 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1001 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
1002 unsigned int fcc_offset
)
1004 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1008 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
1009 unsigned int fcc_offset
)
1011 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1012 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1013 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1014 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1018 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
1019 unsigned int fcc_offset
)
1021 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1025 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
1026 unsigned int fcc_offset
)
1028 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1029 tcg_gen_xori_tl(dst
, dst
, 0x1);
1030 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1031 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1035 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
1036 unsigned int fcc_offset
)
1038 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1039 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1040 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1043 // 0: !(FCC0 | FCC1)
1044 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1045 unsigned int fcc_offset
)
1047 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1048 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1049 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1050 tcg_gen_xori_tl(dst
, dst
, 0x1);
1053 // 0 or 3: !(FCC0 ^ FCC1)
1054 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1055 unsigned int fcc_offset
)
1057 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1058 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1059 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1060 tcg_gen_xori_tl(dst
, dst
, 0x1);
1064 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1065 unsigned int fcc_offset
)
1067 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1068 tcg_gen_xori_tl(dst
, dst
, 0x1);
1071 // !1: !(FCC0 & !FCC1)
1072 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1073 unsigned int fcc_offset
)
1075 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1076 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1077 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1078 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1079 tcg_gen_xori_tl(dst
, dst
, 0x1);
1083 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1084 unsigned int fcc_offset
)
1086 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1087 tcg_gen_xori_tl(dst
, dst
, 0x1);
1090 // !2: !(!FCC0 & FCC1)
1091 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1092 unsigned int fcc_offset
)
1094 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1095 tcg_gen_xori_tl(dst
, dst
, 0x1);
1096 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1097 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1098 tcg_gen_xori_tl(dst
, dst
, 0x1);
1101 // !3: !(FCC0 & FCC1)
1102 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1103 unsigned int fcc_offset
)
1105 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1106 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1107 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1108 tcg_gen_xori_tl(dst
, dst
, 0x1);
1111 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1112 target_ulong pc2
, TCGv r_cond
)
1116 l1
= gen_new_label();
1118 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1120 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1123 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1126 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1127 target_ulong pc2
, TCGv r_cond
)
1131 l1
= gen_new_label();
1133 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1135 gen_goto_tb(dc
, 0, pc2
, pc1
);
1138 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1141 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1146 l1
= gen_new_label();
1147 l2
= gen_new_label();
1149 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1151 tcg_gen_movi_tl(cpu_npc
, npc1
);
1155 tcg_gen_movi_tl(cpu_npc
, npc2
);
1159 /* call this function before using the condition register as it may
1160 have been set for a jump */
1161 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1163 if (dc
->npc
== JUMP_PC
) {
1164 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1165 dc
->npc
= DYNAMIC_PC
;
1169 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1171 if (dc
->npc
== JUMP_PC
) {
1172 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1173 dc
->npc
= DYNAMIC_PC
;
1174 } else if (dc
->npc
!= DYNAMIC_PC
) {
1175 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1179 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1181 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1185 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1187 if (dc
->npc
== JUMP_PC
) {
1188 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1189 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1190 dc
->pc
= DYNAMIC_PC
;
1191 } else if (dc
->npc
== DYNAMIC_PC
) {
1192 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1193 dc
->pc
= DYNAMIC_PC
;
1199 static inline void gen_op_next_insn(void)
1201 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1202 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1205 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1209 #ifdef TARGET_SPARC64
1219 gen_op_eval_bn(r_dst
);
1222 gen_op_eval_be(r_dst
, r_src
);
1225 gen_op_eval_ble(r_dst
, r_src
);
1228 gen_op_eval_bl(r_dst
, r_src
);
1231 gen_op_eval_bleu(r_dst
, r_src
);
1234 gen_op_eval_bcs(r_dst
, r_src
);
1237 gen_op_eval_bneg(r_dst
, r_src
);
1240 gen_op_eval_bvs(r_dst
, r_src
);
1243 gen_op_eval_ba(r_dst
);
1246 gen_op_eval_bne(r_dst
, r_src
);
1249 gen_op_eval_bg(r_dst
, r_src
);
1252 gen_op_eval_bge(r_dst
, r_src
);
1255 gen_op_eval_bgu(r_dst
, r_src
);
1258 gen_op_eval_bcc(r_dst
, r_src
);
1261 gen_op_eval_bpos(r_dst
, r_src
);
1264 gen_op_eval_bvc(r_dst
, r_src
);
1269 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1271 unsigned int offset
;
1291 gen_op_eval_bn(r_dst
);
1294 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1297 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1300 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1303 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1306 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1309 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1312 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1315 gen_op_eval_ba(r_dst
);
1318 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1321 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1324 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1327 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1330 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1333 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1336 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1341 #ifdef TARGET_SPARC64
1343 static const int gen_tcg_cond_reg
[8] = {
1354 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1358 l1
= gen_new_label();
1359 tcg_gen_movi_tl(r_dst
, 0);
1360 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1361 tcg_gen_movi_tl(r_dst
, 1);
1366 /* XXX: potentially incorrect if dynamic npc */
1367 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1370 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1371 target_ulong target
= dc
->pc
+ offset
;
1374 /* unconditional not taken */
1376 dc
->pc
= dc
->npc
+ 4;
1377 dc
->npc
= dc
->pc
+ 4;
1380 dc
->npc
= dc
->pc
+ 4;
1382 } else if (cond
== 0x8) {
1383 /* unconditional taken */
1386 dc
->npc
= dc
->pc
+ 4;
1392 flush_cond(dc
, r_cond
);
1393 gen_cond(r_cond
, cc
, cond
);
1395 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1399 dc
->jump_pc
[0] = target
;
1400 dc
->jump_pc
[1] = dc
->npc
+ 4;
1406 /* XXX: potentially incorrect if dynamic npc */
1407 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1410 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1411 target_ulong target
= dc
->pc
+ offset
;
1414 /* unconditional not taken */
1416 dc
->pc
= dc
->npc
+ 4;
1417 dc
->npc
= dc
->pc
+ 4;
1420 dc
->npc
= dc
->pc
+ 4;
1422 } else if (cond
== 0x8) {
1423 /* unconditional taken */
1426 dc
->npc
= dc
->pc
+ 4;
1432 flush_cond(dc
, r_cond
);
1433 gen_fcond(r_cond
, cc
, cond
);
1435 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1439 dc
->jump_pc
[0] = target
;
1440 dc
->jump_pc
[1] = dc
->npc
+ 4;
1446 #ifdef TARGET_SPARC64
1447 /* XXX: potentially incorrect if dynamic npc */
1448 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1449 TCGv r_cond
, TCGv r_reg
)
1451 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1452 target_ulong target
= dc
->pc
+ offset
;
1454 flush_cond(dc
, r_cond
);
1455 gen_cond_reg(r_cond
, cond
, r_reg
);
1457 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1461 dc
->jump_pc
[0] = target
;
1462 dc
->jump_pc
[1] = dc
->npc
+ 4;
1467 static GenOpFunc
* const gen_fcmpd
[4] = {
1474 static GenOpFunc
* const gen_fcmpq
[4] = {
1481 static GenOpFunc
* const gen_fcmped
[4] = {
1488 static GenOpFunc
* const gen_fcmpeq
[4] = {
1495 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1499 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1502 tcg_gen_helper_0_2(helper_fcmps_fcc1
, r_rs1
, r_rs2
);
1505 tcg_gen_helper_0_2(helper_fcmps_fcc2
, r_rs1
, r_rs2
);
1508 tcg_gen_helper_0_2(helper_fcmps_fcc3
, r_rs1
, r_rs2
);
1513 static inline void gen_op_fcmpd(int fccno
)
1515 tcg_gen_helper_0_0(gen_fcmpd
[fccno
]);
1518 static inline void gen_op_fcmpq(int fccno
)
1520 tcg_gen_helper_0_0(gen_fcmpq
[fccno
]);
1523 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1527 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1530 tcg_gen_helper_0_2(helper_fcmpes_fcc1
, r_rs1
, r_rs2
);
1533 tcg_gen_helper_0_2(helper_fcmpes_fcc2
, r_rs1
, r_rs2
);
1536 tcg_gen_helper_0_2(helper_fcmpes_fcc3
, r_rs1
, r_rs2
);
1541 static inline void gen_op_fcmped(int fccno
)
1543 tcg_gen_helper_0_0(gen_fcmped
[fccno
]);
1546 static inline void gen_op_fcmpeq(int fccno
)
1548 tcg_gen_helper_0_0(gen_fcmpeq
[fccno
]);
1553 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1555 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1558 static inline void gen_op_fcmpd(int fccno
)
1560 tcg_gen_helper_0_0(helper_fcmpd
);
1563 static inline void gen_op_fcmpq(int fccno
)
1565 tcg_gen_helper_0_0(helper_fcmpq
);
1568 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1570 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1573 static inline void gen_op_fcmped(int fccno
)
1575 tcg_gen_helper_0_0(helper_fcmped
);
1578 static inline void gen_op_fcmpeq(int fccno
)
1580 tcg_gen_helper_0_0(helper_fcmpeq
);
1584 static inline void gen_op_fpexception_im(int fsr_flags
)
1588 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1589 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1590 r_const
= tcg_const_i32(TT_FP_EXCP
);
1591 tcg_gen_helper_0_1(raise_exception
, r_const
);
1592 tcg_temp_free(r_const
);
1595 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1597 #if !defined(CONFIG_USER_ONLY)
1598 if (!dc
->fpu_enabled
) {
1601 save_state(dc
, r_cond
);
1602 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1603 tcg_gen_helper_0_1(raise_exception
, r_const
);
1604 tcg_temp_free(r_const
);
1612 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1614 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
1617 static inline void gen_clear_float_exceptions(void)
1619 tcg_gen_helper_0_0(helper_clear_float_exceptions
);
1623 #ifdef TARGET_SPARC64
1624 static inline TCGv
gen_get_asi(int insn
, TCGv r_addr
)
1630 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1631 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1633 asi
= GET_FIELD(insn
, 19, 26);
1634 r_asi
= tcg_const_i32(asi
);
1639 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1642 TCGv r_asi
, r_size
, r_sign
;
1644 r_asi
= gen_get_asi(insn
, addr
);
1645 r_size
= tcg_const_i32(size
);
1646 r_sign
= tcg_const_i32(sign
);
1647 tcg_gen_helper_1_4(helper_ld_asi
, dst
, addr
, r_asi
, r_size
, r_sign
);
1648 tcg_temp_free(r_sign
);
1649 tcg_temp_free(r_size
);
1650 tcg_temp_free(r_asi
);
1653 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1657 r_asi
= gen_get_asi(insn
, addr
);
1658 r_size
= tcg_const_i32(size
);
1659 tcg_gen_helper_0_4(helper_st_asi
, addr
, src
, r_asi
, r_size
);
1660 tcg_temp_free(r_size
);
1661 tcg_temp_free(r_asi
);
1664 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1666 TCGv r_asi
, r_size
, r_rd
;
1668 r_asi
= gen_get_asi(insn
, addr
);
1669 r_size
= tcg_const_i32(size
);
1670 r_rd
= tcg_const_i32(rd
);
1671 tcg_gen_helper_0_4(helper_ldf_asi
, addr
, r_asi
, r_size
, r_rd
);
1672 tcg_temp_free(r_rd
);
1673 tcg_temp_free(r_size
);
1674 tcg_temp_free(r_asi
);
1677 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1679 TCGv r_asi
, r_size
, r_rd
;
1681 r_asi
= gen_get_asi(insn
, addr
);
1682 r_size
= tcg_const_i32(size
);
1683 r_rd
= tcg_const_i32(rd
);
1684 tcg_gen_helper_0_4(helper_stf_asi
, addr
, r_asi
, r_size
, r_rd
);
1685 tcg_temp_free(r_rd
);
1686 tcg_temp_free(r_size
);
1687 tcg_temp_free(r_asi
);
1690 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1692 TCGv r_asi
, r_size
, r_sign
;
1694 r_asi
= gen_get_asi(insn
, addr
);
1695 r_size
= tcg_const_i32(4);
1696 r_sign
= tcg_const_i32(0);
1697 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1698 tcg_temp_free(r_sign
);
1699 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1700 tcg_temp_free(r_size
);
1701 tcg_temp_free(r_asi
);
1702 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1705 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1709 r_asi
= gen_get_asi(insn
, addr
);
1710 r_rd
= tcg_const_i32(rd
);
1711 tcg_gen_helper_0_3(helper_ldda_asi
, addr
, r_asi
, r_rd
);
1712 tcg_temp_free(r_rd
);
1713 tcg_temp_free(r_asi
);
1716 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1718 TCGv r_low
, r_asi
, r_size
;
1720 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1721 r_low
= tcg_temp_new(TCG_TYPE_I32
);
1722 tcg_gen_trunc_tl_i32(r_low
, cpu_tmp0
);
1723 tcg_gen_trunc_tl_i32(cpu_tmp32
, hi
);
1724 tcg_gen_concat_i32_i64(cpu_tmp64
, r_low
, cpu_tmp32
);
1725 tcg_temp_free(r_low
);
1726 r_asi
= gen_get_asi(insn
, addr
);
1727 r_size
= tcg_const_i32(8);
1728 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1729 tcg_temp_free(r_size
);
1730 tcg_temp_free(r_asi
);
1733 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1738 r_val1
= tcg_temp_new(TCG_TYPE_TL
);
1739 gen_movl_reg_TN(rd
, r_val1
);
1740 r_asi
= gen_get_asi(insn
, addr
);
1741 tcg_gen_helper_1_4(helper_cas_asi
, dst
, addr
, r_val1
, val2
, r_asi
);
1742 tcg_temp_free(r_asi
);
1743 tcg_temp_free(r_val1
);
1746 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1751 gen_movl_reg_TN(rd
, cpu_tmp64
);
1752 r_asi
= gen_get_asi(insn
, addr
);
1753 tcg_gen_helper_1_4(helper_casx_asi
, dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1754 tcg_temp_free(r_asi
);
1757 #elif !defined(CONFIG_USER_ONLY)
1759 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1762 TCGv r_asi
, r_size
, r_sign
;
1764 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1765 r_size
= tcg_const_i32(size
);
1766 r_sign
= tcg_const_i32(sign
);
1767 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1768 tcg_temp_free(r_sign
);
1769 tcg_temp_free(r_size
);
1770 tcg_temp_free(r_asi
);
1771 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1774 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1778 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1779 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1780 r_size
= tcg_const_i32(size
);
1781 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1782 tcg_temp_free(r_size
);
1783 tcg_temp_free(r_asi
);
1786 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1788 TCGv r_asi
, r_size
, r_sign
;
1790 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1791 r_size
= tcg_const_i32(4);
1792 r_sign
= tcg_const_i32(0);
1793 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1794 tcg_temp_free(r_sign
);
1795 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1796 tcg_temp_free(r_size
);
1797 tcg_temp_free(r_asi
);
1798 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1801 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1803 TCGv r_asi
, r_size
, r_sign
;
1805 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1806 r_size
= tcg_const_i32(8);
1807 r_sign
= tcg_const_i32(0);
1808 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1809 tcg_temp_free(r_sign
);
1810 tcg_temp_free(r_size
);
1811 tcg_temp_free(r_asi
);
1812 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1813 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1814 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1815 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1816 gen_movl_TN_reg(rd
, hi
);
1819 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1821 TCGv r_low
, r_asi
, r_size
;
1823 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1824 r_low
= tcg_temp_new(TCG_TYPE_I32
);
1825 tcg_gen_trunc_tl_i32(r_low
, cpu_tmp0
);
1826 tcg_gen_trunc_tl_i32(cpu_tmp32
, hi
);
1827 tcg_gen_concat_i32_i64(cpu_tmp64
, r_low
, cpu_tmp32
);
1828 tcg_temp_free(r_low
);
1829 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1830 r_size
= tcg_const_i32(8);
1831 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1832 tcg_temp_free(r_size
);
1833 tcg_temp_free(r_asi
);
1837 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1838 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1840 TCGv r_val
, r_asi
, r_size
;
1842 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1844 r_val
= tcg_const_i64(0xffULL
);
1845 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1846 r_size
= tcg_const_i32(1);
1847 tcg_gen_helper_0_4(helper_st_asi
, addr
, r_val
, r_asi
, r_size
);
1848 tcg_temp_free(r_size
);
1849 tcg_temp_free(r_asi
);
1850 tcg_temp_free(r_val
);
1854 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1859 rs1
= GET_FIELD(insn
, 13, 17);
1861 r_rs1
= tcg_const_tl(0); // XXX how to free?
1863 r_rs1
= cpu_gregs
[rs1
];
1865 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1869 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1874 if (IS_IMM
) { /* immediate */
1875 rs2
= GET_FIELDs(insn
, 19, 31);
1876 r_rs2
= tcg_const_tl((int)rs2
); // XXX how to free?
1877 } else { /* register */
1878 rs2
= GET_FIELD(insn
, 27, 31);
1880 r_rs2
= tcg_const_tl(0); // XXX how to free?
1882 r_rs2
= cpu_gregs
[rs2
];
1884 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
1889 #define CHECK_IU_FEATURE(dc, FEATURE) \
1890 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1892 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1893 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1896 /* before an instruction, dc->pc must be static */
1897 static void disas_sparc_insn(DisasContext
* dc
)
1899 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1901 if (unlikely(loglevel
& CPU_LOG_TB_OP
))
1902 tcg_gen_debug_insn_start(dc
->pc
);
1903 insn
= ldl_code(dc
->pc
);
1904 opc
= GET_FIELD(insn
, 0, 1);
1906 rd
= GET_FIELD(insn
, 2, 6);
1908 cpu_src1
= tcg_temp_new(TCG_TYPE_TL
); // const
1909 cpu_src2
= tcg_temp_new(TCG_TYPE_TL
); // const
1912 case 0: /* branches/sethi */
1914 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1917 #ifdef TARGET_SPARC64
1918 case 0x1: /* V9 BPcc */
1922 target
= GET_FIELD_SP(insn
, 0, 18);
1923 target
= sign_extend(target
, 18);
1925 cc
= GET_FIELD_SP(insn
, 20, 21);
1927 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1929 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1934 case 0x3: /* V9 BPr */
1936 target
= GET_FIELD_SP(insn
, 0, 13) |
1937 (GET_FIELD_SP(insn
, 20, 21) << 14);
1938 target
= sign_extend(target
, 16);
1940 cpu_src1
= get_src1(insn
, cpu_src1
);
1941 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1944 case 0x5: /* V9 FBPcc */
1946 int cc
= GET_FIELD_SP(insn
, 20, 21);
1947 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1949 target
= GET_FIELD_SP(insn
, 0, 18);
1950 target
= sign_extend(target
, 19);
1952 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1956 case 0x7: /* CBN+x */
1961 case 0x2: /* BN+x */
1963 target
= GET_FIELD(insn
, 10, 31);
1964 target
= sign_extend(target
, 22);
1966 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1969 case 0x6: /* FBN+x */
1971 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1973 target
= GET_FIELD(insn
, 10, 31);
1974 target
= sign_extend(target
, 22);
1976 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1979 case 0x4: /* SETHI */
1981 uint32_t value
= GET_FIELD(insn
, 10, 31);
1984 r_const
= tcg_const_tl(value
<< 10);
1985 gen_movl_TN_reg(rd
, r_const
);
1986 tcg_temp_free(r_const
);
1989 case 0x0: /* UNIMPL */
1998 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
2001 r_const
= tcg_const_tl(dc
->pc
);
2002 gen_movl_TN_reg(15, r_const
);
2003 tcg_temp_free(r_const
);
2005 gen_mov_pc_npc(dc
, cpu_cond
);
2009 case 2: /* FPU & Logical Operations */
2011 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2012 if (xop
== 0x3a) { /* generate trap */
2015 cpu_src1
= get_src1(insn
, cpu_src1
);
2017 rs2
= GET_FIELD(insn
, 25, 31);
2018 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2020 rs2
= GET_FIELD(insn
, 27, 31);
2022 gen_movl_reg_TN(rs2
, cpu_src2
);
2023 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2025 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2027 cond
= GET_FIELD(insn
, 3, 6);
2029 save_state(dc
, cpu_cond
);
2030 tcg_gen_helper_0_1(helper_trap
, cpu_dst
);
2031 } else if (cond
!= 0) {
2032 TCGv r_cond
= tcg_temp_new(TCG_TYPE_TL
);
2033 #ifdef TARGET_SPARC64
2035 int cc
= GET_FIELD_SP(insn
, 11, 12);
2037 save_state(dc
, cpu_cond
);
2039 gen_cond(r_cond
, 0, cond
);
2041 gen_cond(r_cond
, 1, cond
);
2045 save_state(dc
, cpu_cond
);
2046 gen_cond(r_cond
, 0, cond
);
2048 tcg_gen_helper_0_2(helper_trapcc
, cpu_dst
, r_cond
);
2049 tcg_temp_free(r_cond
);
2055 } else if (xop
== 0x28) {
2056 rs1
= GET_FIELD(insn
, 13, 17);
2059 #ifndef TARGET_SPARC64
2060 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2061 manual, rdy on the microSPARC
2063 case 0x0f: /* stbar in the SPARCv8 manual,
2064 rdy on the microSPARC II */
2065 case 0x10 ... 0x1f: /* implementation-dependent in the
2066 SPARCv8 manual, rdy on the
2069 gen_movl_TN_reg(rd
, cpu_y
);
2071 #ifdef TARGET_SPARC64
2072 case 0x2: /* V9 rdccr */
2073 tcg_gen_helper_1_0(helper_rdccr
, cpu_dst
);
2074 gen_movl_TN_reg(rd
, cpu_dst
);
2076 case 0x3: /* V9 rdasi */
2077 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2078 gen_movl_TN_reg(rd
, cpu_dst
);
2080 case 0x4: /* V9 rdtick */
2084 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2085 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2086 offsetof(CPUState
, tick
));
2087 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2089 tcg_temp_free(r_tickptr
);
2090 gen_movl_TN_reg(rd
, cpu_dst
);
2093 case 0x5: /* V9 rdpc */
2097 r_const
= tcg_const_tl(dc
->pc
);
2098 gen_movl_TN_reg(rd
, r_const
);
2099 tcg_temp_free(r_const
);
2102 case 0x6: /* V9 rdfprs */
2103 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2104 gen_movl_TN_reg(rd
, cpu_dst
);
2106 case 0xf: /* V9 membar */
2107 break; /* no effect */
2108 case 0x13: /* Graphics Status */
2109 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2111 gen_movl_TN_reg(rd
, cpu_gsr
);
2113 case 0x17: /* Tick compare */
2114 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2116 case 0x18: /* System tick */
2120 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2121 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2122 offsetof(CPUState
, stick
));
2123 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2125 tcg_temp_free(r_tickptr
);
2126 gen_movl_TN_reg(rd
, cpu_dst
);
2129 case 0x19: /* System tick compare */
2130 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2132 case 0x10: /* Performance Control */
2133 case 0x11: /* Performance Instrumentation Counter */
2134 case 0x12: /* Dispatch Control */
2135 case 0x14: /* Softint set, WO */
2136 case 0x15: /* Softint clear, WO */
2137 case 0x16: /* Softint write */
2142 #if !defined(CONFIG_USER_ONLY)
2143 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2144 #ifndef TARGET_SPARC64
2145 if (!supervisor(dc
))
2147 tcg_gen_helper_1_0(helper_rdpsr
, cpu_dst
);
2149 CHECK_IU_FEATURE(dc
, HYPV
);
2150 if (!hypervisor(dc
))
2152 rs1
= GET_FIELD(insn
, 13, 17);
2155 // gen_op_rdhpstate();
2158 // gen_op_rdhtstate();
2161 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2164 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2167 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2169 case 31: // hstick_cmpr
2170 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2176 gen_movl_TN_reg(rd
, cpu_dst
);
2178 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2179 if (!supervisor(dc
))
2181 #ifdef TARGET_SPARC64
2182 rs1
= GET_FIELD(insn
, 13, 17);
2188 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2189 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2190 offsetof(CPUState
, tsptr
));
2191 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2192 offsetof(trap_state
, tpc
));
2193 tcg_temp_free(r_tsptr
);
2200 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2201 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2202 offsetof(CPUState
, tsptr
));
2203 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2204 offsetof(trap_state
, tnpc
));
2205 tcg_temp_free(r_tsptr
);
2212 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2213 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2214 offsetof(CPUState
, tsptr
));
2215 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2216 offsetof(trap_state
, tstate
));
2217 tcg_temp_free(r_tsptr
);
2224 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2225 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2226 offsetof(CPUState
, tsptr
));
2227 tcg_gen_ld_i32(cpu_tmp0
, r_tsptr
,
2228 offsetof(trap_state
, tt
));
2229 tcg_temp_free(r_tsptr
);
2236 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2237 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2238 offsetof(CPUState
, tick
));
2239 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_tmp0
,
2241 gen_movl_TN_reg(rd
, cpu_tmp0
);
2242 tcg_temp_free(r_tickptr
);
2246 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2249 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2250 offsetof(CPUSPARCState
, pstate
));
2251 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2254 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2255 offsetof(CPUSPARCState
, tl
));
2256 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2259 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2260 offsetof(CPUSPARCState
, psrpil
));
2261 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2264 tcg_gen_helper_1_0(helper_rdcwp
, cpu_tmp0
);
2267 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2268 offsetof(CPUSPARCState
, cansave
));
2269 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2271 case 11: // canrestore
2272 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2273 offsetof(CPUSPARCState
, canrestore
));
2274 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2276 case 12: // cleanwin
2277 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2278 offsetof(CPUSPARCState
, cleanwin
));
2279 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2281 case 13: // otherwin
2282 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2283 offsetof(CPUSPARCState
, otherwin
));
2284 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2287 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2288 offsetof(CPUSPARCState
, wstate
));
2289 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2291 case 16: // UA2005 gl
2292 CHECK_IU_FEATURE(dc
, GL
);
2293 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2294 offsetof(CPUSPARCState
, gl
));
2295 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2297 case 26: // UA2005 strand status
2298 CHECK_IU_FEATURE(dc
, HYPV
);
2299 if (!hypervisor(dc
))
2301 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_ssr
);
2304 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2311 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2313 gen_movl_TN_reg(rd
, cpu_tmp0
);
2315 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2316 #ifdef TARGET_SPARC64
2317 save_state(dc
, cpu_cond
);
2318 tcg_gen_helper_0_0(helper_flushw
);
2320 if (!supervisor(dc
))
2322 gen_movl_TN_reg(rd
, cpu_tbr
);
2326 } else if (xop
== 0x34) { /* FPU Operations */
2327 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2329 gen_op_clear_ieee_excp_and_FTT();
2330 rs1
= GET_FIELD(insn
, 13, 17);
2331 rs2
= GET_FIELD(insn
, 27, 31);
2332 xop
= GET_FIELD(insn
, 18, 26);
2334 case 0x1: /* fmovs */
2335 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2337 case 0x5: /* fnegs */
2338 tcg_gen_helper_1_1(helper_fnegs
, cpu_fpr
[rd
],
2341 case 0x9: /* fabss */
2342 tcg_gen_helper_1_1(helper_fabss
, cpu_fpr
[rd
],
2345 case 0x29: /* fsqrts */
2346 CHECK_FPU_FEATURE(dc
, FSQRT
);
2347 gen_clear_float_exceptions();
2348 tcg_gen_helper_1_1(helper_fsqrts
, cpu_tmp32
,
2350 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2351 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2353 case 0x2a: /* fsqrtd */
2354 CHECK_FPU_FEATURE(dc
, FSQRT
);
2355 gen_op_load_fpr_DT1(DFPREG(rs2
));
2356 gen_clear_float_exceptions();
2357 tcg_gen_helper_0_0(helper_fsqrtd
);
2358 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2359 gen_op_store_DT0_fpr(DFPREG(rd
));
2361 case 0x2b: /* fsqrtq */
2362 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2363 gen_op_load_fpr_QT1(QFPREG(rs2
));
2364 gen_clear_float_exceptions();
2365 tcg_gen_helper_0_0(helper_fsqrtq
);
2366 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2367 gen_op_store_QT0_fpr(QFPREG(rd
));
2369 case 0x41: /* fadds */
2370 gen_clear_float_exceptions();
2371 tcg_gen_helper_1_2(helper_fadds
, cpu_tmp32
,
2372 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2373 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2374 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2377 gen_op_load_fpr_DT0(DFPREG(rs1
));
2378 gen_op_load_fpr_DT1(DFPREG(rs2
));
2379 gen_clear_float_exceptions();
2380 tcg_gen_helper_0_0(helper_faddd
);
2381 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2382 gen_op_store_DT0_fpr(DFPREG(rd
));
2384 case 0x43: /* faddq */
2385 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2386 gen_op_load_fpr_QT0(QFPREG(rs1
));
2387 gen_op_load_fpr_QT1(QFPREG(rs2
));
2388 gen_clear_float_exceptions();
2389 tcg_gen_helper_0_0(helper_faddq
);
2390 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2391 gen_op_store_QT0_fpr(QFPREG(rd
));
2393 case 0x45: /* fsubs */
2394 gen_clear_float_exceptions();
2395 tcg_gen_helper_1_2(helper_fsubs
, cpu_tmp32
,
2396 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2397 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2398 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2401 gen_op_load_fpr_DT0(DFPREG(rs1
));
2402 gen_op_load_fpr_DT1(DFPREG(rs2
));
2403 gen_clear_float_exceptions();
2404 tcg_gen_helper_0_0(helper_fsubd
);
2405 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2406 gen_op_store_DT0_fpr(DFPREG(rd
));
2408 case 0x47: /* fsubq */
2409 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2410 gen_op_load_fpr_QT0(QFPREG(rs1
));
2411 gen_op_load_fpr_QT1(QFPREG(rs2
));
2412 gen_clear_float_exceptions();
2413 tcg_gen_helper_0_0(helper_fsubq
);
2414 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2415 gen_op_store_QT0_fpr(QFPREG(rd
));
2417 case 0x49: /* fmuls */
2418 CHECK_FPU_FEATURE(dc
, FMUL
);
2419 gen_clear_float_exceptions();
2420 tcg_gen_helper_1_2(helper_fmuls
, cpu_tmp32
,
2421 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2422 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2423 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2425 case 0x4a: /* fmuld */
2426 CHECK_FPU_FEATURE(dc
, FMUL
);
2427 gen_op_load_fpr_DT0(DFPREG(rs1
));
2428 gen_op_load_fpr_DT1(DFPREG(rs2
));
2429 gen_clear_float_exceptions();
2430 tcg_gen_helper_0_0(helper_fmuld
);
2431 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2432 gen_op_store_DT0_fpr(DFPREG(rd
));
2434 case 0x4b: /* fmulq */
2435 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2436 CHECK_FPU_FEATURE(dc
, FMUL
);
2437 gen_op_load_fpr_QT0(QFPREG(rs1
));
2438 gen_op_load_fpr_QT1(QFPREG(rs2
));
2439 gen_clear_float_exceptions();
2440 tcg_gen_helper_0_0(helper_fmulq
);
2441 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2442 gen_op_store_QT0_fpr(QFPREG(rd
));
2444 case 0x4d: /* fdivs */
2445 gen_clear_float_exceptions();
2446 tcg_gen_helper_1_2(helper_fdivs
, cpu_tmp32
,
2447 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2448 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2449 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2452 gen_op_load_fpr_DT0(DFPREG(rs1
));
2453 gen_op_load_fpr_DT1(DFPREG(rs2
));
2454 gen_clear_float_exceptions();
2455 tcg_gen_helper_0_0(helper_fdivd
);
2456 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2457 gen_op_store_DT0_fpr(DFPREG(rd
));
2459 case 0x4f: /* fdivq */
2460 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2461 gen_op_load_fpr_QT0(QFPREG(rs1
));
2462 gen_op_load_fpr_QT1(QFPREG(rs2
));
2463 gen_clear_float_exceptions();
2464 tcg_gen_helper_0_0(helper_fdivq
);
2465 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2466 gen_op_store_QT0_fpr(QFPREG(rd
));
2468 case 0x69: /* fsmuld */
2469 CHECK_FPU_FEATURE(dc
, FSMULD
);
2470 gen_clear_float_exceptions();
2471 tcg_gen_helper_0_2(helper_fsmuld
, cpu_fpr
[rs1
],
2473 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2474 gen_op_store_DT0_fpr(DFPREG(rd
));
2476 case 0x6e: /* fdmulq */
2477 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2478 gen_op_load_fpr_DT0(DFPREG(rs1
));
2479 gen_op_load_fpr_DT1(DFPREG(rs2
));
2480 gen_clear_float_exceptions();
2481 tcg_gen_helper_0_0(helper_fdmulq
);
2482 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2483 gen_op_store_QT0_fpr(QFPREG(rd
));
2485 case 0xc4: /* fitos */
2486 gen_clear_float_exceptions();
2487 tcg_gen_helper_1_1(helper_fitos
, cpu_tmp32
,
2489 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2490 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2492 case 0xc6: /* fdtos */
2493 gen_op_load_fpr_DT1(DFPREG(rs2
));
2494 gen_clear_float_exceptions();
2495 tcg_gen_helper_1_0(helper_fdtos
, cpu_tmp32
);
2496 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2497 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2499 case 0xc7: /* fqtos */
2500 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2501 gen_op_load_fpr_QT1(QFPREG(rs2
));
2502 gen_clear_float_exceptions();
2503 tcg_gen_helper_1_0(helper_fqtos
, cpu_tmp32
);
2504 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2505 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2507 case 0xc8: /* fitod */
2508 tcg_gen_helper_0_1(helper_fitod
, cpu_fpr
[rs2
]);
2509 gen_op_store_DT0_fpr(DFPREG(rd
));
2511 case 0xc9: /* fstod */
2512 tcg_gen_helper_0_1(helper_fstod
, cpu_fpr
[rs2
]);
2513 gen_op_store_DT0_fpr(DFPREG(rd
));
2515 case 0xcb: /* fqtod */
2516 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2517 gen_op_load_fpr_QT1(QFPREG(rs2
));
2518 gen_clear_float_exceptions();
2519 tcg_gen_helper_0_0(helper_fqtod
);
2520 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2521 gen_op_store_DT0_fpr(DFPREG(rd
));
2523 case 0xcc: /* fitoq */
2524 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2525 tcg_gen_helper_0_1(helper_fitoq
, cpu_fpr
[rs2
]);
2526 gen_op_store_QT0_fpr(QFPREG(rd
));
2528 case 0xcd: /* fstoq */
2529 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2530 tcg_gen_helper_0_1(helper_fstoq
, cpu_fpr
[rs2
]);
2531 gen_op_store_QT0_fpr(QFPREG(rd
));
2533 case 0xce: /* fdtoq */
2534 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2535 gen_op_load_fpr_DT1(DFPREG(rs2
));
2536 tcg_gen_helper_0_0(helper_fdtoq
);
2537 gen_op_store_QT0_fpr(QFPREG(rd
));
2539 case 0xd1: /* fstoi */
2540 gen_clear_float_exceptions();
2541 tcg_gen_helper_1_1(helper_fstoi
, cpu_tmp32
,
2543 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2544 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2546 case 0xd2: /* fdtoi */
2547 gen_op_load_fpr_DT1(DFPREG(rs2
));
2548 gen_clear_float_exceptions();
2549 tcg_gen_helper_1_0(helper_fdtoi
, cpu_tmp32
);
2550 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2551 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2553 case 0xd3: /* fqtoi */
2554 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2555 gen_op_load_fpr_QT1(QFPREG(rs2
));
2556 gen_clear_float_exceptions();
2557 tcg_gen_helper_1_0(helper_fqtoi
, cpu_tmp32
);
2558 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2559 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2561 #ifdef TARGET_SPARC64
2562 case 0x2: /* V9 fmovd */
2563 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)],
2564 cpu_fpr
[DFPREG(rs2
)]);
2565 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2566 cpu_fpr
[DFPREG(rs2
) + 1]);
2568 case 0x3: /* V9 fmovq */
2569 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2570 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)],
2571 cpu_fpr
[QFPREG(rs2
)]);
2572 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2573 cpu_fpr
[QFPREG(rs2
) + 1]);
2574 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2575 cpu_fpr
[QFPREG(rs2
) + 2]);
2576 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2577 cpu_fpr
[QFPREG(rs2
) + 3]);
2579 case 0x6: /* V9 fnegd */
2580 gen_op_load_fpr_DT1(DFPREG(rs2
));
2581 tcg_gen_helper_0_0(helper_fnegd
);
2582 gen_op_store_DT0_fpr(DFPREG(rd
));
2584 case 0x7: /* V9 fnegq */
2585 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2586 gen_op_load_fpr_QT1(QFPREG(rs2
));
2587 tcg_gen_helper_0_0(helper_fnegq
);
2588 gen_op_store_QT0_fpr(QFPREG(rd
));
2590 case 0xa: /* V9 fabsd */
2591 gen_op_load_fpr_DT1(DFPREG(rs2
));
2592 tcg_gen_helper_0_0(helper_fabsd
);
2593 gen_op_store_DT0_fpr(DFPREG(rd
));
2595 case 0xb: /* V9 fabsq */
2596 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2597 gen_op_load_fpr_QT1(QFPREG(rs2
));
2598 tcg_gen_helper_0_0(helper_fabsq
);
2599 gen_op_store_QT0_fpr(QFPREG(rd
));
2601 case 0x81: /* V9 fstox */
2602 gen_clear_float_exceptions();
2603 tcg_gen_helper_0_1(helper_fstox
, cpu_fpr
[rs2
]);
2604 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2605 gen_op_store_DT0_fpr(DFPREG(rd
));
2607 case 0x82: /* V9 fdtox */
2608 gen_op_load_fpr_DT1(DFPREG(rs2
));
2609 gen_clear_float_exceptions();
2610 tcg_gen_helper_0_0(helper_fdtox
);
2611 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2612 gen_op_store_DT0_fpr(DFPREG(rd
));
2614 case 0x83: /* V9 fqtox */
2615 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2616 gen_op_load_fpr_QT1(QFPREG(rs2
));
2617 gen_clear_float_exceptions();
2618 tcg_gen_helper_0_0(helper_fqtox
);
2619 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2620 gen_op_store_DT0_fpr(DFPREG(rd
));
2622 case 0x84: /* V9 fxtos */
2623 gen_op_load_fpr_DT1(DFPREG(rs2
));
2624 gen_clear_float_exceptions();
2625 tcg_gen_helper_1_0(helper_fxtos
, cpu_tmp32
);
2626 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2627 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2629 case 0x88: /* V9 fxtod */
2630 gen_op_load_fpr_DT1(DFPREG(rs2
));
2631 gen_clear_float_exceptions();
2632 tcg_gen_helper_0_0(helper_fxtod
);
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2634 gen_op_store_DT0_fpr(DFPREG(rd
));
2636 case 0x8c: /* V9 fxtoq */
2637 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2638 gen_op_load_fpr_DT1(DFPREG(rs2
));
2639 gen_clear_float_exceptions();
2640 tcg_gen_helper_0_0(helper_fxtoq
);
2641 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2642 gen_op_store_QT0_fpr(QFPREG(rd
));
2648 } else if (xop
== 0x35) { /* FPU Operations */
2649 #ifdef TARGET_SPARC64
2652 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2654 gen_op_clear_ieee_excp_and_FTT();
2655 rs1
= GET_FIELD(insn
, 13, 17);
2656 rs2
= GET_FIELD(insn
, 27, 31);
2657 xop
= GET_FIELD(insn
, 18, 26);
2658 #ifdef TARGET_SPARC64
2659 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2662 l1
= gen_new_label();
2663 cond
= GET_FIELD_SP(insn
, 14, 17);
2664 cpu_src1
= get_src1(insn
, cpu_src1
);
2665 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2667 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2670 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2673 l1
= gen_new_label();
2674 cond
= GET_FIELD_SP(insn
, 14, 17);
2675 cpu_src1
= get_src1(insn
, cpu_src1
);
2676 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2678 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2679 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2682 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2685 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2686 l1
= gen_new_label();
2687 cond
= GET_FIELD_SP(insn
, 14, 17);
2688 cpu_src1
= get_src1(insn
, cpu_src1
);
2689 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2691 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2692 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2693 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2694 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2700 #ifdef TARGET_SPARC64
2701 #define FMOVSCC(fcc) \
2706 l1 = gen_new_label(); \
2707 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2708 cond = GET_FIELD_SP(insn, 14, 17); \
2709 gen_fcond(r_cond, fcc, cond); \
2710 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2712 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2713 gen_set_label(l1); \
2714 tcg_temp_free(r_cond); \
2716 #define FMOVDCC(fcc) \
2721 l1 = gen_new_label(); \
2722 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2723 cond = GET_FIELD_SP(insn, 14, 17); \
2724 gen_fcond(r_cond, fcc, cond); \
2725 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2727 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2728 cpu_fpr[DFPREG(rs2)]); \
2729 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2730 cpu_fpr[DFPREG(rs2) + 1]); \
2731 gen_set_label(l1); \
2732 tcg_temp_free(r_cond); \
2734 #define FMOVQCC(fcc) \
2739 l1 = gen_new_label(); \
2740 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2741 cond = GET_FIELD_SP(insn, 14, 17); \
2742 gen_fcond(r_cond, fcc, cond); \
2743 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2745 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2746 cpu_fpr[QFPREG(rs2)]); \
2747 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2748 cpu_fpr[QFPREG(rs2) + 1]); \
2749 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2750 cpu_fpr[QFPREG(rs2) + 2]); \
2751 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2752 cpu_fpr[QFPREG(rs2) + 3]); \
2753 gen_set_label(l1); \
2754 tcg_temp_free(r_cond); \
2756 case 0x001: /* V9 fmovscc %fcc0 */
2759 case 0x002: /* V9 fmovdcc %fcc0 */
2762 case 0x003: /* V9 fmovqcc %fcc0 */
2763 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2766 case 0x041: /* V9 fmovscc %fcc1 */
2769 case 0x042: /* V9 fmovdcc %fcc1 */
2772 case 0x043: /* V9 fmovqcc %fcc1 */
2773 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2776 case 0x081: /* V9 fmovscc %fcc2 */
2779 case 0x082: /* V9 fmovdcc %fcc2 */
2782 case 0x083: /* V9 fmovqcc %fcc2 */
2783 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2786 case 0x0c1: /* V9 fmovscc %fcc3 */
2789 case 0x0c2: /* V9 fmovdcc %fcc3 */
2792 case 0x0c3: /* V9 fmovqcc %fcc3 */
2793 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2799 #define FMOVCC(size_FDQ, icc) \
2804 l1 = gen_new_label(); \
2805 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2806 cond = GET_FIELD_SP(insn, 14, 17); \
2807 gen_cond(r_cond, icc, cond); \
2808 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2810 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2811 (glue(size_FDQ, FPREG(rs2))); \
2812 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2813 (glue(size_FDQ, FPREG(rd))); \
2814 gen_set_label(l1); \
2815 tcg_temp_free(r_cond); \
2817 #define FMOVSCC(icc) \
2822 l1 = gen_new_label(); \
2823 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2824 cond = GET_FIELD_SP(insn, 14, 17); \
2825 gen_cond(r_cond, icc, cond); \
2826 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2828 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2829 gen_set_label(l1); \
2830 tcg_temp_free(r_cond); \
2832 #define FMOVDCC(icc) \
2837 l1 = gen_new_label(); \
2838 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2839 cond = GET_FIELD_SP(insn, 14, 17); \
2840 gen_cond(r_cond, icc, cond); \
2841 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2843 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2844 cpu_fpr[DFPREG(rs2)]); \
2845 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2846 cpu_fpr[DFPREG(rs2) + 1]); \
2847 gen_set_label(l1); \
2848 tcg_temp_free(r_cond); \
2850 #define FMOVQCC(icc) \
2855 l1 = gen_new_label(); \
2856 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2857 cond = GET_FIELD_SP(insn, 14, 17); \
2858 gen_cond(r_cond, icc, cond); \
2859 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2861 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2862 cpu_fpr[QFPREG(rs2)]); \
2863 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2864 cpu_fpr[QFPREG(rs2) + 1]); \
2865 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2866 cpu_fpr[QFPREG(rs2) + 2]); \
2867 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2868 cpu_fpr[QFPREG(rs2) + 3]); \
2869 gen_set_label(l1); \
2870 tcg_temp_free(r_cond); \
2873 case 0x101: /* V9 fmovscc %icc */
2876 case 0x102: /* V9 fmovdcc %icc */
2878 case 0x103: /* V9 fmovqcc %icc */
2879 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2882 case 0x181: /* V9 fmovscc %xcc */
2885 case 0x182: /* V9 fmovdcc %xcc */
2888 case 0x183: /* V9 fmovqcc %xcc */
2889 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2896 case 0x51: /* fcmps, V9 %fcc */
2897 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2899 case 0x52: /* fcmpd, V9 %fcc */
2900 gen_op_load_fpr_DT0(DFPREG(rs1
));
2901 gen_op_load_fpr_DT1(DFPREG(rs2
));
2902 gen_op_fcmpd(rd
& 3);
2904 case 0x53: /* fcmpq, V9 %fcc */
2905 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2906 gen_op_load_fpr_QT0(QFPREG(rs1
));
2907 gen_op_load_fpr_QT1(QFPREG(rs2
));
2908 gen_op_fcmpq(rd
& 3);
2910 case 0x55: /* fcmpes, V9 %fcc */
2911 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2913 case 0x56: /* fcmped, V9 %fcc */
2914 gen_op_load_fpr_DT0(DFPREG(rs1
));
2915 gen_op_load_fpr_DT1(DFPREG(rs2
));
2916 gen_op_fcmped(rd
& 3);
2918 case 0x57: /* fcmpeq, V9 %fcc */
2919 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2920 gen_op_load_fpr_QT0(QFPREG(rs1
));
2921 gen_op_load_fpr_QT1(QFPREG(rs2
));
2922 gen_op_fcmpeq(rd
& 3);
2927 } else if (xop
== 0x2) {
2930 rs1
= GET_FIELD(insn
, 13, 17);
2932 // or %g0, x, y -> mov T0, x; mov y, T0
2933 if (IS_IMM
) { /* immediate */
2936 rs2
= GET_FIELDs(insn
, 19, 31);
2937 r_const
= tcg_const_tl((int)rs2
);
2938 gen_movl_TN_reg(rd
, r_const
);
2939 tcg_temp_free(r_const
);
2940 } else { /* register */
2941 rs2
= GET_FIELD(insn
, 27, 31);
2942 gen_movl_reg_TN(rs2
, cpu_dst
);
2943 gen_movl_TN_reg(rd
, cpu_dst
);
2946 cpu_src1
= get_src1(insn
, cpu_src1
);
2947 if (IS_IMM
) { /* immediate */
2948 rs2
= GET_FIELDs(insn
, 19, 31);
2949 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, (int)rs2
);
2950 gen_movl_TN_reg(rd
, cpu_dst
);
2951 } else { /* register */
2952 // or x, %g0, y -> mov T1, x; mov y, T1
2953 rs2
= GET_FIELD(insn
, 27, 31);
2955 gen_movl_reg_TN(rs2
, cpu_src2
);
2956 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2957 gen_movl_TN_reg(rd
, cpu_dst
);
2959 gen_movl_TN_reg(rd
, cpu_src1
);
2962 #ifdef TARGET_SPARC64
2963 } else if (xop
== 0x25) { /* sll, V9 sllx */
2964 cpu_src1
= get_src1(insn
, cpu_src1
);
2965 if (IS_IMM
) { /* immediate */
2966 rs2
= GET_FIELDs(insn
, 20, 31);
2967 if (insn
& (1 << 12)) {
2968 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
2970 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x1f);
2972 } else { /* register */
2973 rs2
= GET_FIELD(insn
, 27, 31);
2974 gen_movl_reg_TN(rs2
, cpu_src2
);
2975 if (insn
& (1 << 12)) {
2976 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2978 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2980 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2982 gen_movl_TN_reg(rd
, cpu_dst
);
2983 } else if (xop
== 0x26) { /* srl, V9 srlx */
2984 cpu_src1
= get_src1(insn
, cpu_src1
);
2985 if (IS_IMM
) { /* immediate */
2986 rs2
= GET_FIELDs(insn
, 20, 31);
2987 if (insn
& (1 << 12)) {
2988 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
2990 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2991 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
2993 } else { /* register */
2994 rs2
= GET_FIELD(insn
, 27, 31);
2995 gen_movl_reg_TN(rs2
, cpu_src2
);
2996 if (insn
& (1 << 12)) {
2997 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2998 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3000 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3001 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3002 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3005 gen_movl_TN_reg(rd
, cpu_dst
);
3006 } else if (xop
== 0x27) { /* sra, V9 srax */
3007 cpu_src1
= get_src1(insn
, cpu_src1
);
3008 if (IS_IMM
) { /* immediate */
3009 rs2
= GET_FIELDs(insn
, 20, 31);
3010 if (insn
& (1 << 12)) {
3011 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3013 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3014 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3015 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
3017 } else { /* register */
3018 rs2
= GET_FIELD(insn
, 27, 31);
3019 gen_movl_reg_TN(rs2
, cpu_src2
);
3020 if (insn
& (1 << 12)) {
3021 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3022 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3024 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3025 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3026 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3027 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3030 gen_movl_TN_reg(rd
, cpu_dst
);
3032 } else if (xop
< 0x36) {
3033 cpu_src1
= get_src1(insn
, cpu_src1
);
3034 cpu_src2
= get_src2(insn
, cpu_src2
);
3036 switch (xop
& ~0x10) {
3039 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3041 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3044 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3046 gen_op_logic_cc(cpu_dst
);
3049 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3051 gen_op_logic_cc(cpu_dst
);
3054 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3056 gen_op_logic_cc(cpu_dst
);
3060 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3062 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3065 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3066 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3068 gen_op_logic_cc(cpu_dst
);
3071 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3072 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3074 gen_op_logic_cc(cpu_dst
);
3077 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3078 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3080 gen_op_logic_cc(cpu_dst
);
3084 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3086 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3087 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3088 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3091 #ifdef TARGET_SPARC64
3092 case 0x9: /* V9 mulx */
3093 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3097 CHECK_IU_FEATURE(dc
, MUL
);
3098 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3100 gen_op_logic_cc(cpu_dst
);
3103 CHECK_IU_FEATURE(dc
, MUL
);
3104 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3106 gen_op_logic_cc(cpu_dst
);
3110 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3112 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3113 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3114 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3117 #ifdef TARGET_SPARC64
3118 case 0xd: /* V9 udivx */
3119 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3120 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3121 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3122 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3126 CHECK_IU_FEATURE(dc
, DIV
);
3127 tcg_gen_helper_1_2(helper_udiv
, cpu_dst
, cpu_src1
,
3130 gen_op_div_cc(cpu_dst
);
3133 CHECK_IU_FEATURE(dc
, DIV
);
3134 tcg_gen_helper_1_2(helper_sdiv
, cpu_dst
, cpu_src1
,
3137 gen_op_div_cc(cpu_dst
);
3142 gen_movl_TN_reg(rd
, cpu_dst
);
3145 case 0x20: /* taddcc */
3146 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3147 gen_movl_TN_reg(rd
, cpu_dst
);
3149 case 0x21: /* tsubcc */
3150 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3151 gen_movl_TN_reg(rd
, cpu_dst
);
3153 case 0x22: /* taddcctv */
3154 save_state(dc
, cpu_cond
);
3155 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3156 gen_movl_TN_reg(rd
, cpu_dst
);
3158 case 0x23: /* tsubcctv */
3159 save_state(dc
, cpu_cond
);
3160 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3161 gen_movl_TN_reg(rd
, cpu_dst
);
3163 case 0x24: /* mulscc */
3164 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3165 gen_movl_TN_reg(rd
, cpu_dst
);
3167 #ifndef TARGET_SPARC64
3168 case 0x25: /* sll */
3169 if (IS_IMM
) { /* immediate */
3170 rs2
= GET_FIELDs(insn
, 20, 31);
3171 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3172 } else { /* register */
3173 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3174 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3176 gen_movl_TN_reg(rd
, cpu_dst
);
3178 case 0x26: /* srl */
3179 if (IS_IMM
) { /* immediate */
3180 rs2
= GET_FIELDs(insn
, 20, 31);
3181 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3182 } else { /* register */
3183 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3184 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3186 gen_movl_TN_reg(rd
, cpu_dst
);
3188 case 0x27: /* sra */
3189 if (IS_IMM
) { /* immediate */
3190 rs2
= GET_FIELDs(insn
, 20, 31);
3191 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3192 } else { /* register */
3193 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3194 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3196 gen_movl_TN_reg(rd
, cpu_dst
);
3203 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3204 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3206 #ifndef TARGET_SPARC64
3207 case 0x01 ... 0x0f: /* undefined in the
3211 case 0x10 ... 0x1f: /* implementation-dependent
3217 case 0x2: /* V9 wrccr */
3218 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3219 tcg_gen_helper_0_1(helper_wrccr
, cpu_dst
);
3221 case 0x3: /* V9 wrasi */
3222 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3223 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3225 case 0x6: /* V9 wrfprs */
3226 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3227 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3228 save_state(dc
, cpu_cond
);
3233 case 0xf: /* V9 sir, nop if user */
3234 #if !defined(CONFIG_USER_ONLY)
3239 case 0x13: /* Graphics Status */
3240 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3242 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3244 case 0x17: /* Tick compare */
3245 #if !defined(CONFIG_USER_ONLY)
3246 if (!supervisor(dc
))
3252 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3254 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3255 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3256 offsetof(CPUState
, tick
));
3257 tcg_gen_helper_0_2(helper_tick_set_limit
,
3258 r_tickptr
, cpu_tick_cmpr
);
3259 tcg_temp_free(r_tickptr
);
3262 case 0x18: /* System tick */
3263 #if !defined(CONFIG_USER_ONLY)
3264 if (!supervisor(dc
))
3270 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3272 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3273 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3274 offsetof(CPUState
, stick
));
3275 tcg_gen_helper_0_2(helper_tick_set_count
,
3276 r_tickptr
, cpu_dst
);
3277 tcg_temp_free(r_tickptr
);
3280 case 0x19: /* System tick compare */
3281 #if !defined(CONFIG_USER_ONLY)
3282 if (!supervisor(dc
))
3288 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3290 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3291 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3292 offsetof(CPUState
, stick
));
3293 tcg_gen_helper_0_2(helper_tick_set_limit
,
3294 r_tickptr
, cpu_stick_cmpr
);
3295 tcg_temp_free(r_tickptr
);
3299 case 0x10: /* Performance Control */
3300 case 0x11: /* Performance Instrumentation
3302 case 0x12: /* Dispatch Control */
3303 case 0x14: /* Softint set */
3304 case 0x15: /* Softint clear */
3305 case 0x16: /* Softint write */
3312 #if !defined(CONFIG_USER_ONLY)
3313 case 0x31: /* wrpsr, V9 saved, restored */
3315 if (!supervisor(dc
))
3317 #ifdef TARGET_SPARC64
3320 tcg_gen_helper_0_0(helper_saved
);
3323 tcg_gen_helper_0_0(helper_restored
);
3325 case 2: /* UA2005 allclean */
3326 case 3: /* UA2005 otherw */
3327 case 4: /* UA2005 normalw */
3328 case 5: /* UA2005 invalw */
3334 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3335 tcg_gen_helper_0_1(helper_wrpsr
, cpu_dst
);
3336 save_state(dc
, cpu_cond
);
3343 case 0x32: /* wrwim, V9 wrpr */
3345 if (!supervisor(dc
))
3347 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3348 #ifdef TARGET_SPARC64
3354 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3355 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3356 offsetof(CPUState
, tsptr
));
3357 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3358 offsetof(trap_state
, tpc
));
3359 tcg_temp_free(r_tsptr
);
3366 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3367 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3368 offsetof(CPUState
, tsptr
));
3369 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3370 offsetof(trap_state
, tnpc
));
3371 tcg_temp_free(r_tsptr
);
3378 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3379 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3380 offsetof(CPUState
, tsptr
));
3381 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3382 offsetof(trap_state
,
3384 tcg_temp_free(r_tsptr
);
3391 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3392 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3393 offsetof(CPUState
, tsptr
));
3394 tcg_gen_st_i32(cpu_tmp0
, r_tsptr
,
3395 offsetof(trap_state
, tt
));
3396 tcg_temp_free(r_tsptr
);
3403 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3404 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3405 offsetof(CPUState
, tick
));
3406 tcg_gen_helper_0_2(helper_tick_set_count
,
3407 r_tickptr
, cpu_tmp0
);
3408 tcg_temp_free(r_tickptr
);
3412 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3415 save_state(dc
, cpu_cond
);
3416 tcg_gen_helper_0_1(helper_wrpstate
, cpu_tmp0
);
3422 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3423 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3424 offsetof(CPUSPARCState
, tl
));
3427 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3428 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3429 offsetof(CPUSPARCState
,
3433 tcg_gen_helper_0_1(helper_wrcwp
, cpu_tmp0
);
3436 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3437 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3438 offsetof(CPUSPARCState
,
3441 case 11: // canrestore
3442 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3443 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3444 offsetof(CPUSPARCState
,
3447 case 12: // cleanwin
3448 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3449 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3450 offsetof(CPUSPARCState
,
3453 case 13: // otherwin
3454 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3455 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3456 offsetof(CPUSPARCState
,
3460 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3461 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3462 offsetof(CPUSPARCState
,
3465 case 16: // UA2005 gl
3466 CHECK_IU_FEATURE(dc
, GL
);
3467 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3468 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3469 offsetof(CPUSPARCState
, gl
));
3471 case 26: // UA2005 strand status
3472 CHECK_IU_FEATURE(dc
, HYPV
);
3473 if (!hypervisor(dc
))
3475 tcg_gen_trunc_tl_i32(cpu_ssr
, cpu_tmp0
);
3481 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3482 if (dc
->def
->nwindows
!= 32)
3483 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3484 (1 << dc
->def
->nwindows
) - 1);
3485 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3489 case 0x33: /* wrtbr, UA2005 wrhpr */
3491 #ifndef TARGET_SPARC64
3492 if (!supervisor(dc
))
3494 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3496 CHECK_IU_FEATURE(dc
, HYPV
);
3497 if (!hypervisor(dc
))
3499 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3502 // XXX gen_op_wrhpstate();
3503 save_state(dc
, cpu_cond
);
3509 // XXX gen_op_wrhtstate();
3512 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3515 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3517 case 31: // hstick_cmpr
3521 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3522 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3523 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3524 offsetof(CPUState
, hstick
));
3525 tcg_gen_helper_0_2(helper_tick_set_limit
,
3526 r_tickptr
, cpu_hstick_cmpr
);
3527 tcg_temp_free(r_tickptr
);
3530 case 6: // hver readonly
3538 #ifdef TARGET_SPARC64
3539 case 0x2c: /* V9 movcc */
3541 int cc
= GET_FIELD_SP(insn
, 11, 12);
3542 int cond
= GET_FIELD_SP(insn
, 14, 17);
3546 r_cond
= tcg_temp_new(TCG_TYPE_TL
);
3547 if (insn
& (1 << 18)) {
3549 gen_cond(r_cond
, 0, cond
);
3551 gen_cond(r_cond
, 1, cond
);
3555 gen_fcond(r_cond
, cc
, cond
);
3558 l1
= gen_new_label();
3560 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3561 if (IS_IMM
) { /* immediate */
3564 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3565 r_const
= tcg_const_tl((int)rs2
);
3566 gen_movl_TN_reg(rd
, r_const
);
3567 tcg_temp_free(r_const
);
3569 rs2
= GET_FIELD_SP(insn
, 0, 4);
3570 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3571 gen_movl_TN_reg(rd
, cpu_tmp0
);
3574 tcg_temp_free(r_cond
);
3577 case 0x2d: /* V9 sdivx */
3578 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3579 gen_movl_TN_reg(rd
, cpu_dst
);
3581 case 0x2e: /* V9 popc */
3583 cpu_src2
= get_src2(insn
, cpu_src2
);
3584 tcg_gen_helper_1_1(helper_popc
, cpu_dst
,
3586 gen_movl_TN_reg(rd
, cpu_dst
);
3588 case 0x2f: /* V9 movr */
3590 int cond
= GET_FIELD_SP(insn
, 10, 12);
3593 cpu_src1
= get_src1(insn
, cpu_src1
);
3595 l1
= gen_new_label();
3597 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3599 if (IS_IMM
) { /* immediate */
3602 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3603 r_const
= tcg_const_tl((int)rs2
);
3604 gen_movl_TN_reg(rd
, r_const
);
3605 tcg_temp_free(r_const
);
3607 rs2
= GET_FIELD_SP(insn
, 0, 4);
3608 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3609 gen_movl_TN_reg(rd
, cpu_tmp0
);
3619 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3620 #ifdef TARGET_SPARC64
3621 int opf
= GET_FIELD_SP(insn
, 5, 13);
3622 rs1
= GET_FIELD(insn
, 13, 17);
3623 rs2
= GET_FIELD(insn
, 27, 31);
3624 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3628 case 0x000: /* VIS I edge8cc */
3629 case 0x001: /* VIS II edge8n */
3630 case 0x002: /* VIS I edge8lcc */
3631 case 0x003: /* VIS II edge8ln */
3632 case 0x004: /* VIS I edge16cc */
3633 case 0x005: /* VIS II edge16n */
3634 case 0x006: /* VIS I edge16lcc */
3635 case 0x007: /* VIS II edge16ln */
3636 case 0x008: /* VIS I edge32cc */
3637 case 0x009: /* VIS II edge32n */
3638 case 0x00a: /* VIS I edge32lcc */
3639 case 0x00b: /* VIS II edge32ln */
3642 case 0x010: /* VIS I array8 */
3643 CHECK_FPU_FEATURE(dc
, VIS1
);
3644 cpu_src1
= get_src1(insn
, cpu_src1
);
3645 gen_movl_reg_TN(rs2
, cpu_src2
);
3646 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3648 gen_movl_TN_reg(rd
, cpu_dst
);
3650 case 0x012: /* VIS I array16 */
3651 CHECK_FPU_FEATURE(dc
, VIS1
);
3652 cpu_src1
= get_src1(insn
, cpu_src1
);
3653 gen_movl_reg_TN(rs2
, cpu_src2
);
3654 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3656 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3657 gen_movl_TN_reg(rd
, cpu_dst
);
3659 case 0x014: /* VIS I array32 */
3660 CHECK_FPU_FEATURE(dc
, VIS1
);
3661 cpu_src1
= get_src1(insn
, cpu_src1
);
3662 gen_movl_reg_TN(rs2
, cpu_src2
);
3663 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3665 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3666 gen_movl_TN_reg(rd
, cpu_dst
);
3668 case 0x018: /* VIS I alignaddr */
3669 CHECK_FPU_FEATURE(dc
, VIS1
);
3670 cpu_src1
= get_src1(insn
, cpu_src1
);
3671 gen_movl_reg_TN(rs2
, cpu_src2
);
3672 tcg_gen_helper_1_2(helper_alignaddr
, cpu_dst
, cpu_src1
,
3674 gen_movl_TN_reg(rd
, cpu_dst
);
3676 case 0x019: /* VIS II bmask */
3677 case 0x01a: /* VIS I alignaddrl */
3680 case 0x020: /* VIS I fcmple16 */
3681 CHECK_FPU_FEATURE(dc
, VIS1
);
3682 gen_op_load_fpr_DT0(DFPREG(rs1
));
3683 gen_op_load_fpr_DT1(DFPREG(rs2
));
3684 tcg_gen_helper_0_0(helper_fcmple16
);
3685 gen_op_store_DT0_fpr(DFPREG(rd
));
3687 case 0x022: /* VIS I fcmpne16 */
3688 CHECK_FPU_FEATURE(dc
, VIS1
);
3689 gen_op_load_fpr_DT0(DFPREG(rs1
));
3690 gen_op_load_fpr_DT1(DFPREG(rs2
));
3691 tcg_gen_helper_0_0(helper_fcmpne16
);
3692 gen_op_store_DT0_fpr(DFPREG(rd
));
3694 case 0x024: /* VIS I fcmple32 */
3695 CHECK_FPU_FEATURE(dc
, VIS1
);
3696 gen_op_load_fpr_DT0(DFPREG(rs1
));
3697 gen_op_load_fpr_DT1(DFPREG(rs2
));
3698 tcg_gen_helper_0_0(helper_fcmple32
);
3699 gen_op_store_DT0_fpr(DFPREG(rd
));
3701 case 0x026: /* VIS I fcmpne32 */
3702 CHECK_FPU_FEATURE(dc
, VIS1
);
3703 gen_op_load_fpr_DT0(DFPREG(rs1
));
3704 gen_op_load_fpr_DT1(DFPREG(rs2
));
3705 tcg_gen_helper_0_0(helper_fcmpne32
);
3706 gen_op_store_DT0_fpr(DFPREG(rd
));
3708 case 0x028: /* VIS I fcmpgt16 */
3709 CHECK_FPU_FEATURE(dc
, VIS1
);
3710 gen_op_load_fpr_DT0(DFPREG(rs1
));
3711 gen_op_load_fpr_DT1(DFPREG(rs2
));
3712 tcg_gen_helper_0_0(helper_fcmpgt16
);
3713 gen_op_store_DT0_fpr(DFPREG(rd
));
3715 case 0x02a: /* VIS I fcmpeq16 */
3716 CHECK_FPU_FEATURE(dc
, VIS1
);
3717 gen_op_load_fpr_DT0(DFPREG(rs1
));
3718 gen_op_load_fpr_DT1(DFPREG(rs2
));
3719 tcg_gen_helper_0_0(helper_fcmpeq16
);
3720 gen_op_store_DT0_fpr(DFPREG(rd
));
3722 case 0x02c: /* VIS I fcmpgt32 */
3723 CHECK_FPU_FEATURE(dc
, VIS1
);
3724 gen_op_load_fpr_DT0(DFPREG(rs1
));
3725 gen_op_load_fpr_DT1(DFPREG(rs2
));
3726 tcg_gen_helper_0_0(helper_fcmpgt32
);
3727 gen_op_store_DT0_fpr(DFPREG(rd
));
3729 case 0x02e: /* VIS I fcmpeq32 */
3730 CHECK_FPU_FEATURE(dc
, VIS1
);
3731 gen_op_load_fpr_DT0(DFPREG(rs1
));
3732 gen_op_load_fpr_DT1(DFPREG(rs2
));
3733 tcg_gen_helper_0_0(helper_fcmpeq32
);
3734 gen_op_store_DT0_fpr(DFPREG(rd
));
3736 case 0x031: /* VIS I fmul8x16 */
3737 CHECK_FPU_FEATURE(dc
, VIS1
);
3738 gen_op_load_fpr_DT0(DFPREG(rs1
));
3739 gen_op_load_fpr_DT1(DFPREG(rs2
));
3740 tcg_gen_helper_0_0(helper_fmul8x16
);
3741 gen_op_store_DT0_fpr(DFPREG(rd
));
3743 case 0x033: /* VIS I fmul8x16au */
3744 CHECK_FPU_FEATURE(dc
, VIS1
);
3745 gen_op_load_fpr_DT0(DFPREG(rs1
));
3746 gen_op_load_fpr_DT1(DFPREG(rs2
));
3747 tcg_gen_helper_0_0(helper_fmul8x16au
);
3748 gen_op_store_DT0_fpr(DFPREG(rd
));
3750 case 0x035: /* VIS I fmul8x16al */
3751 CHECK_FPU_FEATURE(dc
, VIS1
);
3752 gen_op_load_fpr_DT0(DFPREG(rs1
));
3753 gen_op_load_fpr_DT1(DFPREG(rs2
));
3754 tcg_gen_helper_0_0(helper_fmul8x16al
);
3755 gen_op_store_DT0_fpr(DFPREG(rd
));
3757 case 0x036: /* VIS I fmul8sux16 */
3758 CHECK_FPU_FEATURE(dc
, VIS1
);
3759 gen_op_load_fpr_DT0(DFPREG(rs1
));
3760 gen_op_load_fpr_DT1(DFPREG(rs2
));
3761 tcg_gen_helper_0_0(helper_fmul8sux16
);
3762 gen_op_store_DT0_fpr(DFPREG(rd
));
3764 case 0x037: /* VIS I fmul8ulx16 */
3765 CHECK_FPU_FEATURE(dc
, VIS1
);
3766 gen_op_load_fpr_DT0(DFPREG(rs1
));
3767 gen_op_load_fpr_DT1(DFPREG(rs2
));
3768 tcg_gen_helper_0_0(helper_fmul8ulx16
);
3769 gen_op_store_DT0_fpr(DFPREG(rd
));
3771 case 0x038: /* VIS I fmuld8sux16 */
3772 CHECK_FPU_FEATURE(dc
, VIS1
);
3773 gen_op_load_fpr_DT0(DFPREG(rs1
));
3774 gen_op_load_fpr_DT1(DFPREG(rs2
));
3775 tcg_gen_helper_0_0(helper_fmuld8sux16
);
3776 gen_op_store_DT0_fpr(DFPREG(rd
));
3778 case 0x039: /* VIS I fmuld8ulx16 */
3779 CHECK_FPU_FEATURE(dc
, VIS1
);
3780 gen_op_load_fpr_DT0(DFPREG(rs1
));
3781 gen_op_load_fpr_DT1(DFPREG(rs2
));
3782 tcg_gen_helper_0_0(helper_fmuld8ulx16
);
3783 gen_op_store_DT0_fpr(DFPREG(rd
));
3785 case 0x03a: /* VIS I fpack32 */
3786 case 0x03b: /* VIS I fpack16 */
3787 case 0x03d: /* VIS I fpackfix */
3788 case 0x03e: /* VIS I pdist */
3791 case 0x048: /* VIS I faligndata */
3792 CHECK_FPU_FEATURE(dc
, VIS1
);
3793 gen_op_load_fpr_DT0(DFPREG(rs1
));
3794 gen_op_load_fpr_DT1(DFPREG(rs2
));
3795 tcg_gen_helper_0_0(helper_faligndata
);
3796 gen_op_store_DT0_fpr(DFPREG(rd
));
3798 case 0x04b: /* VIS I fpmerge */
3799 CHECK_FPU_FEATURE(dc
, VIS1
);
3800 gen_op_load_fpr_DT0(DFPREG(rs1
));
3801 gen_op_load_fpr_DT1(DFPREG(rs2
));
3802 tcg_gen_helper_0_0(helper_fpmerge
);
3803 gen_op_store_DT0_fpr(DFPREG(rd
));
3805 case 0x04c: /* VIS II bshuffle */
3808 case 0x04d: /* VIS I fexpand */
3809 CHECK_FPU_FEATURE(dc
, VIS1
);
3810 gen_op_load_fpr_DT0(DFPREG(rs1
));
3811 gen_op_load_fpr_DT1(DFPREG(rs2
));
3812 tcg_gen_helper_0_0(helper_fexpand
);
3813 gen_op_store_DT0_fpr(DFPREG(rd
));
3815 case 0x050: /* VIS I fpadd16 */
3816 CHECK_FPU_FEATURE(dc
, VIS1
);
3817 gen_op_load_fpr_DT0(DFPREG(rs1
));
3818 gen_op_load_fpr_DT1(DFPREG(rs2
));
3819 tcg_gen_helper_0_0(helper_fpadd16
);
3820 gen_op_store_DT0_fpr(DFPREG(rd
));
3822 case 0x051: /* VIS I fpadd16s */
3823 CHECK_FPU_FEATURE(dc
, VIS1
);
3824 tcg_gen_helper_1_2(helper_fpadd16s
, cpu_fpr
[rd
],
3825 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3827 case 0x052: /* VIS I fpadd32 */
3828 CHECK_FPU_FEATURE(dc
, VIS1
);
3829 gen_op_load_fpr_DT0(DFPREG(rs1
));
3830 gen_op_load_fpr_DT1(DFPREG(rs2
));
3831 tcg_gen_helper_0_0(helper_fpadd32
);
3832 gen_op_store_DT0_fpr(DFPREG(rd
));
3834 case 0x053: /* VIS I fpadd32s */
3835 CHECK_FPU_FEATURE(dc
, VIS1
);
3836 tcg_gen_helper_1_2(helper_fpadd32s
, cpu_fpr
[rd
],
3837 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3839 case 0x054: /* VIS I fpsub16 */
3840 CHECK_FPU_FEATURE(dc
, VIS1
);
3841 gen_op_load_fpr_DT0(DFPREG(rs1
));
3842 gen_op_load_fpr_DT1(DFPREG(rs2
));
3843 tcg_gen_helper_0_0(helper_fpsub16
);
3844 gen_op_store_DT0_fpr(DFPREG(rd
));
3846 case 0x055: /* VIS I fpsub16s */
3847 CHECK_FPU_FEATURE(dc
, VIS1
);
3848 tcg_gen_helper_1_2(helper_fpsub16s
, cpu_fpr
[rd
],
3849 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3851 case 0x056: /* VIS I fpsub32 */
3852 CHECK_FPU_FEATURE(dc
, VIS1
);
3853 gen_op_load_fpr_DT0(DFPREG(rs1
));
3854 gen_op_load_fpr_DT1(DFPREG(rs2
));
3855 tcg_gen_helper_0_0(helper_fpsub32
);
3856 gen_op_store_DT0_fpr(DFPREG(rd
));
3858 case 0x057: /* VIS I fpsub32s */
3859 CHECK_FPU_FEATURE(dc
, VIS1
);
3860 tcg_gen_helper_1_2(helper_fpsub32s
, cpu_fpr
[rd
],
3861 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3863 case 0x060: /* VIS I fzero */
3864 CHECK_FPU_FEATURE(dc
, VIS1
);
3865 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3866 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3868 case 0x061: /* VIS I fzeros */
3869 CHECK_FPU_FEATURE(dc
, VIS1
);
3870 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3872 case 0x062: /* VIS I fnor */
3873 CHECK_FPU_FEATURE(dc
, VIS1
);
3874 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3875 cpu_fpr
[DFPREG(rs2
)]);
3876 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3877 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3878 cpu_fpr
[DFPREG(rs2
) + 1]);
3879 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3881 case 0x063: /* VIS I fnors */
3882 CHECK_FPU_FEATURE(dc
, VIS1
);
3883 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3884 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3886 case 0x064: /* VIS I fandnot2 */
3887 CHECK_FPU_FEATURE(dc
, VIS1
);
3888 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
3889 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3890 cpu_fpr
[DFPREG(rs2
)]);
3891 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
3892 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3893 cpu_fpr
[DFPREG(rs2
) + 1]);
3895 case 0x065: /* VIS I fandnot2s */
3896 CHECK_FPU_FEATURE(dc
, VIS1
);
3897 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
3898 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
3900 case 0x066: /* VIS I fnot2 */
3901 CHECK_FPU_FEATURE(dc
, VIS1
);
3902 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3904 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3905 cpu_fpr
[DFPREG(rs2
) + 1], -1);
3907 case 0x067: /* VIS I fnot2s */
3908 CHECK_FPU_FEATURE(dc
, VIS1
);
3909 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], -1);
3911 case 0x068: /* VIS I fandnot1 */
3912 CHECK_FPU_FEATURE(dc
, VIS1
);
3913 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3914 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3915 cpu_fpr
[DFPREG(rs1
)]);
3916 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3917 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3918 cpu_fpr
[DFPREG(rs1
) + 1]);
3920 case 0x069: /* VIS I fandnot1s */
3921 CHECK_FPU_FEATURE(dc
, VIS1
);
3922 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3923 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3925 case 0x06a: /* VIS I fnot1 */
3926 CHECK_FPU_FEATURE(dc
, VIS1
);
3927 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3929 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3930 cpu_fpr
[DFPREG(rs1
) + 1], -1);
3932 case 0x06b: /* VIS I fnot1s */
3933 CHECK_FPU_FEATURE(dc
, VIS1
);
3934 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], -1);
3936 case 0x06c: /* VIS I fxor */
3937 CHECK_FPU_FEATURE(dc
, VIS1
);
3938 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3939 cpu_fpr
[DFPREG(rs2
)]);
3940 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3941 cpu_fpr
[DFPREG(rs1
) + 1],
3942 cpu_fpr
[DFPREG(rs2
) + 1]);
3944 case 0x06d: /* VIS I fxors */
3945 CHECK_FPU_FEATURE(dc
, VIS1
);
3946 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3948 case 0x06e: /* VIS I fnand */
3949 CHECK_FPU_FEATURE(dc
, VIS1
);
3950 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3951 cpu_fpr
[DFPREG(rs2
)]);
3952 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3953 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3954 cpu_fpr
[DFPREG(rs2
) + 1]);
3955 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3957 case 0x06f: /* VIS I fnands */
3958 CHECK_FPU_FEATURE(dc
, VIS1
);
3959 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3960 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3962 case 0x070: /* VIS I fand */
3963 CHECK_FPU_FEATURE(dc
, VIS1
);
3964 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3965 cpu_fpr
[DFPREG(rs2
)]);
3966 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3967 cpu_fpr
[DFPREG(rs1
) + 1],
3968 cpu_fpr
[DFPREG(rs2
) + 1]);
3970 case 0x071: /* VIS I fands */
3971 CHECK_FPU_FEATURE(dc
, VIS1
);
3972 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3974 case 0x072: /* VIS I fxnor */
3975 CHECK_FPU_FEATURE(dc
, VIS1
);
3976 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3977 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3978 cpu_fpr
[DFPREG(rs1
)]);
3979 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
3980 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3981 cpu_fpr
[DFPREG(rs1
) + 1]);
3983 case 0x073: /* VIS I fxnors */
3984 CHECK_FPU_FEATURE(dc
, VIS1
);
3985 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3986 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3988 case 0x074: /* VIS I fsrc1 */
3989 CHECK_FPU_FEATURE(dc
, VIS1
);
3990 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3991 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
3992 cpu_fpr
[DFPREG(rs1
) + 1]);
3994 case 0x075: /* VIS I fsrc1s */
3995 CHECK_FPU_FEATURE(dc
, VIS1
);
3996 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3998 case 0x076: /* VIS I fornot2 */
3999 CHECK_FPU_FEATURE(dc
, VIS1
);
4000 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
4001 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4002 cpu_fpr
[DFPREG(rs2
)]);
4003 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
4004 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4005 cpu_fpr
[DFPREG(rs2
) + 1]);
4007 case 0x077: /* VIS I fornot2s */
4008 CHECK_FPU_FEATURE(dc
, VIS1
);
4009 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
4010 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
4012 case 0x078: /* VIS I fsrc2 */
4013 CHECK_FPU_FEATURE(dc
, VIS1
);
4014 gen_op_load_fpr_DT0(DFPREG(rs2
));
4015 gen_op_store_DT0_fpr(DFPREG(rd
));
4017 case 0x079: /* VIS I fsrc2s */
4018 CHECK_FPU_FEATURE(dc
, VIS1
);
4019 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4021 case 0x07a: /* VIS I fornot1 */
4022 CHECK_FPU_FEATURE(dc
, VIS1
);
4023 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4024 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4025 cpu_fpr
[DFPREG(rs1
)]);
4026 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4027 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4028 cpu_fpr
[DFPREG(rs1
) + 1]);
4030 case 0x07b: /* VIS I fornot1s */
4031 CHECK_FPU_FEATURE(dc
, VIS1
);
4032 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4033 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4035 case 0x07c: /* VIS I for */
4036 CHECK_FPU_FEATURE(dc
, VIS1
);
4037 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4038 cpu_fpr
[DFPREG(rs2
)]);
4039 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4040 cpu_fpr
[DFPREG(rs1
) + 1],
4041 cpu_fpr
[DFPREG(rs2
) + 1]);
4043 case 0x07d: /* VIS I fors */
4044 CHECK_FPU_FEATURE(dc
, VIS1
);
4045 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4047 case 0x07e: /* VIS I fone */
4048 CHECK_FPU_FEATURE(dc
, VIS1
);
4049 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4050 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4052 case 0x07f: /* VIS I fones */
4053 CHECK_FPU_FEATURE(dc
, VIS1
);
4054 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4056 case 0x080: /* VIS I shutdown */
4057 case 0x081: /* VIS II siam */
4066 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4067 #ifdef TARGET_SPARC64
4072 #ifdef TARGET_SPARC64
4073 } else if (xop
== 0x39) { /* V9 return */
4076 save_state(dc
, cpu_cond
);
4077 cpu_src1
= get_src1(insn
, cpu_src1
);
4078 if (IS_IMM
) { /* immediate */
4079 rs2
= GET_FIELDs(insn
, 19, 31);
4080 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4081 } else { /* register */
4082 rs2
= GET_FIELD(insn
, 27, 31);
4084 gen_movl_reg_TN(rs2
, cpu_src2
);
4085 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4087 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4089 tcg_gen_helper_0_0(helper_restore
);
4090 gen_mov_pc_npc(dc
, cpu_cond
);
4091 r_const
= tcg_const_i32(3);
4092 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
, r_const
);
4093 tcg_temp_free(r_const
);
4094 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4095 dc
->npc
= DYNAMIC_PC
;
4099 cpu_src1
= get_src1(insn
, cpu_src1
);
4100 if (IS_IMM
) { /* immediate */
4101 rs2
= GET_FIELDs(insn
, 19, 31);
4102 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4103 } else { /* register */
4104 rs2
= GET_FIELD(insn
, 27, 31);
4106 gen_movl_reg_TN(rs2
, cpu_src2
);
4107 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4109 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4112 case 0x38: /* jmpl */
4116 r_const
= tcg_const_tl(dc
->pc
);
4117 gen_movl_TN_reg(rd
, r_const
);
4118 tcg_temp_free(r_const
);
4119 gen_mov_pc_npc(dc
, cpu_cond
);
4120 r_const
= tcg_const_i32(3);
4121 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4123 tcg_temp_free(r_const
);
4124 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4125 dc
->npc
= DYNAMIC_PC
;
4128 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4129 case 0x39: /* rett, V9 return */
4133 if (!supervisor(dc
))
4135 gen_mov_pc_npc(dc
, cpu_cond
);
4136 r_const
= tcg_const_i32(3);
4137 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4139 tcg_temp_free(r_const
);
4140 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4141 dc
->npc
= DYNAMIC_PC
;
4142 tcg_gen_helper_0_0(helper_rett
);
4146 case 0x3b: /* flush */
4147 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4149 tcg_gen_helper_0_1(helper_flush
, cpu_dst
);
4151 case 0x3c: /* save */
4152 save_state(dc
, cpu_cond
);
4153 tcg_gen_helper_0_0(helper_save
);
4154 gen_movl_TN_reg(rd
, cpu_dst
);
4156 case 0x3d: /* restore */
4157 save_state(dc
, cpu_cond
);
4158 tcg_gen_helper_0_0(helper_restore
);
4159 gen_movl_TN_reg(rd
, cpu_dst
);
4161 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4162 case 0x3e: /* V9 done/retry */
4166 if (!supervisor(dc
))
4168 dc
->npc
= DYNAMIC_PC
;
4169 dc
->pc
= DYNAMIC_PC
;
4170 tcg_gen_helper_0_0(helper_done
);
4173 if (!supervisor(dc
))
4175 dc
->npc
= DYNAMIC_PC
;
4176 dc
->pc
= DYNAMIC_PC
;
4177 tcg_gen_helper_0_0(helper_retry
);
4192 case 3: /* load/store instructions */
4194 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4196 cpu_src1
= get_src1(insn
, cpu_src1
);
4197 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4198 rs2
= GET_FIELD(insn
, 27, 31);
4199 gen_movl_reg_TN(rs2
, cpu_src2
);
4200 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4201 } else if (IS_IMM
) { /* immediate */
4202 rs2
= GET_FIELDs(insn
, 19, 31);
4203 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, (int)rs2
);
4204 } else { /* register */
4205 rs2
= GET_FIELD(insn
, 27, 31);
4207 gen_movl_reg_TN(rs2
, cpu_src2
);
4208 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4210 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4212 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4213 (xop
> 0x17 && xop
<= 0x1d ) ||
4214 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4216 case 0x0: /* load unsigned word */
4217 gen_address_mask(dc
, cpu_addr
);
4218 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4220 case 0x1: /* load unsigned byte */
4221 gen_address_mask(dc
, cpu_addr
);
4222 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4224 case 0x2: /* load unsigned halfword */
4225 gen_address_mask(dc
, cpu_addr
);
4226 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4228 case 0x3: /* load double word */
4234 save_state(dc
, cpu_cond
);
4235 r_const
= tcg_const_i32(7);
4236 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4237 r_const
); // XXX remove
4238 tcg_temp_free(r_const
);
4239 gen_address_mask(dc
, cpu_addr
);
4240 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4241 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4242 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4243 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4244 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4245 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4246 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4249 case 0x9: /* load signed byte */
4250 gen_address_mask(dc
, cpu_addr
);
4251 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4253 case 0xa: /* load signed halfword */
4254 gen_address_mask(dc
, cpu_addr
);
4255 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4257 case 0xd: /* ldstub -- XXX: should be atomically */
4261 gen_address_mask(dc
, cpu_addr
);
4262 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4263 r_const
= tcg_const_tl(0xff);
4264 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4265 tcg_temp_free(r_const
);
4268 case 0x0f: /* swap register with memory. Also
4270 CHECK_IU_FEATURE(dc
, SWAP
);
4271 gen_movl_reg_TN(rd
, cpu_val
);
4272 gen_address_mask(dc
, cpu_addr
);
4273 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4274 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4275 tcg_gen_extu_i32_tl(cpu_val
, cpu_tmp32
);
4277 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4278 case 0x10: /* load word alternate */
4279 #ifndef TARGET_SPARC64
4282 if (!supervisor(dc
))
4285 save_state(dc
, cpu_cond
);
4286 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4288 case 0x11: /* load unsigned byte alternate */
4289 #ifndef TARGET_SPARC64
4292 if (!supervisor(dc
))
4295 save_state(dc
, cpu_cond
);
4296 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4298 case 0x12: /* load unsigned halfword alternate */
4299 #ifndef TARGET_SPARC64
4302 if (!supervisor(dc
))
4305 save_state(dc
, cpu_cond
);
4306 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4308 case 0x13: /* load double word alternate */
4309 #ifndef TARGET_SPARC64
4312 if (!supervisor(dc
))
4317 save_state(dc
, cpu_cond
);
4318 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4320 case 0x19: /* load signed byte alternate */
4321 #ifndef TARGET_SPARC64
4324 if (!supervisor(dc
))
4327 save_state(dc
, cpu_cond
);
4328 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4330 case 0x1a: /* load signed halfword alternate */
4331 #ifndef TARGET_SPARC64
4334 if (!supervisor(dc
))
4337 save_state(dc
, cpu_cond
);
4338 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4340 case 0x1d: /* ldstuba -- XXX: should be atomically */
4341 #ifndef TARGET_SPARC64
4344 if (!supervisor(dc
))
4347 save_state(dc
, cpu_cond
);
4348 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4350 case 0x1f: /* swap reg with alt. memory. Also
4352 CHECK_IU_FEATURE(dc
, SWAP
);
4353 #ifndef TARGET_SPARC64
4356 if (!supervisor(dc
))
4359 save_state(dc
, cpu_cond
);
4360 gen_movl_reg_TN(rd
, cpu_val
);
4361 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4364 #ifndef TARGET_SPARC64
4365 case 0x30: /* ldc */
4366 case 0x31: /* ldcsr */
4367 case 0x33: /* lddc */
4371 #ifdef TARGET_SPARC64
4372 case 0x08: /* V9 ldsw */
4373 gen_address_mask(dc
, cpu_addr
);
4374 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4376 case 0x0b: /* V9 ldx */
4377 gen_address_mask(dc
, cpu_addr
);
4378 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4380 case 0x18: /* V9 ldswa */
4381 save_state(dc
, cpu_cond
);
4382 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4384 case 0x1b: /* V9 ldxa */
4385 save_state(dc
, cpu_cond
);
4386 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4388 case 0x2d: /* V9 prefetch, no effect */
4390 case 0x30: /* V9 ldfa */
4391 save_state(dc
, cpu_cond
);
4392 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4394 case 0x33: /* V9 lddfa */
4395 save_state(dc
, cpu_cond
);
4396 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4398 case 0x3d: /* V9 prefetcha, no effect */
4400 case 0x32: /* V9 ldqfa */
4401 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4402 save_state(dc
, cpu_cond
);
4403 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4409 gen_movl_TN_reg(rd
, cpu_val
);
4410 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4413 } else if (xop
>= 0x20 && xop
< 0x24) {
4414 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4416 save_state(dc
, cpu_cond
);
4418 case 0x20: /* load fpreg */
4419 gen_address_mask(dc
, cpu_addr
);
4420 tcg_gen_qemu_ld32u(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4422 case 0x21: /* ldfsr, V9 ldxfsr */
4423 #ifdef TARGET_SPARC64
4424 gen_address_mask(dc
, cpu_addr
);
4426 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4427 tcg_gen_helper_0_1(helper_ldxfsr
, cpu_tmp64
);
4431 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4432 tcg_gen_helper_0_1(helper_ldfsr
, cpu_tmp32
);
4436 case 0x22: /* load quad fpreg */
4440 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4441 r_const
= tcg_const_i32(dc
->mem_idx
);
4442 tcg_gen_helper_0_2(helper_ldqf
, cpu_addr
, r_const
);
4443 tcg_temp_free(r_const
);
4444 gen_op_store_QT0_fpr(QFPREG(rd
));
4447 case 0x23: /* load double fpreg */
4451 r_const
= tcg_const_i32(dc
->mem_idx
);
4452 tcg_gen_helper_0_2(helper_lddf
, cpu_addr
, r_const
);
4453 tcg_temp_free(r_const
);
4454 gen_op_store_DT0_fpr(DFPREG(rd
));
4460 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4461 xop
== 0xe || xop
== 0x1e) {
4462 gen_movl_reg_TN(rd
, cpu_val
);
4464 case 0x4: /* store word */
4465 gen_address_mask(dc
, cpu_addr
);
4466 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4468 case 0x5: /* store byte */
4469 gen_address_mask(dc
, cpu_addr
);
4470 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4472 case 0x6: /* store halfword */
4473 gen_address_mask(dc
, cpu_addr
);
4474 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4476 case 0x7: /* store double word */
4480 TCGv r_low
, r_const
;
4482 save_state(dc
, cpu_cond
);
4483 gen_address_mask(dc
, cpu_addr
);
4484 r_const
= tcg_const_i32(7);
4485 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4486 r_const
); // XXX remove
4487 tcg_temp_free(r_const
);
4488 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4489 r_low
= tcg_temp_new(TCG_TYPE_I32
);
4490 tcg_gen_trunc_tl_i32(r_low
, cpu_tmp0
);
4491 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_val
);
4492 tcg_gen_concat_i32_i64(cpu_tmp64
, r_low
, cpu_tmp32
);
4493 tcg_temp_free(r_low
);
4494 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4497 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4498 case 0x14: /* store word alternate */
4499 #ifndef TARGET_SPARC64
4502 if (!supervisor(dc
))
4505 save_state(dc
, cpu_cond
);
4506 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4508 case 0x15: /* store byte alternate */
4509 #ifndef TARGET_SPARC64
4512 if (!supervisor(dc
))
4515 save_state(dc
, cpu_cond
);
4516 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4518 case 0x16: /* store halfword alternate */
4519 #ifndef TARGET_SPARC64
4522 if (!supervisor(dc
))
4525 save_state(dc
, cpu_cond
);
4526 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4528 case 0x17: /* store double word alternate */
4529 #ifndef TARGET_SPARC64
4532 if (!supervisor(dc
))
4538 save_state(dc
, cpu_cond
);
4539 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4543 #ifdef TARGET_SPARC64
4544 case 0x0e: /* V9 stx */
4545 gen_address_mask(dc
, cpu_addr
);
4546 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4548 case 0x1e: /* V9 stxa */
4549 save_state(dc
, cpu_cond
);
4550 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4556 } else if (xop
> 0x23 && xop
< 0x28) {
4557 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4559 save_state(dc
, cpu_cond
);
4561 case 0x24: /* store fpreg */
4562 gen_address_mask(dc
, cpu_addr
);
4563 tcg_gen_qemu_st32(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4565 case 0x25: /* stfsr, V9 stxfsr */
4566 #ifdef TARGET_SPARC64
4567 gen_address_mask(dc
, cpu_addr
);
4568 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4570 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4572 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp64
);
4573 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4576 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4577 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4581 #ifdef TARGET_SPARC64
4582 /* V9 stqf, store quad fpreg */
4586 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4587 gen_op_load_fpr_QT0(QFPREG(rd
));
4588 r_const
= tcg_const_i32(dc
->mem_idx
);
4589 tcg_gen_helper_0_2(helper_stqf
, cpu_addr
, r_const
);
4590 tcg_temp_free(r_const
);
4593 #else /* !TARGET_SPARC64 */
4594 /* stdfq, store floating point queue */
4595 #if defined(CONFIG_USER_ONLY)
4598 if (!supervisor(dc
))
4600 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4605 case 0x27: /* store double fpreg */
4609 gen_op_load_fpr_DT0(DFPREG(rd
));
4610 r_const
= tcg_const_i32(dc
->mem_idx
);
4611 tcg_gen_helper_0_2(helper_stdf
, cpu_addr
, r_const
);
4612 tcg_temp_free(r_const
);
4618 } else if (xop
> 0x33 && xop
< 0x3f) {
4619 save_state(dc
, cpu_cond
);
4621 #ifdef TARGET_SPARC64
4622 case 0x34: /* V9 stfa */
4623 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4625 case 0x36: /* V9 stqfa */
4629 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4630 r_const
= tcg_const_i32(7);
4631 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4633 tcg_temp_free(r_const
);
4634 gen_op_load_fpr_QT0(QFPREG(rd
));
4635 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4638 case 0x37: /* V9 stdfa */
4639 gen_op_load_fpr_DT0(DFPREG(rd
));
4640 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4642 case 0x3c: /* V9 casa */
4643 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4644 gen_movl_TN_reg(rd
, cpu_val
);
4646 case 0x3e: /* V9 casxa */
4647 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4648 gen_movl_TN_reg(rd
, cpu_val
);
4651 case 0x34: /* stc */
4652 case 0x35: /* stcsr */
4653 case 0x36: /* stdcq */
4654 case 0x37: /* stdc */
4666 /* default case for non jump instructions */
4667 if (dc
->npc
== DYNAMIC_PC
) {
4668 dc
->pc
= DYNAMIC_PC
;
4670 } else if (dc
->npc
== JUMP_PC
) {
4671 /* we can do a static jump */
4672 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4676 dc
->npc
= dc
->npc
+ 4;
4684 save_state(dc
, cpu_cond
);
4685 r_const
= tcg_const_i32(TT_ILL_INSN
);
4686 tcg_gen_helper_0_1(raise_exception
, r_const
);
4687 tcg_temp_free(r_const
);
4695 save_state(dc
, cpu_cond
);
4696 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4697 tcg_gen_helper_0_1(raise_exception
, r_const
);
4698 tcg_temp_free(r_const
);
4702 #if !defined(CONFIG_USER_ONLY)
4707 save_state(dc
, cpu_cond
);
4708 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4709 tcg_gen_helper_0_1(raise_exception
, r_const
);
4710 tcg_temp_free(r_const
);
4716 save_state(dc
, cpu_cond
);
4717 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4720 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4722 save_state(dc
, cpu_cond
);
4723 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4727 #ifndef TARGET_SPARC64
4732 save_state(dc
, cpu_cond
);
4733 r_const
= tcg_const_i32(TT_NCP_INSN
);
4734 tcg_gen_helper_0_1(raise_exception
, r_const
);
4735 tcg_temp_free(r_const
);
4742 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4743 int spc
, CPUSPARCState
*env
)
4745 target_ulong pc_start
, last_pc
;
4746 uint16_t *gen_opc_end
;
4747 DisasContext dc1
, *dc
= &dc1
;
4752 memset(dc
, 0, sizeof(DisasContext
));
4757 dc
->npc
= (target_ulong
) tb
->cs_base
;
4758 dc
->mem_idx
= cpu_mmu_index(env
);
4760 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4761 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4763 dc
->fpu_enabled
= 0;
4764 #ifdef TARGET_SPARC64
4765 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4767 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4769 cpu_tmp0
= tcg_temp_new(TCG_TYPE_TL
);
4770 cpu_tmp32
= tcg_temp_new(TCG_TYPE_I32
);
4771 cpu_tmp64
= tcg_temp_new(TCG_TYPE_I64
);
4773 cpu_dst
= tcg_temp_local_new(TCG_TYPE_TL
);
4776 cpu_val
= tcg_temp_local_new(TCG_TYPE_TL
);
4777 cpu_addr
= tcg_temp_local_new(TCG_TYPE_TL
);
4780 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4782 max_insns
= CF_COUNT_MASK
;
4785 if (env
->nb_breakpoints
> 0) {
4786 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4787 if (env
->breakpoints
[j
] == dc
->pc
) {
4788 if (dc
->pc
!= pc_start
)
4789 save_state(dc
, cpu_cond
);
4790 tcg_gen_helper_0_0(helper_debug
);
4799 fprintf(logfile
, "Search PC...\n");
4800 j
= gen_opc_ptr
- gen_opc_buf
;
4804 gen_opc_instr_start
[lj
++] = 0;
4805 gen_opc_pc
[lj
] = dc
->pc
;
4806 gen_opc_npc
[lj
] = dc
->npc
;
4807 gen_opc_instr_start
[lj
] = 1;
4808 gen_opc_icount
[lj
] = num_insns
;
4811 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4814 disas_sparc_insn(dc
);
4819 /* if the next PC is different, we abort now */
4820 if (dc
->pc
!= (last_pc
+ 4))
4822 /* if we reach a page boundary, we stop generation so that the
4823 PC of a TT_TFAULT exception is always in the right page */
4824 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4826 /* if single step mode, we generate only one instruction and
4827 generate an exception */
4828 if (env
->singlestep_enabled
) {
4829 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4833 } while ((gen_opc_ptr
< gen_opc_end
) &&
4834 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4835 num_insns
< max_insns
);
4838 tcg_temp_free(cpu_addr
);
4839 tcg_temp_free(cpu_val
);
4840 tcg_temp_free(cpu_dst
);
4841 tcg_temp_free(cpu_tmp64
);
4842 tcg_temp_free(cpu_tmp32
);
4843 tcg_temp_free(cpu_tmp0
);
4844 if (tb
->cflags
& CF_LAST_IO
)
4847 if (dc
->pc
!= DYNAMIC_PC
&&
4848 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4849 /* static PC and NPC: we can use direct chaining */
4850 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4852 if (dc
->pc
!= DYNAMIC_PC
)
4853 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4854 save_npc(dc
, cpu_cond
);
4858 gen_icount_end(tb
, num_insns
);
4859 *gen_opc_ptr
= INDEX_op_end
;
4861 j
= gen_opc_ptr
- gen_opc_buf
;
4864 gen_opc_instr_start
[lj
++] = 0;
4870 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4871 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4873 tb
->size
= last_pc
+ 4 - pc_start
;
4874 tb
->icount
= num_insns
;
4877 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4878 fprintf(logfile
, "--------------\n");
4879 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4880 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4881 fprintf(logfile
, "\n");
4886 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4888 gen_intermediate_code_internal(tb
, 0, env
);
4891 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4893 gen_intermediate_code_internal(tb
, 1, env
);
4896 void gen_intermediate_code_init(CPUSPARCState
*env
)
4900 static const char * const gregnames
[8] = {
4901 NULL
, // g0 not used
4910 static const char * const fregnames
[64] = {
4911 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4912 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4913 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4914 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4915 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4916 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4917 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4918 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4921 /* init various static tables */
4925 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
4926 cpu_regwptr
= tcg_global_mem_new(TCG_TYPE_PTR
, TCG_AREG0
,
4927 offsetof(CPUState
, regwptr
),
4929 #ifdef TARGET_SPARC64
4930 cpu_xcc
= tcg_global_mem_new(TCG_TYPE_I32
,
4931 TCG_AREG0
, offsetof(CPUState
, xcc
),
4933 cpu_asi
= tcg_global_mem_new(TCG_TYPE_I32
,
4934 TCG_AREG0
, offsetof(CPUState
, asi
),
4936 cpu_fprs
= tcg_global_mem_new(TCG_TYPE_I32
,
4937 TCG_AREG0
, offsetof(CPUState
, fprs
),
4939 cpu_gsr
= tcg_global_mem_new(TCG_TYPE_TL
,
4940 TCG_AREG0
, offsetof(CPUState
, gsr
),
4942 cpu_tick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4944 offsetof(CPUState
, tick_cmpr
),
4946 cpu_stick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4948 offsetof(CPUState
, stick_cmpr
),
4950 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4952 offsetof(CPUState
, hstick_cmpr
),
4954 cpu_hintp
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4955 offsetof(CPUState
, hintp
),
4957 cpu_htba
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4958 offsetof(CPUState
, htba
),
4960 cpu_hver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4961 offsetof(CPUState
, hver
),
4963 cpu_ssr
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4964 offsetof(CPUState
, ssr
), "ssr");
4965 cpu_ver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4966 offsetof(CPUState
, version
), "ver");
4968 cpu_wim
= tcg_global_mem_new(TCG_TYPE_I32
,
4969 TCG_AREG0
, offsetof(CPUState
, wim
),
4972 cpu_cond
= tcg_global_mem_new(TCG_TYPE_TL
,
4973 TCG_AREG0
, offsetof(CPUState
, cond
),
4975 cpu_cc_src
= tcg_global_mem_new(TCG_TYPE_TL
,
4976 TCG_AREG0
, offsetof(CPUState
, cc_src
),
4978 cpu_cc_src2
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4979 offsetof(CPUState
, cc_src2
),
4981 cpu_cc_dst
= tcg_global_mem_new(TCG_TYPE_TL
,
4982 TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4984 cpu_psr
= tcg_global_mem_new(TCG_TYPE_I32
,
4985 TCG_AREG0
, offsetof(CPUState
, psr
),
4987 cpu_fsr
= tcg_global_mem_new(TCG_TYPE_TL
,
4988 TCG_AREG0
, offsetof(CPUState
, fsr
),
4990 cpu_pc
= tcg_global_mem_new(TCG_TYPE_TL
,
4991 TCG_AREG0
, offsetof(CPUState
, pc
),
4993 cpu_npc
= tcg_global_mem_new(TCG_TYPE_TL
,
4994 TCG_AREG0
, offsetof(CPUState
, npc
),
4996 cpu_y
= tcg_global_mem_new(TCG_TYPE_TL
,
4997 TCG_AREG0
, offsetof(CPUState
, y
), "y");
4998 #ifndef CONFIG_USER_ONLY
4999 cpu_tbr
= tcg_global_mem_new(TCG_TYPE_TL
,
5000 TCG_AREG0
, offsetof(CPUState
, tbr
),
5003 for (i
= 1; i
< 8; i
++)
5004 cpu_gregs
[i
] = tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
5005 offsetof(CPUState
, gregs
[i
]),
5007 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5008 cpu_fpr
[i
] = tcg_global_mem_new(TCG_TYPE_I32
, TCG_AREG0
,
5009 offsetof(CPUState
, fpr
[i
]),
5012 /* register helpers */
5015 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5020 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
5021 unsigned long searched_pc
, int pc_pos
, void *puc
)
5024 env
->pc
= gen_opc_pc
[pc_pos
];
5025 npc
= gen_opc_npc
[pc_pos
];
5027 /* dynamic NPC: already stored */
5028 } else if (npc
== 2) {
5029 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
5030 /* jump PC: use T2 and the jump targets of the translation */
5032 env
->npc
= gen_opc_jump_pc
[0];
5034 env
->npc
= gen_opc_jump_pc
[1];