/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
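/* At translation time dc->pc/dc->npc hold either a concrete address or
   one of the two tags above: DYNAMIC_PC means the value lives only in
   cpu_pc/cpu_npc at run time, JUMP_PC means npc is one of
   jump_pc[0]/jump_pc[1], to be selected by the pending condition value
   (see gen_generic_branch below). */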
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv_i32 cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
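/* The FP register file is modelled as TARGET_FPREGS 32-bit halves: a
   double occupies an even/odd pair and a quad four consecutive halves
   (see the DT/QT load and store helpers and DFPREG/QFPREG below). */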
#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
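/* e.g. GET_FIELD(insn, 0, 1) yields the two most significant bits (the
   op field), while GET_FIELD_SP(insn, 0, 18) yields the low 19 bits,
   counting from bit 0 upwards as the architecture manuals do. */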
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
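/* On V9 the low bit of the 5-bit register field selects the upper bank,
   e.g. a field value of 3 names %f34: ((3 & 1) << 5) | (3 & 0x1e)
   == 32 | 2 == 34. */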
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
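/* e.g. sign_extend(0x3fffff, 22) == -1: the 22-bit field is shifted up
   to bit 31 and arithmetically shifted back down. */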
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
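/* Register access model: %g0 always reads as zero and ignores writes,
   %g1-%g7 live in the cpu_gregs[] TCG globals, and the windowed
   registers (regs 8..31) are reached indirectly through cpu_regwptr,
   which tracks the current register window. */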
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
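/* Returning (long)tb + tb_num from the TB tells the execution loop
   which direct-jump slot of this TB to patch once the destination is
   translated; exit_tb(0) forces a full TB lookup instead. */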
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
/* old op:
    if (!T0)
        env->psr |= PSR_ZERO;
    if ((int32_t) T0 < 0)
        env->psr |= PSR_NEG;
*/
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new();
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext32s_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
/* old op:
    if (T0 < src1)
        env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
    if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_add_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    gen_op_add_cc2(dst);
}
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_add_cc2(dst);
}
static inline void gen_op_addx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_addx_cc2(dst);
}
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_addx_cc2(dst);
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* old op:
    if (src1 < T1)
        env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
    if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_sub_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(dst, src1);
        gen_op_logic_cc(dst);
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        gen_op_sub_cc2(dst);
    }
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_sub_cc2(dst);
}
static inline void gen_op_subx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_subx_cc2(dst);
}
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_subx_cc2(dst);
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
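/* MULScc performs one step of a 32x32 multiply; software typically
   issues it 32 times in a loop, shifting the multiplier through %y, so
   the commented pseudo-code above mirrors a single SPARCv8 step. */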
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
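/* The two brcondi guards catch the only signed-overflow case of 64-bit
   division, INT64_MIN / -1, which must yield INT64_MIN instead of
   executing the (undefined) host division. */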
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
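/* Annulled conditional branch ("branch,a"): the delay slot at pc2 is
   executed only when the branch is taken, so the taken path goes
   through the slot (pc2, then pc1) while the not-taken path skips it
   entirely (pc2 + 4, pc2 + 8). */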
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
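/* When the branch is conditional and not annulled, the outcome cannot
   be decided at translation time: both candidate npc values are kept in
   jump_pc[] and npc is tagged JUMP_PC until the condition value is
   materialized (see flush_cond/save_npc). */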
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
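/* With the immediate form the ASI comes from the %asi register at run
   time, so a TCG temp tracking cpu_asi is returned; otherwise the 8-bit
   ASI field is a translation-time constant taken from the insn. */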
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}

#elif !defined(CONFIG_USER_ONLY)
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
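/* For format-3 instructions IS_IMM selects between a sign-extended
   13-bit immediate (simm13, bits 12..0) and a second source register;
   both helpers fall back to the caller-provided temp only for windowed
   registers, otherwise they return a constant or a global directly. */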
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_src1 = tcg_temp_new(); // const
    cpu_src2 = tcg_temp_new(); // const
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
2457 } else if (xop
== 0x34) { /* FPU Operations */
2458 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2460 gen_op_clear_ieee_excp_and_FTT();
2461 rs1
= GET_FIELD(insn
, 13, 17);
2462 rs2
= GET_FIELD(insn
, 27, 31);
2463 xop
= GET_FIELD(insn
, 18, 26);
            case 0x1: /* fmovs */
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x5: /* fnegs */
                gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x9: /* fabss */
                gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x29: /* fsqrts */
                CHECK_FPU_FEATURE(dc, FSQRT);
                gen_clear_float_exceptions();
                gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x2a: /* fsqrtd */
                CHECK_FPU_FEATURE(dc, FSQRT);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsqrtd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x2b: /* fsqrtq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsqrtq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x41: /* fadds */
                gen_clear_float_exceptions();
                gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x42: /* faddd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_faddd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x43: /* faddq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_faddq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x45: /* fsubs */
                gen_clear_float_exceptions();
                gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x46: /* fsubd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x47: /* fsubq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x49: /* fmuls */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_clear_float_exceptions();
                gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4a: /* fmuld */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmuld();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4b: /* fmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x4d: /* fdivs */
                gen_clear_float_exceptions();
                gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4e: /* fdivd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4f: /* fdivq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x69: /* fsmuld */
                CHECK_FPU_FEATURE(dc, FSMULD);
                gen_clear_float_exceptions();
                gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x6e: /* fdmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
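                /* Pattern used by the double/quad cases above: the helper
                   takes no TCG operands, so operands are staged through
                   the env-resident DT0/DT1 (QT0/QT1) slots and the result
                   is copied back afterwards, e.g. for faddd:

                       gen_op_load_fpr_DT0(DFPREG(rs1));
                       gen_op_load_fpr_DT1(DFPREG(rs2));
                       gen_clear_float_exceptions();
                       gen_helper_faddd();
                       gen_helper_check_ieee_exceptions();
                       gen_op_store_DT0_fpr(DFPREG(rd));
                */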
            case 0xc4: /* fitos */
                gen_clear_float_exceptions();
                gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc6: /* fdtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc7: /* fqtos */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc8: /* fitod */
                gen_helper_fitod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xc9: /* fstod */
                gen_helper_fstod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcb: /* fqtod */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcc: /* fitoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fitoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xcd: /* fstoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fstoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xce: /* fdtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fdtoq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xd1: /* fstoi */
                gen_clear_float_exceptions();
                gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd2: /* fdtoi */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd3: /* fqtoi */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
#ifdef TARGET_SPARC64
            case 0x2: /* V9 fmovd */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x3: /* V9 fmovq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                break;
            case 0x6: /* V9 fnegd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fnegd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x7: /* V9 fnegq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fnegq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xa: /* V9 fabsd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fabsd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xb: /* V9 fabsq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fabsq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x81: /* V9 fstox */
                gen_clear_float_exceptions();
                gen_helper_fstox(cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x82: /* V9 fdtox */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x83: /* V9 fqtox */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x84: /* V9 fxtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x88: /* V9 fxtod */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x8c: /* V9 fxtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtoq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#endif
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
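            /* fmov[sdq]r above are conditional moves implemented with a
               host-side TCG branch: when the register condition fails,
               the brcond skips over the copy to label l1, so only the
               fall-through path moves the FP register(s). */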
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                                \
            {                                                       \
                TCGv r_cond;                                        \
                int l1;                                             \
                                                                    \
                l1 = gen_new_label();                               \
                r_cond = tcg_temp_new();                            \
                cond = GET_FIELD_SP(insn, 14, 17);                  \
                gen_fcond(r_cond, fcc, cond);                       \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                   0, l1);                          \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);         \
                gen_set_label(l1);                                  \
                tcg_temp_free(r_cond);                              \
            }
#define FMOVDCC(fcc)                                                \
            {                                                       \
                TCGv r_cond;                                        \
                int l1;                                             \
                                                                    \
                l1 = gen_new_label();                               \
                r_cond = tcg_temp_new();                            \
                cond = GET_FIELD_SP(insn, 14, 17);                  \
                gen_fcond(r_cond, fcc, cond);                       \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                   0, l1);                          \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],                \
                                cpu_fpr[DFPREG(rs2)]);              \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],            \
                                cpu_fpr[DFPREG(rs2) + 1]);          \
                gen_set_label(l1);                                  \
                tcg_temp_free(r_cond);                              \
            }
#define FMOVQCC(fcc)                                                \
            {                                                       \
                TCGv r_cond;                                        \
                int l1;                                             \
                                                                    \
                l1 = gen_new_label();                               \
                r_cond = tcg_temp_new();                            \
                cond = GET_FIELD_SP(insn, 14, 17);                  \
                gen_fcond(r_cond, fcc, cond);                       \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                   0, l1);                          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],                \
                                cpu_fpr[QFPREG(rs2)]);              \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],            \
                                cpu_fpr[QFPREG(rs2) + 1]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],            \
                                cpu_fpr[QFPREG(rs2) + 2]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],            \
                                cpu_fpr[QFPREG(rs2) + 3]);          \
                gen_set_label(l1);                                  \
                tcg_temp_free(r_cond);                              \
            }
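/* Each FMOV?CC(fcc) expansion evaluates the selected %fccN condition into
   r_cond and branches over the register copy when it is false; the cases
   below simply instantiate the macro with the fcc index, e.g. FMOVSCC(0)
   for fmovscc %fcc0. */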
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVSCC(0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVDCC(0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVSCC(1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVDCC(1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVSCC(2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVDCC(2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(2);
                break;
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVSCC(3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVDCC(3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(3);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                \
            {                                                       \
                TCGv r_cond;                                        \
                int l1;                                             \
                                                                    \
                l1 = gen_new_label();                               \
                r_cond = tcg_temp_new();                            \
                cond = GET_FIELD_SP(insn, 14, 17);                  \
                gen_cond(r_cond, icc, cond);                        \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                   0, l1);                          \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);         \
                gen_set_label(l1);                                  \
                tcg_temp_free(r_cond);                              \
            }
#define FMOVDCC(icc)                                                \
            {                                                       \
                TCGv r_cond;                                        \
                int l1;                                             \
                                                                    \
                l1 = gen_new_label();                               \
                r_cond = tcg_temp_new();                            \
                cond = GET_FIELD_SP(insn, 14, 17);                  \
                gen_cond(r_cond, icc, cond);                        \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                   0, l1);                          \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],                \
                                cpu_fpr[DFPREG(rs2)]);              \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],            \
                                cpu_fpr[DFPREG(rs2) + 1]);          \
                gen_set_label(l1);                                  \
                tcg_temp_free(r_cond);                              \
            }
#define FMOVQCC(icc)                                                \
            {                                                       \
                TCGv r_cond;                                        \
                int l1;                                             \
                                                                    \
                l1 = gen_new_label();                               \
                r_cond = tcg_temp_new();                            \
                cond = GET_FIELD_SP(insn, 14, 17);                  \
                gen_cond(r_cond, icc, cond);                        \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                   0, l1);                          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],                \
                                cpu_fpr[QFPREG(rs2)]);              \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],            \
                                cpu_fpr[QFPREG(rs2) + 1]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],            \
                                cpu_fpr[QFPREG(rs2) + 2]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],            \
                                cpu_fpr[QFPREG(rs2) + 3]);          \
                gen_set_label(l1);                                  \
                tcg_temp_free(r_cond);                              \
            }
            case 0x101: /* V9 fmovscc %icc */
                FMOVSCC(0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVDCC(0);
                break;
            case 0x103: /* V9 fmovqcc %icc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x181: /* V9 fmovscc %xcc */
                FMOVSCC(1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVDCC(1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
            default:
                goto illegal_insn;
            }
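            /* The fcmp* cases write their result into the %fsr
               condition-code field selected by rd & 3; on pre-V9 CPUs only
               fcc0 exists, so the mask is harmless there. */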
        } else if (xop == 0x2) {
            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {   /* immediate */
                    TCGv r_const;

                    simm = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl(simm);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {        /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
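            /* SPARC has no architectural mov: "mov x, y" assembles to
               "or %g0, x, y", which is why rs1 == 0 and rs2 == 0 get the
               dedicated fast copy paths above. */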
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                }
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
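            /* For the 32-bit variants (bit 12 of the insn clear) the V9
               decoder masks the shift count to 5 bits, and srl/sra first
               normalize the low 32 bits of the source (mask, and for sra
               sign-extend from bit 31) so the result matches V8 semantics
               inside a 64-bit register. */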
        } else if (xop < 0x36) {
            if (xop < 0x20) {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop & ~0x10) {
                case 0x0: /* add */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x1: /* and */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x2: /* or */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x3: /* xor */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x4: /* sub */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_subi_cc(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x5: /* andn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x6: /* orn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x7: /* xorn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x8: /* addx, V9 addc */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
                        } else {
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                        } else {
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    }
                    break;
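                /* addx without condition codes is open-coded above:
                   gen_mov_reg_C extracts the PSR carry bit into cpu_tmp0,
                   which is then simply added as a third operand. */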
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
#endif
                case 0xa: /* umul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0xb: /* smul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0xc: /* subx, V9 subc */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
                        } else {
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                        } else {
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                    tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                    gen_trap_ifdivzero_tl(cpu_cc_src2);
                    tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_div_cc(cpu_dst);
                    break;
                case 0xf: /* sdiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_div_cc(cpu_dst);
                    break;
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x24: /* mulscc */
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#ifndef TARGET_SPARC64
                case 0x25: /* sll */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26: /* srl */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27: /* sra */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
                case 0x30:
                    {
                        switch (rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
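                        /* Per the SPARC wr semantics, the value written is
                           rs1 XOR the second operand, hence the
                           tcg_gen_xor_tl in each case of this switch. */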
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC
                                               II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8
                                               manual, nop on the
                                               microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_dst);
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc))
                                ; // XXX
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc, cpu_cond))
                                goto jmp_insn;
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* Softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_tmp64);
                            break;
                        case 0x15: /* Softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_tmp64);
                            break;
                        case 0x16: /* Softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_tmp64);
                            break;
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_dst);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation
                                      Counter */
                        case 0x12: /* Dispatch Control */
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0:
                            gen_helper_saved();
                            break;
                        case 1:
                            gen_helper_restored();
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            // XXX
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_helper_wrpsr(cpu_dst);
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: // tpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 1: // tnpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tnpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 2: // tstate
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state,
                                                       tstate));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 3: // tt
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                               offsetof(trap_state, tt));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 4: // tick
                            {
                                TCGv_ptr r_tickptr;

                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 5: // tba
                            tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                            break;
                        case 6: // pstate
                            save_state(dc, cpu_cond);
                            gen_helper_wrpstate(cpu_tmp0);
                            dc->npc = DYNAMIC_PC;
                            break;
                        case 7: // tl
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, tl));
                            break;
                        case 8: // pil
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    psrpil));
                            break;
                        case 9: // cwp
                            gen_helper_wrcwp(cpu_tmp0);
                            break;
                        case 10: // cansave
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cansave));
                            break;
                        case 11: // canrestore
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    canrestore));
                            break;
                        case 12: // cleanwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cleanwin));
                            break;
                        case 13: // otherwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    otherwin));
                            break;
                        case 14: // wstate
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    wstate));
                            break;
                        case 16: // UA2005 gl
                            CHECK_IU_FEATURE(dc, GL);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, gl));
                            break;
                        case 26: // UA2005 strand status
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        if (dc->def->nwindows != 32)
                            tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                            (1 << dc->def->nwindows) - 1);
                        tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                    }
                    break;
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                        CHECK_IU_FEATURE(dc, HYPV);
                        if (!hypervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        switch (rd) {
                        case 0: // hpstate
                            // XXX gen_op_wrhpstate();
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 1: // htstate
                            // XXX gen_op_wrhtstate();
                            break;
                        case 3: // hintp
                            tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                            break;
                        case 5: // htba
                            tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                            break;
                        case 31: // hstick_cmpr
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, hstick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_hstick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 6: // hver readonly
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new();
                        if (insn & (1 << 18)) {
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond);
                            else
                                goto illegal_insn;
                        } else {
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 10);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        tcg_temp_free(r_cond);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    {
                        cpu_src2 = get_src2(insn, cpu_src2);
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x2f: /* V9 movr */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                           cpu_src1, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 9);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
            }
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;

            switch (opf) {
            case 0x000: /* VIS I edge8cc */
            case 0x001: /* VIS II edge8n */
            case 0x002: /* VIS I edge8lcc */
            case 0x003: /* VIS II edge8ln */
            case 0x004: /* VIS I edge16cc */
            case 0x005: /* VIS II edge16n */
            case 0x006: /* VIS I edge16lcc */
            case 0x007: /* VIS II edge16ln */
            case 0x008: /* VIS I edge32cc */
            case 0x009: /* VIS II edge32n */
            case 0x00a: /* VIS I edge32lcc */
            case 0x00b: /* VIS II edge32ln */
                // XXX
                goto illegal_insn;
            case 0x010: /* VIS I array8 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x018: /* VIS I alignaddr */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x019: /* VIS II bmask */
            case 0x01a: /* VIS I alignaddrl */
                // XXX
                goto illegal_insn;
            case 0x020: /* VIS I fcmple16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x022: /* VIS I fcmpne16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x024: /* VIS I fcmple32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x026: /* VIS I fcmpne32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x028: /* VIS I fcmpgt16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02a: /* VIS I fcmpeq16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02c: /* VIS I fcmpgt32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02e: /* VIS I fcmpeq32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x031: /* VIS I fmul8x16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x033: /* VIS I fmul8x16au */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16au();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x035: /* VIS I fmul8x16al */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16al();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x036: /* VIS I fmul8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x037: /* VIS I fmul8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x038: /* VIS I fmuld8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x039: /* VIS I fmuld8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x03a: /* VIS I fpack32 */
            case 0x03b: /* VIS I fpack16 */
            case 0x03d: /* VIS I fpackfix */
            case 0x03e: /* VIS I pdist */
                // XXX
                goto illegal_insn;
            case 0x048: /* VIS I faligndata */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_faligndata();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04b: /* VIS I fpmerge */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpmerge();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04c: /* VIS II bshuffle */
                // XXX
                goto illegal_insn;
            case 0x04d: /* VIS I fexpand */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fexpand();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x050: /* VIS I fpadd16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x051: /* VIS I fpadd16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x052: /* VIS I fpadd32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x053: /* VIS I fpadd32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x054: /* VIS I fpsub16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x055: /* VIS I fpsub16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x056: /* VIS I fpsub32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x057: /* VIS I fpsub32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x060: /* VIS I fzero */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                break;
            case 0x061: /* VIS I fzeros */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], 0);
                break;
            case 0x062: /* VIS I fnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x063: /* VIS I fnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x064: /* VIS I fandnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x065: /* VIS I fandnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x066: /* VIS I fnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x067: /* VIS I fnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x068: /* VIS I fandnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                 cpu_fpr[DFPREG(rs1)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x069: /* VIS I fandnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x06a: /* VIS I fnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x06b: /* VIS I fnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x06c: /* VIS I fxor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x06d: /* VIS I fxors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x06e: /* VIS I fnand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x06f: /* VIS I fnands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x070: /* VIS I fand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x071: /* VIS I fands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x072: /* VIS I fxnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x073: /* VIS I fxnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                break;
            case 0x074: /* VIS I fsrc1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x075: /* VIS I fsrc1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x076: /* VIS I fornot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x077: /* VIS I fornot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x078: /* VIS I fsrc2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x079: /* VIS I fsrc2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x07a: /* VIS I fornot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x07b: /* VIS I fornot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x07c: /* VIS I for */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                               cpu_fpr[DFPREG(rs2)]);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                               cpu_fpr[DFPREG(rs1) + 1],
                               cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x07d: /* VIS I fors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x07e: /* VIS I fone */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                break;
            case 0x07f: /* VIS I fones */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], -1);
                break;
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
                // XXX
                goto illegal_insn;
            default:
                goto illegal_insn;
            }
#else
            goto ncp_insn;
#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            TCGv_i32 r_const;

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38:  /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39:  /* rett, V9 return */
                {
                    TCGv_i32 r_const;

                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett();
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                gen_helper_flush(cpu_dst);
                break;
            case 0x3c:  /* save */
                save_state(dc, cpu_cond);
                gen_helper_save();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d:  /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e:  /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_done();
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_retry();
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
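                /* ldd above splits the 64-bit value across the even/odd
                   register pair: the low word goes to rd + 1 immediately,
                   while the high word is left in cpu_val for the common
                   gen_movl_TN_reg(rd, cpu_val) after this switch. */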
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                        break;
                    }
#endif
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    gen_helper_ldfsr(cpu_tmp32);
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
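                /* std mirrors ldd: rd + 1 supplies the low word, which is
                   concatenated with cpu_val (the even register) into a
                   single 64-bit store. */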
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
4722 } else if (xop
> 0x23 && xop
< 0x28) {
4723 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4725 save_state(dc
, cpu_cond
);
4727 case 0x24: /* stf, store fpreg */
4728 gen_address_mask(dc
, cpu_addr
);
4729 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4730 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
        case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
            gen_address_mask(dc, cpu_addr);
            tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
            if (rd == 1)
                tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
            else
                tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
            tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
            tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
            break;
        case 0x26:
#ifdef TARGET_SPARC64
            /* V9 stqf, store quad fpreg */
            {
                TCGv_i32 r_const;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rd));
                r_const = tcg_const_i32(dc->mem_idx);
                gen_helper_stqf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
            }
            break;
#else /* !TARGET_SPARC64 */
            /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
            goto illegal_insn;
#else
            if (!supervisor(dc))
                goto priv_insn;
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            goto nfq_insn;
#endif
#endif
        case 0x27: /* stdf, store double fpreg */
            {
                TCGv_i32 r_const;

                gen_op_load_fpr_DT0(DFPREG(rd));
                r_const = tcg_const_i32(dc->mem_idx);
                gen_helper_stdf(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
            }
            break;
        default:
            goto illegal_insn;
        }
    } else if (xop > 0x33 && xop < 0x3f) {
        save_state(dc, cpu_cond);
        switch (xop) {
#ifdef TARGET_SPARC64
        case 0x34: /* V9 stfa */
            gen_stf_asi(cpu_addr, insn, 4, rd);
            break;
        case 0x36: /* V9 stqfa */
            {
                TCGv_i32 r_const;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                r_const = tcg_const_i32(7);
                gen_helper_check_align(cpu_addr, r_const);
                tcg_temp_free_i32(r_const);
                gen_op_load_fpr_QT0(QFPREG(rd));
                gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
            }
            break;
        case 0x37: /* V9 stdfa */
            gen_op_load_fpr_DT0(DFPREG(rd));
            gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
            break;
        case 0x3c: /* V9 casa */
            gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
            gen_movl_TN_reg(rd, cpu_val);
            break;
        case 0x3e: /* V9 casxa */
            gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
            gen_movl_TN_reg(rd, cpu_val);
            break;
#else
        case 0x34: /* stc */
        case 0x35: /* stcsr */
        case 0x36: /* stdcq */
        case 0x37: /* stdc */
            goto ncp_insn;
#endif
        default:
            goto illegal_insn;
        }
    } else
        goto illegal_insn;
    }
    break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
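    /* Out-of-line exits: jmp_insn is reached by instructions that have
       already updated pc/npc themselves; the labels below raise the
       corresponding trap and end the translation block (dc->is_br). */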
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}
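/* Main translation loop: decode guest instructions starting at tb->pc into
   TCG ops until the block ends (control transfer, page boundary, opcode
   buffer full or instruction count limit).  When spc is set, per-insn
   pc/npc/icount values are also recorded for the PC search done by
   gen_pc_load(). */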
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    /* loads and stores */
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
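        /* The gen_opc_pc/gen_opc_npc/gen_opc_icount tables map each
           generated op back to its guest pc/npc so gen_pc_load() can
           restore the CPU state of a faulting instruction. */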
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
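/* Public entry points: plain translation (spc == 0) and the variant used
   when re-translating a block to locate a searched PC (spc != 0). */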
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
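/* Called after a fault to rebuild pc/npc from the gen_opc_* side tables;
   npc values 1 (DYNAMIC_PC) and 2 (JUMP_PC) match the encodings used at
   translation time. */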
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;
    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }
}