4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env
, cpu_regwptr
;
45 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
46 static TCGv_i32 cpu_psr
;
47 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
54 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
56 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
57 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
58 static TCGv_i32 cpu_softint
;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32
;
65 static TCGv_i64 cpu_tmp64
;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
69 #include "gen-icount.h"
71 typedef struct DisasContext
{
72 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
78 int address_mask_32bit
;
79 struct TranslationBlock
*tb
;
83 // This function uses non-native bit order
84 #define GET_FIELD(X, FROM, TO) \
85 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87 // This function uses the order in the manuals, i.e. bit 0 is 2^0
88 #define GET_FIELD_SP(X, FROM, TO) \
89 GET_FIELD(X, 31 - (TO), 31 - (FROM))
91 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
92 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
100 #define DFPREG(r) (r & 0x1e)
101 #define QFPREG(r) (r & 0x1c)
104 #define UA2005_HTRAP_MASK 0xff
105 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low 'len' bits of 'x' to a full int.
 *
 * Callers (GET_FIELDs / GET_FIELD_SPs) pass the extracted field width,
 * so the value must be shifted up by 32 - len and arithmetically shifted
 * back down; shifting by 'len' itself would be an identity, not a sign
 * extension.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
113 #define IS_IMM (insn & (1<<13))
115 /* floating point registers moves */
116 static void gen_op_load_fpr_DT0(unsigned int src
)
118 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
119 offsetof(CPU_DoubleU
, l
.upper
));
120 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
121 offsetof(CPU_DoubleU
, l
.lower
));
124 static void gen_op_load_fpr_DT1(unsigned int src
)
126 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
127 offsetof(CPU_DoubleU
, l
.upper
));
128 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
129 offsetof(CPU_DoubleU
, l
.lower
));
132 static void gen_op_store_DT0_fpr(unsigned int dst
)
134 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
135 offsetof(CPU_DoubleU
, l
.upper
));
136 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
137 offsetof(CPU_DoubleU
, l
.lower
));
140 static void gen_op_load_fpr_QT0(unsigned int src
)
142 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
143 offsetof(CPU_QuadU
, l
.upmost
));
144 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
145 offsetof(CPU_QuadU
, l
.upper
));
146 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
147 offsetof(CPU_QuadU
, l
.lower
));
148 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
149 offsetof(CPU_QuadU
, l
.lowest
));
152 static void gen_op_load_fpr_QT1(unsigned int src
)
154 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
155 offsetof(CPU_QuadU
, l
.upmost
));
156 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
157 offsetof(CPU_QuadU
, l
.upper
));
158 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
159 offsetof(CPU_QuadU
, l
.lower
));
160 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
161 offsetof(CPU_QuadU
, l
.lowest
));
164 static void gen_op_store_QT0_fpr(unsigned int dst
)
166 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
167 offsetof(CPU_QuadU
, l
.upmost
));
168 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
169 offsetof(CPU_QuadU
, l
.upper
));
170 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
171 offsetof(CPU_QuadU
, l
.lower
));
172 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
173 offsetof(CPU_QuadU
, l
.lowest
));
177 #ifdef CONFIG_USER_ONLY
178 #define supervisor(dc) 0
179 #ifdef TARGET_SPARC64
180 #define hypervisor(dc) 0
183 #define supervisor(dc) (dc->mem_idx >= 1)
184 #ifdef TARGET_SPARC64
185 #define hypervisor(dc) (dc->mem_idx == 2)
190 #ifdef TARGET_SPARC64
192 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
194 #define AM_CHECK(dc) (1)
198 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
200 #ifdef TARGET_SPARC64
202 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
206 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
209 tcg_gen_movi_tl(tn
, 0);
211 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
213 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
217 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
222 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
224 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
228 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
229 target_ulong pc
, target_ulong npc
)
231 TranslationBlock
*tb
;
234 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
235 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
236 /* jump to same page: we can use a direct jump */
237 tcg_gen_goto_tb(tb_num
);
238 tcg_gen_movi_tl(cpu_pc
, pc
);
239 tcg_gen_movi_tl(cpu_npc
, npc
);
240 tcg_gen_exit_tb((long)tb
+ tb_num
);
242 /* jump to another page: currently not optimized */
243 tcg_gen_movi_tl(cpu_pc
, pc
);
244 tcg_gen_movi_tl(cpu_npc
, npc
);
250 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
252 tcg_gen_extu_i32_tl(reg
, src
);
253 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
254 tcg_gen_andi_tl(reg
, reg
, 0x1);
257 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
259 tcg_gen_extu_i32_tl(reg
, src
);
260 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
261 tcg_gen_andi_tl(reg
, reg
, 0x1);
264 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
266 tcg_gen_extu_i32_tl(reg
, src
);
267 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
268 tcg_gen_andi_tl(reg
, reg
, 0x1);
271 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
273 tcg_gen_extu_i32_tl(reg
, src
);
274 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
275 tcg_gen_andi_tl(reg
, reg
, 0x1);
278 static inline void gen_cc_clear_icc(void)
280 tcg_gen_movi_i32(cpu_psr
, 0);
283 #ifdef TARGET_SPARC64
284 static inline void gen_cc_clear_xcc(void)
286 tcg_gen_movi_i32(cpu_xcc
, 0);
292 env->psr |= PSR_ZERO;
293 if ((int32_t) T0 < 0)
296 static inline void gen_cc_NZ_icc(TCGv dst
)
301 l1
= gen_new_label();
302 l2
= gen_new_label();
303 r_temp
= tcg_temp_new();
304 tcg_gen_andi_tl(r_temp
, dst
, 0xffffffffULL
);
305 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
306 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
308 tcg_gen_ext32s_tl(r_temp
, dst
);
309 tcg_gen_brcondi_tl(TCG_COND_GE
, r_temp
, 0, l2
);
310 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
312 tcg_temp_free(r_temp
);
315 #ifdef TARGET_SPARC64
316 static inline void gen_cc_NZ_xcc(TCGv dst
)
320 l1
= gen_new_label();
321 l2
= gen_new_label();
322 tcg_gen_brcondi_tl(TCG_COND_NE
, dst
, 0, l1
);
323 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
325 tcg_gen_brcondi_tl(TCG_COND_GE
, dst
, 0, l2
);
326 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
333 env->psr |= PSR_CARRY;
335 static inline void gen_cc_C_add_icc(TCGv dst
, TCGv src1
)
337 TCGv r_temp1
, r_temp2
;
340 l1
= gen_new_label();
341 r_temp1
= tcg_temp_new();
342 r_temp2
= tcg_temp_new();
343 tcg_gen_andi_tl(r_temp1
, dst
, 0xffffffffULL
);
344 tcg_gen_andi_tl(r_temp2
, src1
, 0xffffffffULL
);
345 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
346 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
348 tcg_temp_free(r_temp1
);
349 tcg_temp_free(r_temp2
);
352 #ifdef TARGET_SPARC64
353 static inline void gen_cc_C_add_xcc(TCGv dst
, TCGv src1
)
357 l1
= gen_new_label();
358 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l1
);
359 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
365 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
368 static inline void gen_cc_V_add_icc(TCGv dst
, TCGv src1
, TCGv src2
)
372 r_temp
= tcg_temp_new();
373 tcg_gen_xor_tl(r_temp
, src1
, src2
);
374 tcg_gen_not_tl(r_temp
, r_temp
);
375 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
376 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
377 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
378 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
379 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
380 tcg_temp_free(r_temp
);
381 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
384 #ifdef TARGET_SPARC64
385 static inline void gen_cc_V_add_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
389 r_temp
= tcg_temp_new();
390 tcg_gen_xor_tl(r_temp
, src1
, src2
);
391 tcg_gen_not_tl(r_temp
, r_temp
);
392 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
393 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
394 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
395 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
396 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
397 tcg_temp_free(r_temp
);
398 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
402 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
408 l1
= gen_new_label();
410 r_temp
= tcg_temp_new();
411 tcg_gen_xor_tl(r_temp
, src1
, src2
);
412 tcg_gen_not_tl(r_temp
, r_temp
);
413 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
414 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
415 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
416 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
417 r_const
= tcg_const_i32(TT_TOVF
);
418 gen_helper_raise_exception(r_const
);
419 tcg_temp_free_i32(r_const
);
421 tcg_temp_free(r_temp
);
424 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
428 l1
= gen_new_label();
429 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
430 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
431 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
432 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
436 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
441 l1
= gen_new_label();
442 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
443 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
444 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
445 r_const
= tcg_const_i32(TT_TOVF
);
446 gen_helper_raise_exception(r_const
);
447 tcg_temp_free_i32(r_const
);
451 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
453 tcg_gen_mov_tl(cpu_cc_src
, src1
);
454 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
455 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
457 gen_cc_NZ_icc(cpu_cc_dst
);
458 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
459 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
460 #ifdef TARGET_SPARC64
462 gen_cc_NZ_xcc(cpu_cc_dst
);
463 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
464 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
466 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
469 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
471 tcg_gen_mov_tl(cpu_cc_src
, src1
);
472 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
473 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
474 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
476 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
477 #ifdef TARGET_SPARC64
479 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
481 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
482 gen_cc_NZ_icc(cpu_cc_dst
);
483 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
484 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
485 #ifdef TARGET_SPARC64
486 gen_cc_NZ_xcc(cpu_cc_dst
);
487 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
488 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
490 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
493 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
495 tcg_gen_mov_tl(cpu_cc_src
, src1
);
496 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
497 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
499 gen_cc_NZ_icc(cpu_cc_dst
);
500 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
501 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
502 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
503 #ifdef TARGET_SPARC64
505 gen_cc_NZ_xcc(cpu_cc_dst
);
506 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
507 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
509 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
512 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
514 tcg_gen_mov_tl(cpu_cc_src
, src1
);
515 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
516 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
517 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
518 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
520 gen_cc_NZ_icc(cpu_cc_dst
);
521 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
522 #ifdef TARGET_SPARC64
524 gen_cc_NZ_xcc(cpu_cc_dst
);
525 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
526 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
528 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
533 env->psr |= PSR_CARRY;
535 static inline void gen_cc_C_sub_icc(TCGv src1
, TCGv src2
)
537 TCGv r_temp1
, r_temp2
;
540 l1
= gen_new_label();
541 r_temp1
= tcg_temp_new();
542 r_temp2
= tcg_temp_new();
543 tcg_gen_andi_tl(r_temp1
, src1
, 0xffffffffULL
);
544 tcg_gen_andi_tl(r_temp2
, src2
, 0xffffffffULL
);
545 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
546 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
548 tcg_temp_free(r_temp1
);
549 tcg_temp_free(r_temp2
);
552 #ifdef TARGET_SPARC64
553 static inline void gen_cc_C_sub_xcc(TCGv src1
, TCGv src2
)
557 l1
= gen_new_label();
558 tcg_gen_brcond_tl(TCG_COND_GEU
, src1
, src2
, l1
);
559 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
565 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
568 static inline void gen_cc_V_sub_icc(TCGv dst
, TCGv src1
, TCGv src2
)
572 r_temp
= tcg_temp_new();
573 tcg_gen_xor_tl(r_temp
, src1
, src2
);
574 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
575 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
576 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
577 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
578 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
579 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
580 tcg_temp_free(r_temp
);
583 #ifdef TARGET_SPARC64
584 static inline void gen_cc_V_sub_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
588 r_temp
= tcg_temp_new();
589 tcg_gen_xor_tl(r_temp
, src1
, src2
);
590 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
591 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
592 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
593 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
594 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
595 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
596 tcg_temp_free(r_temp
);
600 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
606 l1
= gen_new_label();
608 r_temp
= tcg_temp_new();
609 tcg_gen_xor_tl(r_temp
, src1
, src2
);
610 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
611 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
612 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
613 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
614 r_const
= tcg_const_i32(TT_TOVF
);
615 gen_helper_raise_exception(r_const
);
616 tcg_temp_free_i32(r_const
);
618 tcg_temp_free(r_temp
);
621 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
623 tcg_gen_mov_tl(cpu_cc_src
, src1
);
624 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
625 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
627 gen_cc_NZ_icc(cpu_cc_dst
);
628 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
629 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
630 #ifdef TARGET_SPARC64
632 gen_cc_NZ_xcc(cpu_cc_dst
);
633 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
634 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
636 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
639 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
641 tcg_gen_mov_tl(cpu_cc_src
, src1
);
642 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
643 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
644 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
646 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
647 #ifdef TARGET_SPARC64
649 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
651 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
652 gen_cc_NZ_icc(cpu_cc_dst
);
653 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
654 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
655 #ifdef TARGET_SPARC64
656 gen_cc_NZ_xcc(cpu_cc_dst
);
657 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
658 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
660 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
663 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
665 tcg_gen_mov_tl(cpu_cc_src
, src1
);
666 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
667 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
669 gen_cc_NZ_icc(cpu_cc_dst
);
670 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
671 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
672 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
673 #ifdef TARGET_SPARC64
675 gen_cc_NZ_xcc(cpu_cc_dst
);
676 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
677 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
679 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
682 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
684 tcg_gen_mov_tl(cpu_cc_src
, src1
);
685 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
686 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
687 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
688 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
690 gen_cc_NZ_icc(cpu_cc_dst
);
691 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
692 #ifdef TARGET_SPARC64
694 gen_cc_NZ_xcc(cpu_cc_dst
);
695 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
696 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
698 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
701 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
706 l1
= gen_new_label();
707 r_temp
= tcg_temp_new();
713 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
714 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
715 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
716 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
717 tcg_gen_movi_tl(cpu_cc_src2
, 0);
721 // env->y = (b2 << 31) | (env->y >> 1);
722 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
723 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
724 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
725 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
726 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
727 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
730 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
731 gen_mov_reg_V(r_temp
, cpu_psr
);
732 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
733 tcg_temp_free(r_temp
);
735 // T0 = (b1 << 31) | (T0 >> 1);
737 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
738 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
739 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
741 /* do addition and update flags */
742 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
745 gen_cc_NZ_icc(cpu_cc_dst
);
746 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
747 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
748 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
751 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
753 TCGv_i64 r_temp
, r_temp2
;
755 r_temp
= tcg_temp_new_i64();
756 r_temp2
= tcg_temp_new_i64();
758 tcg_gen_extu_tl_i64(r_temp
, src2
);
759 tcg_gen_extu_tl_i64(r_temp2
, src1
);
760 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
762 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
763 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
764 tcg_temp_free_i64(r_temp
);
765 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
766 #ifdef TARGET_SPARC64
767 tcg_gen_mov_i64(dst
, r_temp2
);
769 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
771 tcg_temp_free_i64(r_temp2
);
774 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
776 TCGv_i64 r_temp
, r_temp2
;
778 r_temp
= tcg_temp_new_i64();
779 r_temp2
= tcg_temp_new_i64();
781 tcg_gen_ext_tl_i64(r_temp
, src2
);
782 tcg_gen_ext_tl_i64(r_temp2
, src1
);
783 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
785 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
786 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
787 tcg_temp_free_i64(r_temp
);
788 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
789 #ifdef TARGET_SPARC64
790 tcg_gen_mov_i64(dst
, r_temp2
);
792 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
794 tcg_temp_free_i64(r_temp2
);
797 #ifdef TARGET_SPARC64
798 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
803 l1
= gen_new_label();
804 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
805 r_const
= tcg_const_i32(TT_DIV_ZERO
);
806 gen_helper_raise_exception(r_const
);
807 tcg_temp_free_i32(r_const
);
811 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
815 l1
= gen_new_label();
816 l2
= gen_new_label();
817 tcg_gen_mov_tl(cpu_cc_src
, src1
);
818 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
819 gen_trap_ifdivzero_tl(cpu_cc_src2
);
820 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
821 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
822 tcg_gen_movi_i64(dst
, INT64_MIN
);
825 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
830 static inline void gen_op_div_cc(TCGv dst
)
834 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
836 gen_cc_NZ_icc(cpu_cc_dst
);
837 l1
= gen_new_label();
838 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cc_src2
, 0, l1
);
839 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
843 static inline void gen_op_logic_cc(TCGv dst
)
845 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
848 gen_cc_NZ_icc(cpu_cc_dst
);
849 #ifdef TARGET_SPARC64
851 gen_cc_NZ_xcc(cpu_cc_dst
);
856 static inline void gen_op_eval_ba(TCGv dst
)
858 tcg_gen_movi_tl(dst
, 1);
862 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
864 gen_mov_reg_Z(dst
, src
);
868 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
870 gen_mov_reg_N(cpu_tmp0
, src
);
871 gen_mov_reg_V(dst
, src
);
872 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
873 gen_mov_reg_Z(cpu_tmp0
, src
);
874 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
878 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
880 gen_mov_reg_V(cpu_tmp0
, src
);
881 gen_mov_reg_N(dst
, src
);
882 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
886 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
888 gen_mov_reg_Z(cpu_tmp0
, src
);
889 gen_mov_reg_C(dst
, src
);
890 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
894 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
896 gen_mov_reg_C(dst
, src
);
900 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
902 gen_mov_reg_V(dst
, src
);
906 static inline void gen_op_eval_bn(TCGv dst
)
908 tcg_gen_movi_tl(dst
, 0);
912 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
914 gen_mov_reg_N(dst
, src
);
918 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
920 gen_mov_reg_Z(dst
, src
);
921 tcg_gen_xori_tl(dst
, dst
, 0x1);
925 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
927 gen_mov_reg_N(cpu_tmp0
, src
);
928 gen_mov_reg_V(dst
, src
);
929 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
930 gen_mov_reg_Z(cpu_tmp0
, src
);
931 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
932 tcg_gen_xori_tl(dst
, dst
, 0x1);
936 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
938 gen_mov_reg_V(cpu_tmp0
, src
);
939 gen_mov_reg_N(dst
, src
);
940 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
941 tcg_gen_xori_tl(dst
, dst
, 0x1);
945 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
947 gen_mov_reg_Z(cpu_tmp0
, src
);
948 gen_mov_reg_C(dst
, src
);
949 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
950 tcg_gen_xori_tl(dst
, dst
, 0x1);
954 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
956 gen_mov_reg_C(dst
, src
);
957 tcg_gen_xori_tl(dst
, dst
, 0x1);
961 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
963 gen_mov_reg_N(dst
, src
);
964 tcg_gen_xori_tl(dst
, dst
, 0x1);
968 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
970 gen_mov_reg_V(dst
, src
);
971 tcg_gen_xori_tl(dst
, dst
, 0x1);
975 FPSR bit field FCC1 | FCC0:
981 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
982 unsigned int fcc_offset
)
984 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
985 tcg_gen_andi_tl(reg
, reg
, 0x1);
988 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
989 unsigned int fcc_offset
)
991 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
992 tcg_gen_andi_tl(reg
, reg
, 0x1);
996 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
997 unsigned int fcc_offset
)
999 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1000 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1001 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1004 // 1 or 2: FCC0 ^ FCC1
1005 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
1006 unsigned int fcc_offset
)
1008 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1009 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1010 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1014 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
1015 unsigned int fcc_offset
)
1017 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1021 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
1022 unsigned int fcc_offset
)
1024 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1025 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1026 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1027 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1031 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
1032 unsigned int fcc_offset
)
1034 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1038 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
1039 unsigned int fcc_offset
)
1041 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1042 tcg_gen_xori_tl(dst
, dst
, 0x1);
1043 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1044 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1048 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
1049 unsigned int fcc_offset
)
1051 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1052 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1053 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1056 // 0: !(FCC0 | FCC1)
1057 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1058 unsigned int fcc_offset
)
1060 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1061 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1062 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1063 tcg_gen_xori_tl(dst
, dst
, 0x1);
1066 // 0 or 3: !(FCC0 ^ FCC1)
1067 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1068 unsigned int fcc_offset
)
1070 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1071 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1072 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1073 tcg_gen_xori_tl(dst
, dst
, 0x1);
1077 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1078 unsigned int fcc_offset
)
1080 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1081 tcg_gen_xori_tl(dst
, dst
, 0x1);
1084 // !1: !(FCC0 & !FCC1)
1085 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1086 unsigned int fcc_offset
)
1088 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1089 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1090 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1091 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1092 tcg_gen_xori_tl(dst
, dst
, 0x1);
1096 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1097 unsigned int fcc_offset
)
1099 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1100 tcg_gen_xori_tl(dst
, dst
, 0x1);
1103 // !2: !(!FCC0 & FCC1)
1104 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1105 unsigned int fcc_offset
)
1107 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1108 tcg_gen_xori_tl(dst
, dst
, 0x1);
1109 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1110 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1111 tcg_gen_xori_tl(dst
, dst
, 0x1);
1114 // !3: !(FCC0 & FCC1)
1115 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1116 unsigned int fcc_offset
)
1118 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1119 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1120 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1121 tcg_gen_xori_tl(dst
, dst
, 0x1);
1124 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1125 target_ulong pc2
, TCGv r_cond
)
1129 l1
= gen_new_label();
1131 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1133 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1136 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1139 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1140 target_ulong pc2
, TCGv r_cond
)
1144 l1
= gen_new_label();
1146 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1148 gen_goto_tb(dc
, 0, pc2
, pc1
);
1151 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1154 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1159 l1
= gen_new_label();
1160 l2
= gen_new_label();
1162 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1164 tcg_gen_movi_tl(cpu_npc
, npc1
);
1168 tcg_gen_movi_tl(cpu_npc
, npc2
);
1172 /* call this function before using the condition register as it may
1173 have been set for a jump */
1174 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1176 if (dc
->npc
== JUMP_PC
) {
1177 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1178 dc
->npc
= DYNAMIC_PC
;
1182 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1184 if (dc
->npc
== JUMP_PC
) {
1185 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1186 dc
->npc
= DYNAMIC_PC
;
1187 } else if (dc
->npc
!= DYNAMIC_PC
) {
1188 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1192 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1194 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1198 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1200 if (dc
->npc
== JUMP_PC
) {
1201 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1202 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1203 dc
->pc
= DYNAMIC_PC
;
1204 } else if (dc
->npc
== DYNAMIC_PC
) {
1205 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1206 dc
->pc
= DYNAMIC_PC
;
1212 static inline void gen_op_next_insn(void)
1214 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1215 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1218 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1222 #ifdef TARGET_SPARC64
1232 gen_op_eval_bn(r_dst
);
1235 gen_op_eval_be(r_dst
, r_src
);
1238 gen_op_eval_ble(r_dst
, r_src
);
1241 gen_op_eval_bl(r_dst
, r_src
);
1244 gen_op_eval_bleu(r_dst
, r_src
);
1247 gen_op_eval_bcs(r_dst
, r_src
);
1250 gen_op_eval_bneg(r_dst
, r_src
);
1253 gen_op_eval_bvs(r_dst
, r_src
);
1256 gen_op_eval_ba(r_dst
);
1259 gen_op_eval_bne(r_dst
, r_src
);
1262 gen_op_eval_bg(r_dst
, r_src
);
1265 gen_op_eval_bge(r_dst
, r_src
);
1268 gen_op_eval_bgu(r_dst
, r_src
);
1271 gen_op_eval_bcc(r_dst
, r_src
);
1274 gen_op_eval_bpos(r_dst
, r_src
);
1277 gen_op_eval_bvc(r_dst
, r_src
);
1282 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1284 unsigned int offset
;
1304 gen_op_eval_bn(r_dst
);
1307 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1310 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1313 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1316 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1319 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1322 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1325 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1328 gen_op_eval_ba(r_dst
);
1331 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1334 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1337 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1340 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1343 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1346 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1349 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
#ifdef TARGET_SPARC64
/* TCG comparison for each BPr register-condition encoding (1=BRZ,
   2=BRLEZ, 3=BRLZ, 5=BRNZ, 6=BRGZ, 7=BRGEZ); 0 and 4 are reserved. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
};

/* Evaluate register condition 'cond' on r_src (compare against zero)
   into r_dst as 0/1, using a branch-over-store sequence. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1379 /* XXX: potentially incorrect if dynamic npc */
1380 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1383 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1384 target_ulong target
= dc
->pc
+ offset
;
1387 /* unconditional not taken */
1389 dc
->pc
= dc
->npc
+ 4;
1390 dc
->npc
= dc
->pc
+ 4;
1393 dc
->npc
= dc
->pc
+ 4;
1395 } else if (cond
== 0x8) {
1396 /* unconditional taken */
1399 dc
->npc
= dc
->pc
+ 4;
1405 flush_cond(dc
, r_cond
);
1406 gen_cond(r_cond
, cc
, cond
);
1408 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1412 dc
->jump_pc
[0] = target
;
1413 dc
->jump_pc
[1] = dc
->npc
+ 4;
1419 /* XXX: potentially incorrect if dynamic npc */
1420 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1423 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1424 target_ulong target
= dc
->pc
+ offset
;
1427 /* unconditional not taken */
1429 dc
->pc
= dc
->npc
+ 4;
1430 dc
->npc
= dc
->pc
+ 4;
1433 dc
->npc
= dc
->pc
+ 4;
1435 } else if (cond
== 0x8) {
1436 /* unconditional taken */
1439 dc
->npc
= dc
->pc
+ 4;
1445 flush_cond(dc
, r_cond
);
1446 gen_fcond(r_cond
, cc
, cond
);
1448 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1452 dc
->jump_pc
[0] = target
;
1453 dc
->jump_pc
[1] = dc
->npc
+ 4;
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register-condition (BPr).  There is no
   unconditional variant, so the decision is always deferred to run
   time unless annulled. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
#endif
1480 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1484 gen_helper_fcmps(r_rs1
, r_rs2
);
1487 gen_helper_fcmps_fcc1(r_rs1
, r_rs2
);
1490 gen_helper_fcmps_fcc2(r_rs1
, r_rs2
);
1493 gen_helper_fcmps_fcc3(r_rs1
, r_rs2
);
1498 static inline void gen_op_fcmpd(int fccno
)
1505 gen_helper_fcmpd_fcc1();
1508 gen_helper_fcmpd_fcc2();
1511 gen_helper_fcmpd_fcc3();
1516 static inline void gen_op_fcmpq(int fccno
)
1523 gen_helper_fcmpq_fcc1();
1526 gen_helper_fcmpq_fcc2();
1529 gen_helper_fcmpq_fcc3();
1534 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1538 gen_helper_fcmpes(r_rs1
, r_rs2
);
1541 gen_helper_fcmpes_fcc1(r_rs1
, r_rs2
);
1544 gen_helper_fcmpes_fcc2(r_rs1
, r_rs2
);
1547 gen_helper_fcmpes_fcc3(r_rs1
, r_rs2
);
1552 static inline void gen_op_fcmped(int fccno
)
1556 gen_helper_fcmped();
1559 gen_helper_fcmped_fcc1();
1562 gen_helper_fcmped_fcc2();
1565 gen_helper_fcmped_fcc3();
1570 static inline void gen_op_fcmpeq(int fccno
)
1574 gen_helper_fcmpeq();
1577 gen_helper_fcmpeq_fcc1();
1580 gen_helper_fcmpeq_fcc2();
1583 gen_helper_fcmpeq_fcc3();
1590 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1592 gen_helper_fcmps(r_rs1
, r_rs2
);
1595 static inline void gen_op_fcmpd(int fccno
)
1600 static inline void gen_op_fcmpq(int fccno
)
1605 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1607 gen_helper_fcmpes(r_rs1
, r_rs2
);
1610 static inline void gen_op_fcmped(int fccno
)
1612 gen_helper_fcmped();
1615 static inline void gen_op_fcmpeq(int fccno
)
1617 gen_helper_fcmpeq();
1621 static inline void gen_op_fpexception_im(int fsr_flags
)
1625 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1626 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1627 r_const
= tcg_const_i32(TT_FP_EXCP
);
1628 gen_helper_raise_exception(r_const
);
1629 tcg_temp_free_i32(r_const
);
1632 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1634 #if !defined(CONFIG_USER_ONLY)
1635 if (!dc
->fpu_enabled
) {
1638 save_state(dc
, r_cond
);
1639 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1640 gen_helper_raise_exception(r_const
);
1641 tcg_temp_free_i32(r_const
);
1649 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1651 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
/* Reset accumulated softfloat exception flags before an FP op. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1660 #ifdef TARGET_SPARC64
1661 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1667 r_asi
= tcg_temp_new_i32();
1668 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1670 asi
= GET_FIELD(insn
, 19, 26);
1671 r_asi
= tcg_const_i32(asi
);
1676 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1679 TCGv_i32 r_asi
, r_size
, r_sign
;
1681 r_asi
= gen_get_asi(insn
, addr
);
1682 r_size
= tcg_const_i32(size
);
1683 r_sign
= tcg_const_i32(sign
);
1684 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1685 tcg_temp_free_i32(r_sign
);
1686 tcg_temp_free_i32(r_size
);
1687 tcg_temp_free_i32(r_asi
);
1690 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1692 TCGv_i32 r_asi
, r_size
;
1694 r_asi
= gen_get_asi(insn
, addr
);
1695 r_size
= tcg_const_i32(size
);
1696 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1697 tcg_temp_free_i32(r_size
);
1698 tcg_temp_free_i32(r_asi
);
1701 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1703 TCGv_i32 r_asi
, r_size
, r_rd
;
1705 r_asi
= gen_get_asi(insn
, addr
);
1706 r_size
= tcg_const_i32(size
);
1707 r_rd
= tcg_const_i32(rd
);
1708 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1709 tcg_temp_free_i32(r_rd
);
1710 tcg_temp_free_i32(r_size
);
1711 tcg_temp_free_i32(r_asi
);
1714 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1716 TCGv_i32 r_asi
, r_size
, r_rd
;
1718 r_asi
= gen_get_asi(insn
, addr
);
1719 r_size
= tcg_const_i32(size
);
1720 r_rd
= tcg_const_i32(rd
);
1721 gen_helper_stf_asi(addr
, r_asi
, r_size
, r_rd
);
1722 tcg_temp_free_i32(r_rd
);
1723 tcg_temp_free_i32(r_size
);
1724 tcg_temp_free_i32(r_asi
);
1727 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1729 TCGv_i32 r_asi
, r_size
, r_sign
;
1731 r_asi
= gen_get_asi(insn
, addr
);
1732 r_size
= tcg_const_i32(4);
1733 r_sign
= tcg_const_i32(0);
1734 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1735 tcg_temp_free_i32(r_sign
);
1736 gen_helper_st_asi(addr
, dst
, r_asi
, r_size
);
1737 tcg_temp_free_i32(r_size
);
1738 tcg_temp_free_i32(r_asi
);
1739 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1742 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1744 TCGv_i32 r_asi
, r_rd
;
1746 r_asi
= gen_get_asi(insn
, addr
);
1747 r_rd
= tcg_const_i32(rd
);
1748 gen_helper_ldda_asi(addr
, r_asi
, r_rd
);
1749 tcg_temp_free_i32(r_rd
);
1750 tcg_temp_free_i32(r_asi
);
1753 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1755 TCGv_i32 r_asi
, r_size
;
1757 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1758 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1759 r_asi
= gen_get_asi(insn
, addr
);
1760 r_size
= tcg_const_i32(8);
1761 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1762 tcg_temp_free_i32(r_size
);
1763 tcg_temp_free_i32(r_asi
);
1766 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1772 r_val1
= tcg_temp_new();
1773 gen_movl_reg_TN(rd
, r_val1
);
1774 r_asi
= gen_get_asi(insn
, addr
);
1775 gen_helper_cas_asi(dst
, addr
, r_val1
, val2
, r_asi
);
1776 tcg_temp_free_i32(r_asi
);
1777 tcg_temp_free(r_val1
);
1780 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1785 gen_movl_reg_TN(rd
, cpu_tmp64
);
1786 r_asi
= gen_get_asi(insn
, addr
);
1787 gen_helper_casx_asi(dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1788 tcg_temp_free_i32(r_asi
);
1791 #elif !defined(CONFIG_USER_ONLY)
1793 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1796 TCGv_i32 r_asi
, r_size
, r_sign
;
1798 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1799 r_size
= tcg_const_i32(size
);
1800 r_sign
= tcg_const_i32(sign
);
1801 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1802 tcg_temp_free(r_sign
);
1803 tcg_temp_free(r_size
);
1804 tcg_temp_free(r_asi
);
1805 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1808 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1810 TCGv_i32 r_asi
, r_size
;
1812 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1813 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1814 r_size
= tcg_const_i32(size
);
1815 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1816 tcg_temp_free(r_size
);
1817 tcg_temp_free(r_asi
);
1820 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1822 TCGv_i32 r_asi
, r_size
, r_sign
;
1825 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1826 r_size
= tcg_const_i32(4);
1827 r_sign
= tcg_const_i32(0);
1828 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1829 tcg_temp_free(r_sign
);
1830 r_val
= tcg_temp_new_i64();
1831 tcg_gen_extu_tl_i64(r_val
, dst
);
1832 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1833 tcg_temp_free_i64(r_val
);
1834 tcg_temp_free(r_size
);
1835 tcg_temp_free(r_asi
);
1836 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1839 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1841 TCGv_i32 r_asi
, r_size
, r_sign
;
1843 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1844 r_size
= tcg_const_i32(8);
1845 r_sign
= tcg_const_i32(0);
1846 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1847 tcg_temp_free(r_sign
);
1848 tcg_temp_free(r_size
);
1849 tcg_temp_free(r_asi
);
1850 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1851 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1852 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1853 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1854 gen_movl_TN_reg(rd
, hi
);
1857 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1859 TCGv_i32 r_asi
, r_size
;
1861 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1862 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1863 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1864 r_size
= tcg_const_i32(8);
1865 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1866 tcg_temp_free(r_size
);
1867 tcg_temp_free(r_asi
);
1871 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1872 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1875 TCGv_i32 r_asi
, r_size
;
1877 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1879 r_val
= tcg_const_i64(0xffULL
);
1880 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1881 r_size
= tcg_const_i32(1);
1882 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1883 tcg_temp_free_i32(r_size
);
1884 tcg_temp_free_i32(r_asi
);
1885 tcg_temp_free_i64(r_val
);
1889 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1894 rs1
= GET_FIELD(insn
, 13, 17);
1896 r_rs1
= tcg_const_tl(0); // XXX how to free?
1898 r_rs1
= cpu_gregs
[rs1
];
1900 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1904 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1909 if (IS_IMM
) { /* immediate */
1910 rs2
= GET_FIELDs(insn
, 19, 31);
1911 r_rs2
= tcg_const_tl((int)rs2
); // XXX how to free?
1912 } else { /* register */
1913 rs2
= GET_FIELD(insn
, 27, 31);
1915 r_rs2
= tcg_const_tl(0); // XXX how to free?
1917 r_rs2
= cpu_gregs
[rs2
];
1919 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
/* Bail out to the illegal-insn / no-FPU trap paths inside
   disas_sparc_insn when the CPU model lacks a required feature.
   NOTE(review): goto targets restored from upstream -- they live in
   disas_sparc_insn, the only user of these macros. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1931 /* before an instruction, dc->pc must be static */
1932 static void disas_sparc_insn(DisasContext
* dc
)
1934 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1936 if (unlikely(loglevel
& CPU_LOG_TB_OP
))
1937 tcg_gen_debug_insn_start(dc
->pc
);
1938 insn
= ldl_code(dc
->pc
);
1939 opc
= GET_FIELD(insn
, 0, 1);
1941 rd
= GET_FIELD(insn
, 2, 6);
1943 cpu_src1
= tcg_temp_new(); // const
1944 cpu_src2
= tcg_temp_new(); // const
1947 case 0: /* branches/sethi */
1949 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1952 #ifdef TARGET_SPARC64
1953 case 0x1: /* V9 BPcc */
1957 target
= GET_FIELD_SP(insn
, 0, 18);
1958 target
= sign_extend(target
, 18);
1960 cc
= GET_FIELD_SP(insn
, 20, 21);
1962 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1964 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1969 case 0x3: /* V9 BPr */
1971 target
= GET_FIELD_SP(insn
, 0, 13) |
1972 (GET_FIELD_SP(insn
, 20, 21) << 14);
1973 target
= sign_extend(target
, 16);
1975 cpu_src1
= get_src1(insn
, cpu_src1
);
1976 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1979 case 0x5: /* V9 FBPcc */
1981 int cc
= GET_FIELD_SP(insn
, 20, 21);
1982 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1984 target
= GET_FIELD_SP(insn
, 0, 18);
1985 target
= sign_extend(target
, 19);
1987 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1991 case 0x7: /* CBN+x */
1996 case 0x2: /* BN+x */
1998 target
= GET_FIELD(insn
, 10, 31);
1999 target
= sign_extend(target
, 22);
2001 do_branch(dc
, target
, insn
, 0, cpu_cond
);
2004 case 0x6: /* FBN+x */
2006 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2008 target
= GET_FIELD(insn
, 10, 31);
2009 target
= sign_extend(target
, 22);
2011 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
2014 case 0x4: /* SETHI */
2016 uint32_t value
= GET_FIELD(insn
, 10, 31);
2019 r_const
= tcg_const_tl(value
<< 10);
2020 gen_movl_TN_reg(rd
, r_const
);
2021 tcg_temp_free(r_const
);
2024 case 0x0: /* UNIMPL */
2033 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
2036 r_const
= tcg_const_tl(dc
->pc
);
2037 gen_movl_TN_reg(15, r_const
);
2038 tcg_temp_free(r_const
);
2040 gen_mov_pc_npc(dc
, cpu_cond
);
2044 case 2: /* FPU & Logical Operations */
2046 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2047 if (xop
== 0x3a) { /* generate trap */
2050 cpu_src1
= get_src1(insn
, cpu_src1
);
2052 rs2
= GET_FIELD(insn
, 25, 31);
2053 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2055 rs2
= GET_FIELD(insn
, 27, 31);
2057 gen_movl_reg_TN(rs2
, cpu_src2
);
2058 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2060 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2062 cond
= GET_FIELD(insn
, 3, 6);
2064 save_state(dc
, cpu_cond
);
2065 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2067 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2069 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2070 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2071 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2072 gen_helper_raise_exception(cpu_tmp32
);
2073 } else if (cond
!= 0) {
2074 TCGv r_cond
= tcg_temp_new();
2076 #ifdef TARGET_SPARC64
2078 int cc
= GET_FIELD_SP(insn
, 11, 12);
2080 save_state(dc
, cpu_cond
);
2082 gen_cond(r_cond
, 0, cond
);
2084 gen_cond(r_cond
, 1, cond
);
2088 save_state(dc
, cpu_cond
);
2089 gen_cond(r_cond
, 0, cond
);
2091 l1
= gen_new_label();
2092 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2094 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2096 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2098 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2099 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2100 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2101 gen_helper_raise_exception(cpu_tmp32
);
2104 tcg_temp_free(r_cond
);
2110 } else if (xop
== 0x28) {
2111 rs1
= GET_FIELD(insn
, 13, 17);
2114 #ifndef TARGET_SPARC64
2115 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2116 manual, rdy on the microSPARC
2118 case 0x0f: /* stbar in the SPARCv8 manual,
2119 rdy on the microSPARC II */
2120 case 0x10 ... 0x1f: /* implementation-dependent in the
2121 SPARCv8 manual, rdy on the
2124 gen_movl_TN_reg(rd
, cpu_y
);
2126 #ifdef TARGET_SPARC64
2127 case 0x2: /* V9 rdccr */
2128 gen_helper_rdccr(cpu_dst
);
2129 gen_movl_TN_reg(rd
, cpu_dst
);
2131 case 0x3: /* V9 rdasi */
2132 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2133 gen_movl_TN_reg(rd
, cpu_dst
);
2135 case 0x4: /* V9 rdtick */
2139 r_tickptr
= tcg_temp_new_ptr();
2140 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2141 offsetof(CPUState
, tick
));
2142 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2143 tcg_temp_free_ptr(r_tickptr
);
2144 gen_movl_TN_reg(rd
, cpu_dst
);
2147 case 0x5: /* V9 rdpc */
2151 r_const
= tcg_const_tl(dc
->pc
);
2152 gen_movl_TN_reg(rd
, r_const
);
2153 tcg_temp_free(r_const
);
2156 case 0x6: /* V9 rdfprs */
2157 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2158 gen_movl_TN_reg(rd
, cpu_dst
);
2160 case 0xf: /* V9 membar */
2161 break; /* no effect */
2162 case 0x13: /* Graphics Status */
2163 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2165 gen_movl_TN_reg(rd
, cpu_gsr
);
2167 case 0x16: /* Softint */
2168 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2169 gen_movl_TN_reg(rd
, cpu_dst
);
2171 case 0x17: /* Tick compare */
2172 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2174 case 0x18: /* System tick */
2178 r_tickptr
= tcg_temp_new_ptr();
2179 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2180 offsetof(CPUState
, stick
));
2181 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2182 tcg_temp_free_ptr(r_tickptr
);
2183 gen_movl_TN_reg(rd
, cpu_dst
);
2186 case 0x19: /* System tick compare */
2187 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2189 case 0x10: /* Performance Control */
2190 case 0x11: /* Performance Instrumentation Counter */
2191 case 0x12: /* Dispatch Control */
2192 case 0x14: /* Softint set, WO */
2193 case 0x15: /* Softint clear, WO */
2198 #if !defined(CONFIG_USER_ONLY)
2199 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2200 #ifndef TARGET_SPARC64
2201 if (!supervisor(dc
))
2203 gen_helper_rdpsr(cpu_dst
);
2205 CHECK_IU_FEATURE(dc
, HYPV
);
2206 if (!hypervisor(dc
))
2208 rs1
= GET_FIELD(insn
, 13, 17);
2211 // gen_op_rdhpstate();
2214 // gen_op_rdhtstate();
2217 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2220 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2223 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2225 case 31: // hstick_cmpr
2226 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2232 gen_movl_TN_reg(rd
, cpu_dst
);
2234 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2235 if (!supervisor(dc
))
2237 #ifdef TARGET_SPARC64
2238 rs1
= GET_FIELD(insn
, 13, 17);
2244 r_tsptr
= tcg_temp_new_ptr();
2245 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2246 offsetof(CPUState
, tsptr
));
2247 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2248 offsetof(trap_state
, tpc
));
2249 tcg_temp_free_ptr(r_tsptr
);
2256 r_tsptr
= tcg_temp_new_ptr();
2257 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2258 offsetof(CPUState
, tsptr
));
2259 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2260 offsetof(trap_state
, tnpc
));
2261 tcg_temp_free_ptr(r_tsptr
);
2268 r_tsptr
= tcg_temp_new_ptr();
2269 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2270 offsetof(CPUState
, tsptr
));
2271 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2272 offsetof(trap_state
, tstate
));
2273 tcg_temp_free_ptr(r_tsptr
);
2280 r_tsptr
= tcg_temp_new_ptr();
2281 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2282 offsetof(CPUState
, tsptr
));
2283 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2284 offsetof(trap_state
, tt
));
2285 tcg_temp_free_ptr(r_tsptr
);
2286 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2293 r_tickptr
= tcg_temp_new_ptr();
2294 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2295 offsetof(CPUState
, tick
));
2296 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2297 gen_movl_TN_reg(rd
, cpu_tmp0
);
2298 tcg_temp_free_ptr(r_tickptr
);
2302 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2305 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2306 offsetof(CPUSPARCState
, pstate
));
2307 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2310 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2311 offsetof(CPUSPARCState
, tl
));
2312 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2315 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2316 offsetof(CPUSPARCState
, psrpil
));
2317 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2320 gen_helper_rdcwp(cpu_tmp0
);
2323 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2324 offsetof(CPUSPARCState
, cansave
));
2325 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2327 case 11: // canrestore
2328 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2329 offsetof(CPUSPARCState
, canrestore
));
2330 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2332 case 12: // cleanwin
2333 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2334 offsetof(CPUSPARCState
, cleanwin
));
2335 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2337 case 13: // otherwin
2338 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2339 offsetof(CPUSPARCState
, otherwin
));
2340 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2343 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2344 offsetof(CPUSPARCState
, wstate
));
2345 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2347 case 16: // UA2005 gl
2348 CHECK_IU_FEATURE(dc
, GL
);
2349 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2350 offsetof(CPUSPARCState
, gl
));
2351 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2353 case 26: // UA2005 strand status
2354 CHECK_IU_FEATURE(dc
, HYPV
);
2355 if (!hypervisor(dc
))
2357 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2360 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2367 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2369 gen_movl_TN_reg(rd
, cpu_tmp0
);
2371 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2372 #ifdef TARGET_SPARC64
2373 save_state(dc
, cpu_cond
);
2374 gen_helper_flushw();
2376 if (!supervisor(dc
))
2378 gen_movl_TN_reg(rd
, cpu_tbr
);
2382 } else if (xop
== 0x34) { /* FPU Operations */
2383 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2385 gen_op_clear_ieee_excp_and_FTT();
2386 rs1
= GET_FIELD(insn
, 13, 17);
2387 rs2
= GET_FIELD(insn
, 27, 31);
2388 xop
= GET_FIELD(insn
, 18, 26);
2390 case 0x1: /* fmovs */
2391 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2393 case 0x5: /* fnegs */
2394 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2396 case 0x9: /* fabss */
2397 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2399 case 0x29: /* fsqrts */
2400 CHECK_FPU_FEATURE(dc
, FSQRT
);
2401 gen_clear_float_exceptions();
2402 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2403 gen_helper_check_ieee_exceptions();
2404 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2406 case 0x2a: /* fsqrtd */
2407 CHECK_FPU_FEATURE(dc
, FSQRT
);
2408 gen_op_load_fpr_DT1(DFPREG(rs2
));
2409 gen_clear_float_exceptions();
2410 gen_helper_fsqrtd();
2411 gen_helper_check_ieee_exceptions();
2412 gen_op_store_DT0_fpr(DFPREG(rd
));
2414 case 0x2b: /* fsqrtq */
2415 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2416 gen_op_load_fpr_QT1(QFPREG(rs2
));
2417 gen_clear_float_exceptions();
2418 gen_helper_fsqrtq();
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_QT0_fpr(QFPREG(rd
));
2422 case 0x41: /* fadds */
2423 gen_clear_float_exceptions();
2424 gen_helper_fadds(cpu_tmp32
,
2425 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2426 gen_helper_check_ieee_exceptions();
2427 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2430 gen_op_load_fpr_DT0(DFPREG(rs1
));
2431 gen_op_load_fpr_DT1(DFPREG(rs2
));
2432 gen_clear_float_exceptions();
2434 gen_helper_check_ieee_exceptions();
2435 gen_op_store_DT0_fpr(DFPREG(rd
));
2437 case 0x43: /* faddq */
2438 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2439 gen_op_load_fpr_QT0(QFPREG(rs1
));
2440 gen_op_load_fpr_QT1(QFPREG(rs2
));
2441 gen_clear_float_exceptions();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_QT0_fpr(QFPREG(rd
));
2446 case 0x45: /* fsubs */
2447 gen_clear_float_exceptions();
2448 gen_helper_fsubs(cpu_tmp32
,
2449 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2450 gen_helper_check_ieee_exceptions();
2451 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2454 gen_op_load_fpr_DT0(DFPREG(rs1
));
2455 gen_op_load_fpr_DT1(DFPREG(rs2
));
2456 gen_clear_float_exceptions();
2458 gen_helper_check_ieee_exceptions();
2459 gen_op_store_DT0_fpr(DFPREG(rd
));
2461 case 0x47: /* fsubq */
2462 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2463 gen_op_load_fpr_QT0(QFPREG(rs1
));
2464 gen_op_load_fpr_QT1(QFPREG(rs2
));
2465 gen_clear_float_exceptions();
2467 gen_helper_check_ieee_exceptions();
2468 gen_op_store_QT0_fpr(QFPREG(rd
));
2470 case 0x49: /* fmuls */
2471 CHECK_FPU_FEATURE(dc
, FMUL
);
2472 gen_clear_float_exceptions();
2473 gen_helper_fmuls(cpu_tmp32
,
2474 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2475 gen_helper_check_ieee_exceptions();
2476 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2478 case 0x4a: /* fmuld */
2479 CHECK_FPU_FEATURE(dc
, FMUL
);
2480 gen_op_load_fpr_DT0(DFPREG(rs1
));
2481 gen_op_load_fpr_DT1(DFPREG(rs2
));
2482 gen_clear_float_exceptions();
2484 gen_helper_check_ieee_exceptions();
2485 gen_op_store_DT0_fpr(DFPREG(rd
));
2487 case 0x4b: /* fmulq */
2488 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2489 CHECK_FPU_FEATURE(dc
, FMUL
);
2490 gen_op_load_fpr_QT0(QFPREG(rs1
));
2491 gen_op_load_fpr_QT1(QFPREG(rs2
));
2492 gen_clear_float_exceptions();
2494 gen_helper_check_ieee_exceptions();
2495 gen_op_store_QT0_fpr(QFPREG(rd
));
2497 case 0x4d: /* fdivs */
2498 gen_clear_float_exceptions();
2499 gen_helper_fdivs(cpu_tmp32
,
2500 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2501 gen_helper_check_ieee_exceptions();
2502 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2505 gen_op_load_fpr_DT0(DFPREG(rs1
));
2506 gen_op_load_fpr_DT1(DFPREG(rs2
));
2507 gen_clear_float_exceptions();
2509 gen_helper_check_ieee_exceptions();
2510 gen_op_store_DT0_fpr(DFPREG(rd
));
2512 case 0x4f: /* fdivq */
2513 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2514 gen_op_load_fpr_QT0(QFPREG(rs1
));
2515 gen_op_load_fpr_QT1(QFPREG(rs2
));
2516 gen_clear_float_exceptions();
2518 gen_helper_check_ieee_exceptions();
2519 gen_op_store_QT0_fpr(QFPREG(rd
));
2521 case 0x69: /* fsmuld */
2522 CHECK_FPU_FEATURE(dc
, FSMULD
);
2523 gen_clear_float_exceptions();
2524 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2525 gen_helper_check_ieee_exceptions();
2526 gen_op_store_DT0_fpr(DFPREG(rd
));
2528 case 0x6e: /* fdmulq */
2529 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2530 gen_op_load_fpr_DT0(DFPREG(rs1
));
2531 gen_op_load_fpr_DT1(DFPREG(rs2
));
2532 gen_clear_float_exceptions();
2533 gen_helper_fdmulq();
2534 gen_helper_check_ieee_exceptions();
2535 gen_op_store_QT0_fpr(QFPREG(rd
));
2537 case 0xc4: /* fitos */
2538 gen_clear_float_exceptions();
2539 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2540 gen_helper_check_ieee_exceptions();
2541 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2543 case 0xc6: /* fdtos */
2544 gen_op_load_fpr_DT1(DFPREG(rs2
));
2545 gen_clear_float_exceptions();
2546 gen_helper_fdtos(cpu_tmp32
);
2547 gen_helper_check_ieee_exceptions();
2548 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2550 case 0xc7: /* fqtos */
2551 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2552 gen_op_load_fpr_QT1(QFPREG(rs2
));
2553 gen_clear_float_exceptions();
2554 gen_helper_fqtos(cpu_tmp32
);
2555 gen_helper_check_ieee_exceptions();
2556 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2558 case 0xc8: /* fitod */
2559 gen_helper_fitod(cpu_fpr
[rs2
]);
2560 gen_op_store_DT0_fpr(DFPREG(rd
));
2562 case 0xc9: /* fstod */
2563 gen_helper_fstod(cpu_fpr
[rs2
]);
2564 gen_op_store_DT0_fpr(DFPREG(rd
));
2566 case 0xcb: /* fqtod */
2567 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2568 gen_op_load_fpr_QT1(QFPREG(rs2
));
2569 gen_clear_float_exceptions();
2571 gen_helper_check_ieee_exceptions();
2572 gen_op_store_DT0_fpr(DFPREG(rd
));
2574 case 0xcc: /* fitoq */
2575 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2576 gen_helper_fitoq(cpu_fpr
[rs2
]);
2577 gen_op_store_QT0_fpr(QFPREG(rd
));
2579 case 0xcd: /* fstoq */
2580 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2581 gen_helper_fstoq(cpu_fpr
[rs2
]);
2582 gen_op_store_QT0_fpr(QFPREG(rd
));
2584 case 0xce: /* fdtoq */
2585 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2586 gen_op_load_fpr_DT1(DFPREG(rs2
));
2588 gen_op_store_QT0_fpr(QFPREG(rd
));
2590 case 0xd1: /* fstoi */
2591 gen_clear_float_exceptions();
2592 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2593 gen_helper_check_ieee_exceptions();
2594 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2596 case 0xd2: /* fdtoi */
2597 gen_op_load_fpr_DT1(DFPREG(rs2
));
2598 gen_clear_float_exceptions();
2599 gen_helper_fdtoi(cpu_tmp32
);
2600 gen_helper_check_ieee_exceptions();
2601 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2603 case 0xd3: /* fqtoi */
2604 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2605 gen_op_load_fpr_QT1(QFPREG(rs2
));
2606 gen_clear_float_exceptions();
2607 gen_helper_fqtoi(cpu_tmp32
);
2608 gen_helper_check_ieee_exceptions();
2609 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2611 #ifdef TARGET_SPARC64
2612 case 0x2: /* V9 fmovd */
2613 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)],
2614 cpu_fpr
[DFPREG(rs2
)]);
2615 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2616 cpu_fpr
[DFPREG(rs2
) + 1]);
2618 case 0x3: /* V9 fmovq */
2619 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2620 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)],
2621 cpu_fpr
[QFPREG(rs2
)]);
2622 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2623 cpu_fpr
[QFPREG(rs2
) + 1]);
2624 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2625 cpu_fpr
[QFPREG(rs2
) + 2]);
2626 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2627 cpu_fpr
[QFPREG(rs2
) + 3]);
2629 case 0x6: /* V9 fnegd */
2630 gen_op_load_fpr_DT1(DFPREG(rs2
));
2632 gen_op_store_DT0_fpr(DFPREG(rd
));
2634 case 0x7: /* V9 fnegq */
2635 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2636 gen_op_load_fpr_QT1(QFPREG(rs2
));
2638 gen_op_store_QT0_fpr(QFPREG(rd
));
2640 case 0xa: /* V9 fabsd */
2641 gen_op_load_fpr_DT1(DFPREG(rs2
));
2643 gen_op_store_DT0_fpr(DFPREG(rd
));
2645 case 0xb: /* V9 fabsq */
2646 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2647 gen_op_load_fpr_QT1(QFPREG(rs2
));
2649 gen_op_store_QT0_fpr(QFPREG(rd
));
2651 case 0x81: /* V9 fstox */
2652 gen_clear_float_exceptions();
2653 gen_helper_fstox(cpu_fpr
[rs2
]);
2654 gen_helper_check_ieee_exceptions();
2655 gen_op_store_DT0_fpr(DFPREG(rd
));
2657 case 0x82: /* V9 fdtox */
2658 gen_op_load_fpr_DT1(DFPREG(rs2
));
2659 gen_clear_float_exceptions();
2661 gen_helper_check_ieee_exceptions();
2662 gen_op_store_DT0_fpr(DFPREG(rd
));
2664 case 0x83: /* V9 fqtox */
2665 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2666 gen_op_load_fpr_QT1(QFPREG(rs2
));
2667 gen_clear_float_exceptions();
2669 gen_helper_check_ieee_exceptions();
2670 gen_op_store_DT0_fpr(DFPREG(rd
));
2672 case 0x84: /* V9 fxtos */
2673 gen_op_load_fpr_DT1(DFPREG(rs2
));
2674 gen_clear_float_exceptions();
2675 gen_helper_fxtos(cpu_tmp32
);
2676 gen_helper_check_ieee_exceptions();
2677 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2679 case 0x88: /* V9 fxtod */
2680 gen_op_load_fpr_DT1(DFPREG(rs2
));
2681 gen_clear_float_exceptions();
2683 gen_helper_check_ieee_exceptions();
2684 gen_op_store_DT0_fpr(DFPREG(rd
));
2686 case 0x8c: /* V9 fxtoq */
2687 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2688 gen_op_load_fpr_DT1(DFPREG(rs2
));
2689 gen_clear_float_exceptions();
2691 gen_helper_check_ieee_exceptions();
2692 gen_op_store_QT0_fpr(QFPREG(rd
));
2698 } else if (xop
== 0x35) { /* FPU Operations */
2699 #ifdef TARGET_SPARC64
2702 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2704 gen_op_clear_ieee_excp_and_FTT();
2705 rs1
= GET_FIELD(insn
, 13, 17);
2706 rs2
= GET_FIELD(insn
, 27, 31);
2707 xop
= GET_FIELD(insn
, 18, 26);
2708 #ifdef TARGET_SPARC64
2709 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2712 l1
= gen_new_label();
2713 cond
= GET_FIELD_SP(insn
, 14, 17);
2714 cpu_src1
= get_src1(insn
, cpu_src1
);
2715 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2717 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2720 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2723 l1
= gen_new_label();
2724 cond
= GET_FIELD_SP(insn
, 14, 17);
2725 cpu_src1
= get_src1(insn
, cpu_src1
);
2726 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2728 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2729 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2732 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2735 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2736 l1
= gen_new_label();
2737 cond
= GET_FIELD_SP(insn
, 14, 17);
2738 cpu_src1
= get_src1(insn
, cpu_src1
);
2739 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2741 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2742 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2743 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2744 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2750 #ifdef TARGET_SPARC64
2751 #define FMOVSCC(fcc) \
2756 l1 = gen_new_label(); \
2757 r_cond = tcg_temp_new(); \
2758 cond = GET_FIELD_SP(insn, 14, 17); \
2759 gen_fcond(r_cond, fcc, cond); \
2760 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2762 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2763 gen_set_label(l1); \
2764 tcg_temp_free(r_cond); \
2766 #define FMOVDCC(fcc) \
2771 l1 = gen_new_label(); \
2772 r_cond = tcg_temp_new(); \
2773 cond = GET_FIELD_SP(insn, 14, 17); \
2774 gen_fcond(r_cond, fcc, cond); \
2775 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2777 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2778 cpu_fpr[DFPREG(rs2)]); \
2779 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2780 cpu_fpr[DFPREG(rs2) + 1]); \
2781 gen_set_label(l1); \
2782 tcg_temp_free(r_cond); \
2784 #define FMOVQCC(fcc) \
2789 l1 = gen_new_label(); \
2790 r_cond = tcg_temp_new(); \
2791 cond = GET_FIELD_SP(insn, 14, 17); \
2792 gen_fcond(r_cond, fcc, cond); \
2793 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2795 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2796 cpu_fpr[QFPREG(rs2)]); \
2797 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2798 cpu_fpr[QFPREG(rs2) + 1]); \
2799 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2800 cpu_fpr[QFPREG(rs2) + 2]); \
2801 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2802 cpu_fpr[QFPREG(rs2) + 3]); \
2803 gen_set_label(l1); \
2804 tcg_temp_free(r_cond); \
2806 case 0x001: /* V9 fmovscc %fcc0 */
2809 case 0x002: /* V9 fmovdcc %fcc0 */
2812 case 0x003: /* V9 fmovqcc %fcc0 */
2813 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2816 case 0x041: /* V9 fmovscc %fcc1 */
2819 case 0x042: /* V9 fmovdcc %fcc1 */
2822 case 0x043: /* V9 fmovqcc %fcc1 */
2823 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2826 case 0x081: /* V9 fmovscc %fcc2 */
2829 case 0x082: /* V9 fmovdcc %fcc2 */
2832 case 0x083: /* V9 fmovqcc %fcc2 */
2833 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2836 case 0x0c1: /* V9 fmovscc %fcc3 */
2839 case 0x0c2: /* V9 fmovdcc %fcc3 */
2842 case 0x0c3: /* V9 fmovqcc %fcc3 */
2843 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2849 #define FMOVCC(size_FDQ, icc) \
2854 l1 = gen_new_label(); \
2855 r_cond = tcg_temp_new(); \
2856 cond = GET_FIELD_SP(insn, 14, 17); \
2857 gen_cond(r_cond, icc, cond); \
2858 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2860 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2861 (glue(size_FDQ, FPREG(rs2))); \
2862 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2863 (glue(size_FDQ, FPREG(rd))); \
2864 gen_set_label(l1); \
2865 tcg_temp_free(r_cond); \
2867 #define FMOVSCC(icc) \
2872 l1 = gen_new_label(); \
2873 r_cond = tcg_temp_new(); \
2874 cond = GET_FIELD_SP(insn, 14, 17); \
2875 gen_cond(r_cond, icc, cond); \
2876 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2878 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2879 gen_set_label(l1); \
2880 tcg_temp_free(r_cond); \
2882 #define FMOVDCC(icc) \
2887 l1 = gen_new_label(); \
2888 r_cond = tcg_temp_new(); \
2889 cond = GET_FIELD_SP(insn, 14, 17); \
2890 gen_cond(r_cond, icc, cond); \
2891 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2893 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2894 cpu_fpr[DFPREG(rs2)]); \
2895 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2896 cpu_fpr[DFPREG(rs2) + 1]); \
2897 gen_set_label(l1); \
2898 tcg_temp_free(r_cond); \
2900 #define FMOVQCC(icc) \
2905 l1 = gen_new_label(); \
2906 r_cond = tcg_temp_new(); \
2907 cond = GET_FIELD_SP(insn, 14, 17); \
2908 gen_cond(r_cond, icc, cond); \
2909 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2911 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2912 cpu_fpr[QFPREG(rs2)]); \
2913 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2914 cpu_fpr[QFPREG(rs2) + 1]); \
2915 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2916 cpu_fpr[QFPREG(rs2) + 2]); \
2917 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2918 cpu_fpr[QFPREG(rs2) + 3]); \
2919 gen_set_label(l1); \
2920 tcg_temp_free(r_cond); \
2923 case 0x101: /* V9 fmovscc %icc */
2926 case 0x102: /* V9 fmovdcc %icc */
2928 case 0x103: /* V9 fmovqcc %icc */
2929 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2932 case 0x181: /* V9 fmovscc %xcc */
2935 case 0x182: /* V9 fmovdcc %xcc */
2938 case 0x183: /* V9 fmovqcc %xcc */
2939 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2946 case 0x51: /* fcmps, V9 %fcc */
2947 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2949 case 0x52: /* fcmpd, V9 %fcc */
2950 gen_op_load_fpr_DT0(DFPREG(rs1
));
2951 gen_op_load_fpr_DT1(DFPREG(rs2
));
2952 gen_op_fcmpd(rd
& 3);
2954 case 0x53: /* fcmpq, V9 %fcc */
2955 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2956 gen_op_load_fpr_QT0(QFPREG(rs1
));
2957 gen_op_load_fpr_QT1(QFPREG(rs2
));
2958 gen_op_fcmpq(rd
& 3);
2960 case 0x55: /* fcmpes, V9 %fcc */
2961 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2963 case 0x56: /* fcmped, V9 %fcc */
2964 gen_op_load_fpr_DT0(DFPREG(rs1
));
2965 gen_op_load_fpr_DT1(DFPREG(rs2
));
2966 gen_op_fcmped(rd
& 3);
2968 case 0x57: /* fcmpeq, V9 %fcc */
2969 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2970 gen_op_load_fpr_QT0(QFPREG(rs1
));
2971 gen_op_load_fpr_QT1(QFPREG(rs2
));
2972 gen_op_fcmpeq(rd
& 3);
2977 } else if (xop
== 0x2) {
2980 rs1
= GET_FIELD(insn
, 13, 17);
2982 // or %g0, x, y -> mov T0, x; mov y, T0
2983 if (IS_IMM
) { /* immediate */
2986 rs2
= GET_FIELDs(insn
, 19, 31);
2987 r_const
= tcg_const_tl((int)rs2
);
2988 gen_movl_TN_reg(rd
, r_const
);
2989 tcg_temp_free(r_const
);
2990 } else { /* register */
2991 rs2
= GET_FIELD(insn
, 27, 31);
2992 gen_movl_reg_TN(rs2
, cpu_dst
);
2993 gen_movl_TN_reg(rd
, cpu_dst
);
2996 cpu_src1
= get_src1(insn
, cpu_src1
);
2997 if (IS_IMM
) { /* immediate */
2998 rs2
= GET_FIELDs(insn
, 19, 31);
2999 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, (int)rs2
);
3000 gen_movl_TN_reg(rd
, cpu_dst
);
3001 } else { /* register */
3002 // or x, %g0, y -> mov T1, x; mov y, T1
3003 rs2
= GET_FIELD(insn
, 27, 31);
3005 gen_movl_reg_TN(rs2
, cpu_src2
);
3006 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3007 gen_movl_TN_reg(rd
, cpu_dst
);
3009 gen_movl_TN_reg(rd
, cpu_src1
);
3012 #ifdef TARGET_SPARC64
3013 } else if (xop
== 0x25) { /* sll, V9 sllx */
3014 cpu_src1
= get_src1(insn
, cpu_src1
);
3015 if (IS_IMM
) { /* immediate */
3016 rs2
= GET_FIELDs(insn
, 20, 31);
3017 if (insn
& (1 << 12)) {
3018 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3020 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3022 } else { /* register */
3023 rs2
= GET_FIELD(insn
, 27, 31);
3024 gen_movl_reg_TN(rs2
, cpu_src2
);
3025 if (insn
& (1 << 12)) {
3026 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3028 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3030 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3032 gen_movl_TN_reg(rd
, cpu_dst
);
3033 } else if (xop
== 0x26) { /* srl, V9 srlx */
3034 cpu_src1
= get_src1(insn
, cpu_src1
);
3035 if (IS_IMM
) { /* immediate */
3036 rs2
= GET_FIELDs(insn
, 20, 31);
3037 if (insn
& (1 << 12)) {
3038 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3040 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3041 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
3043 } else { /* register */
3044 rs2
= GET_FIELD(insn
, 27, 31);
3045 gen_movl_reg_TN(rs2
, cpu_src2
);
3046 if (insn
& (1 << 12)) {
3047 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3048 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3050 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3051 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3052 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3055 gen_movl_TN_reg(rd
, cpu_dst
);
3056 } else if (xop
== 0x27) { /* sra, V9 srax */
3057 cpu_src1
= get_src1(insn
, cpu_src1
);
3058 if (IS_IMM
) { /* immediate */
3059 rs2
= GET_FIELDs(insn
, 20, 31);
3060 if (insn
& (1 << 12)) {
3061 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3063 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3064 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3065 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
3067 } else { /* register */
3068 rs2
= GET_FIELD(insn
, 27, 31);
3069 gen_movl_reg_TN(rs2
, cpu_src2
);
3070 if (insn
& (1 << 12)) {
3071 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3072 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3074 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3075 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3076 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3077 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3080 gen_movl_TN_reg(rd
, cpu_dst
);
3082 } else if (xop
< 0x36) {
3083 cpu_src1
= get_src1(insn
, cpu_src1
);
3084 cpu_src2
= get_src2(insn
, cpu_src2
);
3086 switch (xop
& ~0x10) {
3089 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3091 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3094 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3096 gen_op_logic_cc(cpu_dst
);
3099 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3101 gen_op_logic_cc(cpu_dst
);
3104 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3106 gen_op_logic_cc(cpu_dst
);
3110 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3112 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3115 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3117 gen_op_logic_cc(cpu_dst
);
3120 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3122 gen_op_logic_cc(cpu_dst
);
3125 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3126 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3128 gen_op_logic_cc(cpu_dst
);
3132 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3134 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3135 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3136 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3139 #ifdef TARGET_SPARC64
3140 case 0x9: /* V9 mulx */
3141 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3145 CHECK_IU_FEATURE(dc
, MUL
);
3146 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3148 gen_op_logic_cc(cpu_dst
);
3151 CHECK_IU_FEATURE(dc
, MUL
);
3152 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3154 gen_op_logic_cc(cpu_dst
);
3158 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3160 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3161 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3162 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3165 #ifdef TARGET_SPARC64
3166 case 0xd: /* V9 udivx */
3167 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3168 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3169 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3170 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3174 CHECK_IU_FEATURE(dc
, DIV
);
3175 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3177 gen_op_div_cc(cpu_dst
);
3180 CHECK_IU_FEATURE(dc
, DIV
);
3181 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3183 gen_op_div_cc(cpu_dst
);
3188 gen_movl_TN_reg(rd
, cpu_dst
);
3191 case 0x20: /* taddcc */
3192 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3193 gen_movl_TN_reg(rd
, cpu_dst
);
3195 case 0x21: /* tsubcc */
3196 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3197 gen_movl_TN_reg(rd
, cpu_dst
);
3199 case 0x22: /* taddcctv */
3200 save_state(dc
, cpu_cond
);
3201 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3202 gen_movl_TN_reg(rd
, cpu_dst
);
3204 case 0x23: /* tsubcctv */
3205 save_state(dc
, cpu_cond
);
3206 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3207 gen_movl_TN_reg(rd
, cpu_dst
);
3209 case 0x24: /* mulscc */
3210 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3211 gen_movl_TN_reg(rd
, cpu_dst
);
3213 #ifndef TARGET_SPARC64
3214 case 0x25: /* sll */
3215 if (IS_IMM
) { /* immediate */
3216 rs2
= GET_FIELDs(insn
, 20, 31);
3217 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3218 } else { /* register */
3219 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3220 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3222 gen_movl_TN_reg(rd
, cpu_dst
);
3224 case 0x26: /* srl */
3225 if (IS_IMM
) { /* immediate */
3226 rs2
= GET_FIELDs(insn
, 20, 31);
3227 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3228 } else { /* register */
3229 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3230 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3232 gen_movl_TN_reg(rd
, cpu_dst
);
3234 case 0x27: /* sra */
3235 if (IS_IMM
) { /* immediate */
3236 rs2
= GET_FIELDs(insn
, 20, 31);
3237 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3238 } else { /* register */
3239 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3240 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3242 gen_movl_TN_reg(rd
, cpu_dst
);
3249 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3250 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3252 #ifndef TARGET_SPARC64
3253 case 0x01 ... 0x0f: /* undefined in the
3257 case 0x10 ... 0x1f: /* implementation-dependent
3263 case 0x2: /* V9 wrccr */
3264 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3265 gen_helper_wrccr(cpu_dst
);
3267 case 0x3: /* V9 wrasi */
3268 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3269 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3271 case 0x6: /* V9 wrfprs */
3272 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3273 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3274 save_state(dc
, cpu_cond
);
3279 case 0xf: /* V9 sir, nop if user */
3280 #if !defined(CONFIG_USER_ONLY)
3285 case 0x13: /* Graphics Status */
3286 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3288 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3290 case 0x14: /* Softint set */
3291 if (!supervisor(dc
))
3293 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3294 gen_helper_set_softint(cpu_tmp64
);
3296 case 0x15: /* Softint clear */
3297 if (!supervisor(dc
))
3299 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3300 gen_helper_clear_softint(cpu_tmp64
);
3302 case 0x16: /* Softint write */
3303 if (!supervisor(dc
))
3305 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3306 gen_helper_write_softint(cpu_tmp64
);
3308 case 0x17: /* Tick compare */
3309 #if !defined(CONFIG_USER_ONLY)
3310 if (!supervisor(dc
))
3316 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3318 r_tickptr
= tcg_temp_new_ptr();
3319 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3320 offsetof(CPUState
, tick
));
3321 gen_helper_tick_set_limit(r_tickptr
,
3323 tcg_temp_free_ptr(r_tickptr
);
3326 case 0x18: /* System tick */
3327 #if !defined(CONFIG_USER_ONLY)
3328 if (!supervisor(dc
))
3334 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3336 r_tickptr
= tcg_temp_new_ptr();
3337 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3338 offsetof(CPUState
, stick
));
3339 gen_helper_tick_set_count(r_tickptr
,
3341 tcg_temp_free_ptr(r_tickptr
);
3344 case 0x19: /* System tick compare */
3345 #if !defined(CONFIG_USER_ONLY)
3346 if (!supervisor(dc
))
3352 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3354 r_tickptr
= tcg_temp_new_ptr();
3355 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3356 offsetof(CPUState
, stick
));
3357 gen_helper_tick_set_limit(r_tickptr
,
3359 tcg_temp_free_ptr(r_tickptr
);
3363 case 0x10: /* Performance Control */
3364 case 0x11: /* Performance Instrumentation
3366 case 0x12: /* Dispatch Control */
3373 #if !defined(CONFIG_USER_ONLY)
3374 case 0x31: /* wrpsr, V9 saved, restored */
3376 if (!supervisor(dc
))
3378 #ifdef TARGET_SPARC64
3384 gen_helper_restored();
3386 case 2: /* UA2005 allclean */
3387 case 3: /* UA2005 otherw */
3388 case 4: /* UA2005 normalw */
3389 case 5: /* UA2005 invalw */
3395 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3396 gen_helper_wrpsr(cpu_dst
);
3397 save_state(dc
, cpu_cond
);
3404 case 0x32: /* wrwim, V9 wrpr */
3406 if (!supervisor(dc
))
3408 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3409 #ifdef TARGET_SPARC64
3415 r_tsptr
= tcg_temp_new_ptr();
3416 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3417 offsetof(CPUState
, tsptr
));
3418 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3419 offsetof(trap_state
, tpc
));
3420 tcg_temp_free_ptr(r_tsptr
);
3427 r_tsptr
= tcg_temp_new_ptr();
3428 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3429 offsetof(CPUState
, tsptr
));
3430 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3431 offsetof(trap_state
, tnpc
));
3432 tcg_temp_free_ptr(r_tsptr
);
3439 r_tsptr
= tcg_temp_new_ptr();
3440 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3441 offsetof(CPUState
, tsptr
));
3442 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3443 offsetof(trap_state
,
3445 tcg_temp_free_ptr(r_tsptr
);
3452 r_tsptr
= tcg_temp_new_ptr();
3453 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3454 offsetof(CPUState
, tsptr
));
3455 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3456 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3457 offsetof(trap_state
, tt
));
3458 tcg_temp_free_ptr(r_tsptr
);
3465 r_tickptr
= tcg_temp_new_ptr();
3466 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3467 offsetof(CPUState
, tick
));
3468 gen_helper_tick_set_count(r_tickptr
,
3470 tcg_temp_free_ptr(r_tickptr
);
3474 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3477 save_state(dc
, cpu_cond
);
3478 gen_helper_wrpstate(cpu_tmp0
);
3484 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3485 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3486 offsetof(CPUSPARCState
, tl
));
3489 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3490 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3491 offsetof(CPUSPARCState
,
3495 gen_helper_wrcwp(cpu_tmp0
);
3498 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3499 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3500 offsetof(CPUSPARCState
,
3503 case 11: // canrestore
3504 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3505 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3506 offsetof(CPUSPARCState
,
3509 case 12: // cleanwin
3510 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3511 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3512 offsetof(CPUSPARCState
,
3515 case 13: // otherwin
3516 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3517 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3518 offsetof(CPUSPARCState
,
3522 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3523 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3524 offsetof(CPUSPARCState
,
3527 case 16: // UA2005 gl
3528 CHECK_IU_FEATURE(dc
, GL
);
3529 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3530 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3531 offsetof(CPUSPARCState
, gl
));
3533 case 26: // UA2005 strand status
3534 CHECK_IU_FEATURE(dc
, HYPV
);
3535 if (!hypervisor(dc
))
3537 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3543 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3544 if (dc
->def
->nwindows
!= 32)
3545 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3546 (1 << dc
->def
->nwindows
) - 1);
3547 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3551 case 0x33: /* wrtbr, UA2005 wrhpr */
3553 #ifndef TARGET_SPARC64
3554 if (!supervisor(dc
))
3556 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3558 CHECK_IU_FEATURE(dc
, HYPV
);
3559 if (!hypervisor(dc
))
3561 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3564 // XXX gen_op_wrhpstate();
3565 save_state(dc
, cpu_cond
);
3571 // XXX gen_op_wrhtstate();
3574 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3577 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3579 case 31: // hstick_cmpr
3583 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3584 r_tickptr
= tcg_temp_new_ptr();
3585 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3586 offsetof(CPUState
, hstick
));
3587 gen_helper_tick_set_limit(r_tickptr
,
3589 tcg_temp_free_ptr(r_tickptr
);
3592 case 6: // hver readonly
3600 #ifdef TARGET_SPARC64
3601 case 0x2c: /* V9 movcc */
3603 int cc
= GET_FIELD_SP(insn
, 11, 12);
3604 int cond
= GET_FIELD_SP(insn
, 14, 17);
3608 r_cond
= tcg_temp_new();
3609 if (insn
& (1 << 18)) {
3611 gen_cond(r_cond
, 0, cond
);
3613 gen_cond(r_cond
, 1, cond
);
3617 gen_fcond(r_cond
, cc
, cond
);
3620 l1
= gen_new_label();
3622 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3623 if (IS_IMM
) { /* immediate */
3626 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3627 r_const
= tcg_const_tl((int)rs2
);
3628 gen_movl_TN_reg(rd
, r_const
);
3629 tcg_temp_free(r_const
);
3631 rs2
= GET_FIELD_SP(insn
, 0, 4);
3632 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3633 gen_movl_TN_reg(rd
, cpu_tmp0
);
3636 tcg_temp_free(r_cond
);
3639 case 0x2d: /* V9 sdivx */
3640 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3641 gen_movl_TN_reg(rd
, cpu_dst
);
3643 case 0x2e: /* V9 popc */
3645 cpu_src2
= get_src2(insn
, cpu_src2
);
3646 gen_helper_popc(cpu_dst
, cpu_src2
);
3647 gen_movl_TN_reg(rd
, cpu_dst
);
3649 case 0x2f: /* V9 movr */
3651 int cond
= GET_FIELD_SP(insn
, 10, 12);
3654 cpu_src1
= get_src1(insn
, cpu_src1
);
3656 l1
= gen_new_label();
3658 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3660 if (IS_IMM
) { /* immediate */
3663 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3664 r_const
= tcg_const_tl((int)rs2
);
3665 gen_movl_TN_reg(rd
, r_const
);
3666 tcg_temp_free(r_const
);
3668 rs2
= GET_FIELD_SP(insn
, 0, 4);
3669 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3670 gen_movl_TN_reg(rd
, cpu_tmp0
);
3680 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3681 #ifdef TARGET_SPARC64
3682 int opf
= GET_FIELD_SP(insn
, 5, 13);
3683 rs1
= GET_FIELD(insn
, 13, 17);
3684 rs2
= GET_FIELD(insn
, 27, 31);
3685 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3689 case 0x000: /* VIS I edge8cc */
3690 case 0x001: /* VIS II edge8n */
3691 case 0x002: /* VIS I edge8lcc */
3692 case 0x003: /* VIS II edge8ln */
3693 case 0x004: /* VIS I edge16cc */
3694 case 0x005: /* VIS II edge16n */
3695 case 0x006: /* VIS I edge16lcc */
3696 case 0x007: /* VIS II edge16ln */
3697 case 0x008: /* VIS I edge32cc */
3698 case 0x009: /* VIS II edge32n */
3699 case 0x00a: /* VIS I edge32lcc */
3700 case 0x00b: /* VIS II edge32ln */
3703 case 0x010: /* VIS I array8 */
3704 CHECK_FPU_FEATURE(dc
, VIS1
);
3705 cpu_src1
= get_src1(insn
, cpu_src1
);
3706 gen_movl_reg_TN(rs2
, cpu_src2
);
3707 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3708 gen_movl_TN_reg(rd
, cpu_dst
);
3710 case 0x012: /* VIS I array16 */
3711 CHECK_FPU_FEATURE(dc
, VIS1
);
3712 cpu_src1
= get_src1(insn
, cpu_src1
);
3713 gen_movl_reg_TN(rs2
, cpu_src2
);
3714 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3715 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3716 gen_movl_TN_reg(rd
, cpu_dst
);
3718 case 0x014: /* VIS I array32 */
3719 CHECK_FPU_FEATURE(dc
, VIS1
);
3720 cpu_src1
= get_src1(insn
, cpu_src1
);
3721 gen_movl_reg_TN(rs2
, cpu_src2
);
3722 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3723 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3724 gen_movl_TN_reg(rd
, cpu_dst
);
3726 case 0x018: /* VIS I alignaddr */
3727 CHECK_FPU_FEATURE(dc
, VIS1
);
3728 cpu_src1
= get_src1(insn
, cpu_src1
);
3729 gen_movl_reg_TN(rs2
, cpu_src2
);
3730 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3731 gen_movl_TN_reg(rd
, cpu_dst
);
3733 case 0x019: /* VIS II bmask */
3734 case 0x01a: /* VIS I alignaddrl */
3737 case 0x020: /* VIS I fcmple16 */
3738 CHECK_FPU_FEATURE(dc
, VIS1
);
3739 gen_op_load_fpr_DT0(DFPREG(rs1
));
3740 gen_op_load_fpr_DT1(DFPREG(rs2
));
3741 gen_helper_fcmple16();
3742 gen_op_store_DT0_fpr(DFPREG(rd
));
3744 case 0x022: /* VIS I fcmpne16 */
3745 CHECK_FPU_FEATURE(dc
, VIS1
);
3746 gen_op_load_fpr_DT0(DFPREG(rs1
));
3747 gen_op_load_fpr_DT1(DFPREG(rs2
));
3748 gen_helper_fcmpne16();
3749 gen_op_store_DT0_fpr(DFPREG(rd
));
3751 case 0x024: /* VIS I fcmple32 */
3752 CHECK_FPU_FEATURE(dc
, VIS1
);
3753 gen_op_load_fpr_DT0(DFPREG(rs1
));
3754 gen_op_load_fpr_DT1(DFPREG(rs2
));
3755 gen_helper_fcmple32();
3756 gen_op_store_DT0_fpr(DFPREG(rd
));
3758 case 0x026: /* VIS I fcmpne32 */
3759 CHECK_FPU_FEATURE(dc
, VIS1
);
3760 gen_op_load_fpr_DT0(DFPREG(rs1
));
3761 gen_op_load_fpr_DT1(DFPREG(rs2
));
3762 gen_helper_fcmpne32();
3763 gen_op_store_DT0_fpr(DFPREG(rd
));
3765 case 0x028: /* VIS I fcmpgt16 */
3766 CHECK_FPU_FEATURE(dc
, VIS1
);
3767 gen_op_load_fpr_DT0(DFPREG(rs1
));
3768 gen_op_load_fpr_DT1(DFPREG(rs2
));
3769 gen_helper_fcmpgt16();
3770 gen_op_store_DT0_fpr(DFPREG(rd
));
3772 case 0x02a: /* VIS I fcmpeq16 */
3773 CHECK_FPU_FEATURE(dc
, VIS1
);
3774 gen_op_load_fpr_DT0(DFPREG(rs1
));
3775 gen_op_load_fpr_DT1(DFPREG(rs2
));
3776 gen_helper_fcmpeq16();
3777 gen_op_store_DT0_fpr(DFPREG(rd
));
3779 case 0x02c: /* VIS I fcmpgt32 */
3780 CHECK_FPU_FEATURE(dc
, VIS1
);
3781 gen_op_load_fpr_DT0(DFPREG(rs1
));
3782 gen_op_load_fpr_DT1(DFPREG(rs2
));
3783 gen_helper_fcmpgt32();
3784 gen_op_store_DT0_fpr(DFPREG(rd
));
3786 case 0x02e: /* VIS I fcmpeq32 */
3787 CHECK_FPU_FEATURE(dc
, VIS1
);
3788 gen_op_load_fpr_DT0(DFPREG(rs1
));
3789 gen_op_load_fpr_DT1(DFPREG(rs2
));
3790 gen_helper_fcmpeq32();
3791 gen_op_store_DT0_fpr(DFPREG(rd
));
3793 case 0x031: /* VIS I fmul8x16 */
3794 CHECK_FPU_FEATURE(dc
, VIS1
);
3795 gen_op_load_fpr_DT0(DFPREG(rs1
));
3796 gen_op_load_fpr_DT1(DFPREG(rs2
));
3797 gen_helper_fmul8x16();
3798 gen_op_store_DT0_fpr(DFPREG(rd
));
3800 case 0x033: /* VIS I fmul8x16au */
3801 CHECK_FPU_FEATURE(dc
, VIS1
);
3802 gen_op_load_fpr_DT0(DFPREG(rs1
));
3803 gen_op_load_fpr_DT1(DFPREG(rs2
));
3804 gen_helper_fmul8x16au();
3805 gen_op_store_DT0_fpr(DFPREG(rd
));
3807 case 0x035: /* VIS I fmul8x16al */
3808 CHECK_FPU_FEATURE(dc
, VIS1
);
3809 gen_op_load_fpr_DT0(DFPREG(rs1
));
3810 gen_op_load_fpr_DT1(DFPREG(rs2
));
3811 gen_helper_fmul8x16al();
3812 gen_op_store_DT0_fpr(DFPREG(rd
));
3814 case 0x036: /* VIS I fmul8sux16 */
3815 CHECK_FPU_FEATURE(dc
, VIS1
);
3816 gen_op_load_fpr_DT0(DFPREG(rs1
));
3817 gen_op_load_fpr_DT1(DFPREG(rs2
));
3818 gen_helper_fmul8sux16();
3819 gen_op_store_DT0_fpr(DFPREG(rd
));
3821 case 0x037: /* VIS I fmul8ulx16 */
3822 CHECK_FPU_FEATURE(dc
, VIS1
);
3823 gen_op_load_fpr_DT0(DFPREG(rs1
));
3824 gen_op_load_fpr_DT1(DFPREG(rs2
));
3825 gen_helper_fmul8ulx16();
3826 gen_op_store_DT0_fpr(DFPREG(rd
));
3828 case 0x038: /* VIS I fmuld8sux16 */
3829 CHECK_FPU_FEATURE(dc
, VIS1
);
3830 gen_op_load_fpr_DT0(DFPREG(rs1
));
3831 gen_op_load_fpr_DT1(DFPREG(rs2
));
3832 gen_helper_fmuld8sux16();
3833 gen_op_store_DT0_fpr(DFPREG(rd
));
3835 case 0x039: /* VIS I fmuld8ulx16 */
3836 CHECK_FPU_FEATURE(dc
, VIS1
);
3837 gen_op_load_fpr_DT0(DFPREG(rs1
));
3838 gen_op_load_fpr_DT1(DFPREG(rs2
));
3839 gen_helper_fmuld8ulx16();
3840 gen_op_store_DT0_fpr(DFPREG(rd
));
3842 case 0x03a: /* VIS I fpack32 */
3843 case 0x03b: /* VIS I fpack16 */
3844 case 0x03d: /* VIS I fpackfix */
3845 case 0x03e: /* VIS I pdist */
3848 case 0x048: /* VIS I faligndata */
3849 CHECK_FPU_FEATURE(dc
, VIS1
);
3850 gen_op_load_fpr_DT0(DFPREG(rs1
));
3851 gen_op_load_fpr_DT1(DFPREG(rs2
));
3852 gen_helper_faligndata();
3853 gen_op_store_DT0_fpr(DFPREG(rd
));
3855 case 0x04b: /* VIS I fpmerge */
3856 CHECK_FPU_FEATURE(dc
, VIS1
);
3857 gen_op_load_fpr_DT0(DFPREG(rs1
));
3858 gen_op_load_fpr_DT1(DFPREG(rs2
));
3859 gen_helper_fpmerge();
3860 gen_op_store_DT0_fpr(DFPREG(rd
));
3862 case 0x04c: /* VIS II bshuffle */
3865 case 0x04d: /* VIS I fexpand */
3866 CHECK_FPU_FEATURE(dc
, VIS1
);
3867 gen_op_load_fpr_DT0(DFPREG(rs1
));
3868 gen_op_load_fpr_DT1(DFPREG(rs2
));
3869 gen_helper_fexpand();
3870 gen_op_store_DT0_fpr(DFPREG(rd
));
3872 case 0x050: /* VIS I fpadd16 */
3873 CHECK_FPU_FEATURE(dc
, VIS1
);
3874 gen_op_load_fpr_DT0(DFPREG(rs1
));
3875 gen_op_load_fpr_DT1(DFPREG(rs2
));
3876 gen_helper_fpadd16();
3877 gen_op_store_DT0_fpr(DFPREG(rd
));
3879 case 0x051: /* VIS I fpadd16s */
3880 CHECK_FPU_FEATURE(dc
, VIS1
);
3881 gen_helper_fpadd16s(cpu_fpr
[rd
],
3882 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3884 case 0x052: /* VIS I fpadd32 */
3885 CHECK_FPU_FEATURE(dc
, VIS1
);
3886 gen_op_load_fpr_DT0(DFPREG(rs1
));
3887 gen_op_load_fpr_DT1(DFPREG(rs2
));
3888 gen_helper_fpadd32();
3889 gen_op_store_DT0_fpr(DFPREG(rd
));
3891 case 0x053: /* VIS I fpadd32s */
3892 CHECK_FPU_FEATURE(dc
, VIS1
);
3893 gen_helper_fpadd32s(cpu_fpr
[rd
],
3894 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3896 case 0x054: /* VIS I fpsub16 */
3897 CHECK_FPU_FEATURE(dc
, VIS1
);
3898 gen_op_load_fpr_DT0(DFPREG(rs1
));
3899 gen_op_load_fpr_DT1(DFPREG(rs2
));
3900 gen_helper_fpsub16();
3901 gen_op_store_DT0_fpr(DFPREG(rd
));
3903 case 0x055: /* VIS I fpsub16s */
3904 CHECK_FPU_FEATURE(dc
, VIS1
);
3905 gen_helper_fpsub16s(cpu_fpr
[rd
],
3906 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3908 case 0x056: /* VIS I fpsub32 */
3909 CHECK_FPU_FEATURE(dc
, VIS1
);
3910 gen_op_load_fpr_DT0(DFPREG(rs1
));
3911 gen_op_load_fpr_DT1(DFPREG(rs2
));
3912 gen_helper_fpsub32();
3913 gen_op_store_DT0_fpr(DFPREG(rd
));
3915 case 0x057: /* VIS I fpsub32s */
3916 CHECK_FPU_FEATURE(dc
, VIS1
);
3917 gen_helper_fpsub32s(cpu_fpr
[rd
],
3918 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3920 case 0x060: /* VIS I fzero */
3921 CHECK_FPU_FEATURE(dc
, VIS1
);
3922 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3923 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3925 case 0x061: /* VIS I fzeros */
3926 CHECK_FPU_FEATURE(dc
, VIS1
);
3927 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3929 case 0x062: /* VIS I fnor */
3930 CHECK_FPU_FEATURE(dc
, VIS1
);
3931 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3932 cpu_fpr
[DFPREG(rs2
)]);
3933 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3934 cpu_fpr
[DFPREG(rs2
) + 1]);
3936 case 0x063: /* VIS I fnors */
3937 CHECK_FPU_FEATURE(dc
, VIS1
);
3938 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3940 case 0x064: /* VIS I fandnot2 */
3941 CHECK_FPU_FEATURE(dc
, VIS1
);
3942 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3943 cpu_fpr
[DFPREG(rs2
)]);
3944 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3945 cpu_fpr
[DFPREG(rs1
) + 1],
3946 cpu_fpr
[DFPREG(rs2
) + 1]);
3948 case 0x065: /* VIS I fandnot2s */
3949 CHECK_FPU_FEATURE(dc
, VIS1
);
3950 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3952 case 0x066: /* VIS I fnot2 */
3953 CHECK_FPU_FEATURE(dc
, VIS1
);
3954 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
3955 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3956 cpu_fpr
[DFPREG(rs2
) + 1]);
3958 case 0x067: /* VIS I fnot2s */
3959 CHECK_FPU_FEATURE(dc
, VIS1
);
3960 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3962 case 0x068: /* VIS I fandnot1 */
3963 CHECK_FPU_FEATURE(dc
, VIS1
);
3964 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3965 cpu_fpr
[DFPREG(rs1
)]);
3966 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3967 cpu_fpr
[DFPREG(rs2
) + 1],
3968 cpu_fpr
[DFPREG(rs1
) + 1]);
3970 case 0x069: /* VIS I fandnot1s */
3971 CHECK_FPU_FEATURE(dc
, VIS1
);
3972 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3974 case 0x06a: /* VIS I fnot1 */
3975 CHECK_FPU_FEATURE(dc
, VIS1
);
3976 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3977 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3978 cpu_fpr
[DFPREG(rs1
) + 1]);
3980 case 0x06b: /* VIS I fnot1s */
3981 CHECK_FPU_FEATURE(dc
, VIS1
);
3982 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3984 case 0x06c: /* VIS I fxor */
3985 CHECK_FPU_FEATURE(dc
, VIS1
);
3986 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3987 cpu_fpr
[DFPREG(rs2
)]);
3988 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3989 cpu_fpr
[DFPREG(rs1
) + 1],
3990 cpu_fpr
[DFPREG(rs2
) + 1]);
3992 case 0x06d: /* VIS I fxors */
3993 CHECK_FPU_FEATURE(dc
, VIS1
);
3994 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3996 case 0x06e: /* VIS I fnand */
3997 CHECK_FPU_FEATURE(dc
, VIS1
);
3998 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3999 cpu_fpr
[DFPREG(rs2
)]);
4000 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
4001 cpu_fpr
[DFPREG(rs2
) + 1]);
4003 case 0x06f: /* VIS I fnands */
4004 CHECK_FPU_FEATURE(dc
, VIS1
);
4005 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4007 case 0x070: /* VIS I fand */
4008 CHECK_FPU_FEATURE(dc
, VIS1
);
4009 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4010 cpu_fpr
[DFPREG(rs2
)]);
4011 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
4012 cpu_fpr
[DFPREG(rs1
) + 1],
4013 cpu_fpr
[DFPREG(rs2
) + 1]);
4015 case 0x071: /* VIS I fands */
4016 CHECK_FPU_FEATURE(dc
, VIS1
);
4017 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4019 case 0x072: /* VIS I fxnor */
4020 CHECK_FPU_FEATURE(dc
, VIS1
);
4021 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4022 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4023 cpu_fpr
[DFPREG(rs1
)]);
4024 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4025 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4026 cpu_fpr
[DFPREG(rs1
) + 1]);
4028 case 0x073: /* VIS I fxnors */
4029 CHECK_FPU_FEATURE(dc
, VIS1
);
4030 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4031 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4033 case 0x074: /* VIS I fsrc1 */
4034 CHECK_FPU_FEATURE(dc
, VIS1
);
4035 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4036 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4037 cpu_fpr
[DFPREG(rs1
) + 1]);
4039 case 0x075: /* VIS I fsrc1s */
4040 CHECK_FPU_FEATURE(dc
, VIS1
);
4041 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4043 case 0x076: /* VIS I fornot2 */
4044 CHECK_FPU_FEATURE(dc
, VIS1
);
4045 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4046 cpu_fpr
[DFPREG(rs2
)]);
4047 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4048 cpu_fpr
[DFPREG(rs1
) + 1],
4049 cpu_fpr
[DFPREG(rs2
) + 1]);
4051 case 0x077: /* VIS I fornot2s */
4052 CHECK_FPU_FEATURE(dc
, VIS1
);
4053 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4055 case 0x078: /* VIS I fsrc2 */
4056 CHECK_FPU_FEATURE(dc
, VIS1
);
4057 gen_op_load_fpr_DT0(DFPREG(rs2
));
4058 gen_op_store_DT0_fpr(DFPREG(rd
));
4060 case 0x079: /* VIS I fsrc2s */
4061 CHECK_FPU_FEATURE(dc
, VIS1
);
4062 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4064 case 0x07a: /* VIS I fornot1 */
4065 CHECK_FPU_FEATURE(dc
, VIS1
);
4066 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4067 cpu_fpr
[DFPREG(rs1
)]);
4068 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4069 cpu_fpr
[DFPREG(rs2
) + 1],
4070 cpu_fpr
[DFPREG(rs1
) + 1]);
4072 case 0x07b: /* VIS I fornot1s */
4073 CHECK_FPU_FEATURE(dc
, VIS1
);
4074 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4076 case 0x07c: /* VIS I for */
4077 CHECK_FPU_FEATURE(dc
, VIS1
);
4078 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4079 cpu_fpr
[DFPREG(rs2
)]);
4080 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4081 cpu_fpr
[DFPREG(rs1
) + 1],
4082 cpu_fpr
[DFPREG(rs2
) + 1]);
4084 case 0x07d: /* VIS I fors */
4085 CHECK_FPU_FEATURE(dc
, VIS1
);
4086 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4088 case 0x07e: /* VIS I fone */
4089 CHECK_FPU_FEATURE(dc
, VIS1
);
4090 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4091 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4093 case 0x07f: /* VIS I fones */
4094 CHECK_FPU_FEATURE(dc
, VIS1
);
4095 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4097 case 0x080: /* VIS I shutdown */
4098 case 0x081: /* VIS II siam */
4107 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4108 #ifdef TARGET_SPARC64
4113 #ifdef TARGET_SPARC64
4114 } else if (xop
== 0x39) { /* V9 return */
4117 save_state(dc
, cpu_cond
);
4118 cpu_src1
= get_src1(insn
, cpu_src1
);
4119 if (IS_IMM
) { /* immediate */
4120 rs2
= GET_FIELDs(insn
, 19, 31);
4121 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4122 } else { /* register */
4123 rs2
= GET_FIELD(insn
, 27, 31);
4125 gen_movl_reg_TN(rs2
, cpu_src2
);
4126 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4128 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4130 gen_helper_restore();
4131 gen_mov_pc_npc(dc
, cpu_cond
);
4132 r_const
= tcg_const_i32(3);
4133 gen_helper_check_align(cpu_dst
, r_const
);
4134 tcg_temp_free_i32(r_const
);
4135 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4136 dc
->npc
= DYNAMIC_PC
;
4140 cpu_src1
= get_src1(insn
, cpu_src1
);
4141 if (IS_IMM
) { /* immediate */
4142 rs2
= GET_FIELDs(insn
, 19, 31);
4143 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4144 } else { /* register */
4145 rs2
= GET_FIELD(insn
, 27, 31);
4147 gen_movl_reg_TN(rs2
, cpu_src2
);
4148 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4150 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4153 case 0x38: /* jmpl */
4158 r_pc
= tcg_const_tl(dc
->pc
);
4159 gen_movl_TN_reg(rd
, r_pc
);
4160 tcg_temp_free(r_pc
);
4161 gen_mov_pc_npc(dc
, cpu_cond
);
4162 r_const
= tcg_const_i32(3);
4163 gen_helper_check_align(cpu_dst
, r_const
);
4164 tcg_temp_free_i32(r_const
);
4165 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4166 dc
->npc
= DYNAMIC_PC
;
4169 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4170 case 0x39: /* rett, V9 return */
4174 if (!supervisor(dc
))
4176 gen_mov_pc_npc(dc
, cpu_cond
);
4177 r_const
= tcg_const_i32(3);
4178 gen_helper_check_align(cpu_dst
, r_const
);
4179 tcg_temp_free_i32(r_const
);
4180 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4181 dc
->npc
= DYNAMIC_PC
;
4186 case 0x3b: /* flush */
4187 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4189 gen_helper_flush(cpu_dst
);
4191 case 0x3c: /* save */
4192 save_state(dc
, cpu_cond
);
4194 gen_movl_TN_reg(rd
, cpu_dst
);
4196 case 0x3d: /* restore */
4197 save_state(dc
, cpu_cond
);
4198 gen_helper_restore();
4199 gen_movl_TN_reg(rd
, cpu_dst
);
4201 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4202 case 0x3e: /* V9 done/retry */
4206 if (!supervisor(dc
))
4208 dc
->npc
= DYNAMIC_PC
;
4209 dc
->pc
= DYNAMIC_PC
;
4213 if (!supervisor(dc
))
4215 dc
->npc
= DYNAMIC_PC
;
4216 dc
->pc
= DYNAMIC_PC
;
4232 case 3: /* load/store instructions */
4234 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4236 cpu_src1
= get_src1(insn
, cpu_src1
);
4237 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4238 rs2
= GET_FIELD(insn
, 27, 31);
4239 gen_movl_reg_TN(rs2
, cpu_src2
);
4240 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4241 } else if (IS_IMM
) { /* immediate */
4242 rs2
= GET_FIELDs(insn
, 19, 31);
4243 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, (int)rs2
);
4244 } else { /* register */
4245 rs2
= GET_FIELD(insn
, 27, 31);
4247 gen_movl_reg_TN(rs2
, cpu_src2
);
4248 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4250 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4252 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4253 (xop
> 0x17 && xop
<= 0x1d ) ||
4254 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4256 case 0x0: /* load unsigned word */
4257 gen_address_mask(dc
, cpu_addr
);
4258 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4260 case 0x1: /* load unsigned byte */
4261 gen_address_mask(dc
, cpu_addr
);
4262 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4264 case 0x2: /* load unsigned halfword */
4265 gen_address_mask(dc
, cpu_addr
);
4266 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4268 case 0x3: /* load double word */
4274 save_state(dc
, cpu_cond
);
4275 r_const
= tcg_const_i32(7);
4276 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4277 tcg_temp_free_i32(r_const
);
4278 gen_address_mask(dc
, cpu_addr
);
4279 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4280 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4281 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4282 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4283 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4284 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4285 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4288 case 0x9: /* load signed byte */
4289 gen_address_mask(dc
, cpu_addr
);
4290 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4292 case 0xa: /* load signed halfword */
4293 gen_address_mask(dc
, cpu_addr
);
4294 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4296 case 0xd: /* ldstub -- XXX: should be atomically */
4300 gen_address_mask(dc
, cpu_addr
);
4301 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4302 r_const
= tcg_const_tl(0xff);
4303 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4304 tcg_temp_free(r_const
);
4307 case 0x0f: /* swap register with memory. Also
4309 CHECK_IU_FEATURE(dc
, SWAP
);
4310 gen_movl_reg_TN(rd
, cpu_val
);
4311 gen_address_mask(dc
, cpu_addr
);
4312 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4313 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4314 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4316 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4317 case 0x10: /* load word alternate */
4318 #ifndef TARGET_SPARC64
4321 if (!supervisor(dc
))
4324 save_state(dc
, cpu_cond
);
4325 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4327 case 0x11: /* load unsigned byte alternate */
4328 #ifndef TARGET_SPARC64
4331 if (!supervisor(dc
))
4334 save_state(dc
, cpu_cond
);
4335 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4337 case 0x12: /* load unsigned halfword alternate */
4338 #ifndef TARGET_SPARC64
4341 if (!supervisor(dc
))
4344 save_state(dc
, cpu_cond
);
4345 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4347 case 0x13: /* load double word alternate */
4348 #ifndef TARGET_SPARC64
4351 if (!supervisor(dc
))
4356 save_state(dc
, cpu_cond
);
4357 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4359 case 0x19: /* load signed byte alternate */
4360 #ifndef TARGET_SPARC64
4363 if (!supervisor(dc
))
4366 save_state(dc
, cpu_cond
);
4367 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4369 case 0x1a: /* load signed halfword alternate */
4370 #ifndef TARGET_SPARC64
4373 if (!supervisor(dc
))
4376 save_state(dc
, cpu_cond
);
4377 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4379 case 0x1d: /* ldstuba -- XXX: should be atomically */
4380 #ifndef TARGET_SPARC64
4383 if (!supervisor(dc
))
4386 save_state(dc
, cpu_cond
);
4387 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4389 case 0x1f: /* swap reg with alt. memory. Also
4391 CHECK_IU_FEATURE(dc
, SWAP
);
4392 #ifndef TARGET_SPARC64
4395 if (!supervisor(dc
))
4398 save_state(dc
, cpu_cond
);
4399 gen_movl_reg_TN(rd
, cpu_val
);
4400 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4403 #ifndef TARGET_SPARC64
4404 case 0x30: /* ldc */
4405 case 0x31: /* ldcsr */
4406 case 0x33: /* lddc */
4410 #ifdef TARGET_SPARC64
4411 case 0x08: /* V9 ldsw */
4412 gen_address_mask(dc
, cpu_addr
);
4413 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4415 case 0x0b: /* V9 ldx */
4416 gen_address_mask(dc
, cpu_addr
);
4417 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4419 case 0x18: /* V9 ldswa */
4420 save_state(dc
, cpu_cond
);
4421 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4423 case 0x1b: /* V9 ldxa */
4424 save_state(dc
, cpu_cond
);
4425 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4427 case 0x2d: /* V9 prefetch, no effect */
4429 case 0x30: /* V9 ldfa */
4430 save_state(dc
, cpu_cond
);
4431 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4433 case 0x33: /* V9 lddfa */
4434 save_state(dc
, cpu_cond
);
4435 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4437 case 0x3d: /* V9 prefetcha, no effect */
4439 case 0x32: /* V9 ldqfa */
4440 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4441 save_state(dc
, cpu_cond
);
4442 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4448 gen_movl_TN_reg(rd
, cpu_val
);
4449 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4452 } else if (xop
>= 0x20 && xop
< 0x24) {
4453 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4455 save_state(dc
, cpu_cond
);
4457 case 0x20: /* load fpreg */
4458 gen_address_mask(dc
, cpu_addr
);
4459 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4460 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4462 case 0x21: /* ldfsr, V9 ldxfsr */
4463 #ifdef TARGET_SPARC64
4464 gen_address_mask(dc
, cpu_addr
);
4466 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4467 gen_helper_ldxfsr(cpu_tmp64
);
4471 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4472 gen_helper_ldfsr(cpu_tmp32
);
4476 case 0x22: /* load quad fpreg */
4480 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4481 r_const
= tcg_const_i32(dc
->mem_idx
);
4482 gen_helper_ldqf(cpu_addr
, r_const
);
4483 tcg_temp_free_i32(r_const
);
4484 gen_op_store_QT0_fpr(QFPREG(rd
));
4487 case 0x23: /* load double fpreg */
4491 r_const
= tcg_const_i32(dc
->mem_idx
);
4492 gen_helper_lddf(cpu_addr
, r_const
);
4493 tcg_temp_free_i32(r_const
);
4494 gen_op_store_DT0_fpr(DFPREG(rd
));
4500 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4501 xop
== 0xe || xop
== 0x1e) {
4502 gen_movl_reg_TN(rd
, cpu_val
);
4504 case 0x4: /* store word */
4505 gen_address_mask(dc
, cpu_addr
);
4506 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4508 case 0x5: /* store byte */
4509 gen_address_mask(dc
, cpu_addr
);
4510 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4512 case 0x6: /* store halfword */
4513 gen_address_mask(dc
, cpu_addr
);
4514 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4516 case 0x7: /* store double word */
4522 save_state(dc
, cpu_cond
);
4523 gen_address_mask(dc
, cpu_addr
);
4524 r_const
= tcg_const_i32(7);
4525 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4526 tcg_temp_free_i32(r_const
);
4527 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4528 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4529 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4532 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4533 case 0x14: /* store word alternate */
4534 #ifndef TARGET_SPARC64
4537 if (!supervisor(dc
))
4540 save_state(dc
, cpu_cond
);
4541 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4543 case 0x15: /* store byte alternate */
4544 #ifndef TARGET_SPARC64
4547 if (!supervisor(dc
))
4550 save_state(dc
, cpu_cond
);
4551 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4553 case 0x16: /* store halfword alternate */
4554 #ifndef TARGET_SPARC64
4557 if (!supervisor(dc
))
4560 save_state(dc
, cpu_cond
);
4561 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4563 case 0x17: /* store double word alternate */
4564 #ifndef TARGET_SPARC64
4567 if (!supervisor(dc
))
4573 save_state(dc
, cpu_cond
);
4574 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4578 #ifdef TARGET_SPARC64
4579 case 0x0e: /* V9 stx */
4580 gen_address_mask(dc
, cpu_addr
);
4581 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4583 case 0x1e: /* V9 stxa */
4584 save_state(dc
, cpu_cond
);
4585 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4591 } else if (xop
> 0x23 && xop
< 0x28) {
4592 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4594 save_state(dc
, cpu_cond
);
4596 case 0x24: /* store fpreg */
4597 gen_address_mask(dc
, cpu_addr
);
4598 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4599 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4601 case 0x25: /* stfsr, V9 stxfsr */
4602 #ifdef TARGET_SPARC64
4603 gen_address_mask(dc
, cpu_addr
);
4604 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4606 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4608 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4610 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4611 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4615 #ifdef TARGET_SPARC64
4616 /* V9 stqf, store quad fpreg */
4620 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4621 gen_op_load_fpr_QT0(QFPREG(rd
));
4622 r_const
= tcg_const_i32(dc
->mem_idx
);
4623 gen_helper_stqf(cpu_addr
, r_const
);
4624 tcg_temp_free_i32(r_const
);
4627 #else /* !TARGET_SPARC64 */
4628 /* stdfq, store floating point queue */
4629 #if defined(CONFIG_USER_ONLY)
4632 if (!supervisor(dc
))
4634 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4639 case 0x27: /* store double fpreg */
4643 gen_op_load_fpr_DT0(DFPREG(rd
));
4644 r_const
= tcg_const_i32(dc
->mem_idx
);
4645 gen_helper_stdf(cpu_addr
, r_const
);
4646 tcg_temp_free_i32(r_const
);
4652 } else if (xop
> 0x33 && xop
< 0x3f) {
4653 save_state(dc
, cpu_cond
);
4655 #ifdef TARGET_SPARC64
4656 case 0x34: /* V9 stfa */
4657 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4659 case 0x36: /* V9 stqfa */
4663 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4664 r_const
= tcg_const_i32(7);
4665 gen_helper_check_align(cpu_addr
, r_const
);
4666 tcg_temp_free_i32(r_const
);
4667 gen_op_load_fpr_QT0(QFPREG(rd
));
4668 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4671 case 0x37: /* V9 stdfa */
4672 gen_op_load_fpr_DT0(DFPREG(rd
));
4673 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4675 case 0x3c: /* V9 casa */
4676 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4677 gen_movl_TN_reg(rd
, cpu_val
);
4679 case 0x3e: /* V9 casxa */
4680 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4681 gen_movl_TN_reg(rd
, cpu_val
);
4684 case 0x34: /* stc */
4685 case 0x35: /* stcsr */
4686 case 0x36: /* stdcq */
4687 case 0x37: /* stdc */
4699 /* default case for non jump instructions */
4700 if (dc
->npc
== DYNAMIC_PC
) {
4701 dc
->pc
= DYNAMIC_PC
;
4703 } else if (dc
->npc
== JUMP_PC
) {
4704 /* we can do a static jump */
4705 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4709 dc
->npc
= dc
->npc
+ 4;
4717 save_state(dc
, cpu_cond
);
4718 r_const
= tcg_const_i32(TT_ILL_INSN
);
4719 gen_helper_raise_exception(r_const
);
4720 tcg_temp_free_i32(r_const
);
4728 save_state(dc
, cpu_cond
);
4729 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4730 gen_helper_raise_exception(r_const
);
4731 tcg_temp_free_i32(r_const
);
4735 #if !defined(CONFIG_USER_ONLY)
4740 save_state(dc
, cpu_cond
);
4741 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4742 gen_helper_raise_exception(r_const
);
4743 tcg_temp_free_i32(r_const
);
4749 save_state(dc
, cpu_cond
);
4750 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4753 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4755 save_state(dc
, cpu_cond
);
4756 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4760 #ifndef TARGET_SPARC64
4765 save_state(dc
, cpu_cond
);
4766 r_const
= tcg_const_i32(TT_NCP_INSN
);
4767 gen_helper_raise_exception(r_const
);
4768 tcg_temp_free(r_const
);
/* Translate one SPARC translation block into TCG ops.
 *
 * NOTE(review): this file is a damaged extraction — logical lines are
 * split across physical lines, the original file's line numbers are
 * fused into the text, and several original lines (braces, exit labels,
 * some conditionals) are missing.  Comments below describe only what the
 * surviving fragments show; do not treat this text as compilable.
 *
 * Parameters (per the visible signature):
 *   tb  - TranslationBlock to fill with generated ops.
 *   spc - non-zero when called from the PC-search path (records per-op
 *         pc/npc metadata in the gen_opc_* arrays) — presumably; the
 *         branch testing it is among the dropped lines.  TODO confirm.
 *   env - CPU state used to seed the DisasContext.
 */
4775 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4776 int spc
, CPUSPARCState
*env
)
4778 target_ulong pc_start
, last_pc
;
4779 uint16_t *gen_opc_end
;
4780 DisasContext dc1
, *dc
= &dc1
;
/* Zero the whole disassembly context before filling it in. */
4786 memset(dc
, 0, sizeof(DisasContext
));
/* Seed npc from the TB's cs_base (SPARC stores npc there) and pick the
 * MMU index for memory accesses generated below. */
4791 dc
->npc
= (target_ulong
) tb
->cs_base
;
4792 dc
->mem_idx
= cpu_mmu_index(env
);
/* FPU availability: only meaningful when the CPU model has the FLOAT
 * feature; otherwise force it off. */
4794 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4795 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4797 dc
->fpu_enabled
= 0;
4798 #ifdef TARGET_SPARC64
/* V9 only: PS_AM in pstate forces 32-bit address masking. */
4799 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4801 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
/* Allocate the shared TCG temporaries used by disas_sparc_insn();
 * cpu_dst/cpu_val/cpu_addr are *local* temps (live across branches). */
4803 cpu_tmp0
= tcg_temp_new();
4804 cpu_tmp32
= tcg_temp_new_i32();
4805 cpu_tmp64
= tcg_temp_new_i64();
4807 cpu_dst
= tcg_temp_local_new();
4810 cpu_val
= tcg_temp_local_new();
4811 cpu_addr
= tcg_temp_local_new();
/* Instruction budget for this TB, from the icount cflags field. */
4814 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4816 max_insns
= CF_COUNT_MASK
;
/* Main translation loop (the do { ... } while tail is at 4867 below).
 * First, stop at any breakpoint set on the current pc. */
4819 if (unlikely(!TAILQ_EMPTY(&env
->breakpoints
))) {
4820 TAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4821 if (bp
->pc
== dc
->pc
) {
4822 if (dc
->pc
!= pc_start
)
4823 save_state(dc
, cpu_cond
);
/* PC-search path: record pc/npc/icount for every generated op so the
 * exception machinery can map host PC back to guest pc/npc. */
4833 fprintf(logfile
, "Search PC...\n");
4834 j
= gen_opc_ptr
- gen_opc_buf
;
4838 gen_opc_instr_start
[lj
++] = 0;
4839 gen_opc_pc
[lj
] = dc
->pc
;
4840 gen_opc_npc
[lj
] = dc
->npc
;
4841 gen_opc_instr_start
[lj
] = 1;
4842 gen_opc_icount
[lj
] = num_insns
;
/* With CF_LAST_IO, the final insn of the TB must run with io started. */
4845 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
/* Translate exactly one guest instruction. */
4848 disas_sparc_insn(dc
);
4853 /* if the next PC is different, we abort now */
4854 if (dc
->pc
!= (last_pc
+ 4))
4856 /* if we reach a page boundary, we stop generation so that the
4857 PC of a TT_TFAULT exception is always in the right page */
4858 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4860 /* if single step mode, we generate only one instruction and
4861 generate an exception */
4862 if (env
->singlestep_enabled
) {
4863 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
/* Loop while there is op-buffer room, we stay well inside the page,
 * and the instruction budget is not exhausted. */
4867 } while ((gen_opc_ptr
< gen_opc_end
) &&
4868 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4869 num_insns
< max_insns
);
/* Release the temporaries allocated above (reverse order). */
4872 tcg_temp_free(cpu_addr
);
4873 tcg_temp_free(cpu_val
);
4874 tcg_temp_free(cpu_dst
);
4875 tcg_temp_free_i64(cpu_tmp64
);
4876 tcg_temp_free_i32(cpu_tmp32
);
4877 tcg_temp_free(cpu_tmp0
);
4878 if (tb
->cflags
& CF_LAST_IO
)
/* TB epilogue: if both pc and npc are static we can chain directly to
 * the successor TB; otherwise store them back for the dynamic exit. */
4881 if (dc
->pc
!= DYNAMIC_PC
&&
4882 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4883 /* static PC and NPC: we can use direct chaining */
4884 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4886 if (dc
->pc
!= DYNAMIC_PC
)
4887 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4888 save_npc(dc
, cpu_cond
);
4892 gen_icount_end(tb
, num_insns
);
4893 *gen_opc_ptr
= INDEX_op_end
;
/* PC-search bookkeeping: pad gen_opc_instr_start and export the two
 * possible jump targets for JUMP_PC resolution in gen_pc_load(). */
4895 j
= gen_opc_ptr
- gen_opc_buf
;
4898 gen_opc_instr_start
[lj
++] = 0;
4904 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4905 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
/* Record the guest-code size and insn count of the finished TB. */
4907 tb
->size
= last_pc
+ 4 - pc_start
;
4908 tb
->icount
= num_insns
;
/* Optional disassembly trace of the translated guest code. */
4911 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4912 fprintf(logfile
, "--------------\n");
4913 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4914 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4915 fprintf(logfile
, "\n");
4920 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4922 gen_intermediate_code_internal(tb
, 0, env
);
4925 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4927 gen_intermediate_code_internal(tb
, 1, env
);
/* One-time translator initialization: create the TCG global variables
 * that alias fields of CPUState (registers, condition codes, pc/npc,
 * FP registers, ...) and register the helper functions.
 *
 * NOTE(review): damaged extraction — the `inited` guard, gregnames
 * entries g1..g7, and the trailing name-string argument of most
 * tcg_global_mem_new* calls were dropped; the fused numbers are the
 * original file's line numbers, not code. */
4930 void gen_intermediate_code_init(CPUSPARCState
*env
)
/* Debug names for the global registers; g0 is the hardwired zero
 * register and gets no TCG global. */
4934 static const char * const gregnames
[8] = {
4935 NULL
, // g0 not used
/* Debug names for all 64 single-precision FP registers. */
4944 static const char * const fregnames
[64] = {
4945 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4946 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4947 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4948 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4949 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4950 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4951 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4952 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4955 /* init various static tables */
/* env itself lives in a fixed host register (TCG_AREG0); everything
 * else below is a memory-backed global at an offset inside CPUState. */
4959 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
4960 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
4961 offsetof(CPUState
, regwptr
),
4963 #ifdef TARGET_SPARC64
/* SPARC64-only globals: extended condition codes, ASI, FPRS, GSR,
 * the three tick comparators, and the hypervisor registers. */
4964 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
4966 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
4968 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
4970 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
4972 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4973 offsetof(CPUState
, tick_cmpr
),
4975 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4976 offsetof(CPUState
, stick_cmpr
),
4978 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4979 offsetof(CPUState
, hstick_cmpr
),
4981 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
4983 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
4985 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
4987 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
4988 offsetof(CPUState
, ssr
), "ssr");
4989 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
4990 offsetof(CPUState
, version
), "ver");
4991 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
4992 offsetof(CPUState
, softint
),
/* SPARC32 window-invalid mask. */
4995 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
/* Globals shared by both variants: condition source, cc inputs/output,
 * psr, fsr, pc/npc, and the Y register. */
4998 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
5000 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
5002 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
5003 offsetof(CPUState
, cc_src2
),
5005 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
5007 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
5009 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
5011 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
5013 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
5015 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
5016 #ifndef CONFIG_USER_ONLY
/* Trap base register only exists for system emulation. */
5017 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
/* g1..g7 as globals (g0 is skipped — always reads as zero). */
5020 for (i
= 1; i
< 8; i
++)
5021 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5022 offsetof(CPUState
, gregs
[i
]),
/* One i32 global per single-precision FP register. */
5024 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5025 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
5026 offsetof(CPUState
, fpr
[i
]),
5029 /* register helpers */
5031 #define GEN_HELPER 2
5036 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
5037 unsigned long searched_pc
, int pc_pos
, void *puc
)
5040 env
->pc
= gen_opc_pc
[pc_pos
];
5041 npc
= gen_opc_npc
[pc_pos
];
5043 /* dynamic NPC: already stored */
5044 } else if (npc
== 2) {
5045 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
5046 /* jump PC: use T2 and the jump targets of the translation */
5048 env
->npc
= gen_opc_jump_pc
[0];
5050 env
->npc
= gen_opc_jump_pc
[1];