4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env
, cpu_regwptr
;
45 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
, cpu_cc_op
;
46 static TCGv_i32 cpu_psr
;
47 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
54 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
56 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
57 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
58 static TCGv_i32 cpu_softint
;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32
;
65 static TCGv_i64 cpu_tmp64
;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
69 #include "gen-icount.h"
71 typedef struct DisasContext
{
72 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
78 int address_mask_32bit
;
79 uint32_t cc_op
; /* current CC operation */
80 struct TranslationBlock
*tb
;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low 'len' bits of x to a full int.
   Shifts through unsigned to avoid left-shift-into-sign-bit UB; the
   arithmetic right shift of a negative value is relied on as on all
   supported hosts. */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src
)
117 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
118 offsetof(CPU_DoubleU
, l
.upper
));
119 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
120 offsetof(CPU_DoubleU
, l
.lower
));
123 static void gen_op_load_fpr_DT1(unsigned int src
)
125 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
126 offsetof(CPU_DoubleU
, l
.upper
));
127 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
128 offsetof(CPU_DoubleU
, l
.lower
));
131 static void gen_op_store_DT0_fpr(unsigned int dst
)
133 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
134 offsetof(CPU_DoubleU
, l
.upper
));
135 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
136 offsetof(CPU_DoubleU
, l
.lower
));
139 static void gen_op_load_fpr_QT0(unsigned int src
)
141 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
142 offsetof(CPU_QuadU
, l
.upmost
));
143 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
144 offsetof(CPU_QuadU
, l
.upper
));
145 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
146 offsetof(CPU_QuadU
, l
.lower
));
147 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
148 offsetof(CPU_QuadU
, l
.lowest
));
151 static void gen_op_load_fpr_QT1(unsigned int src
)
153 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
154 offsetof(CPU_QuadU
, l
.upmost
));
155 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
156 offsetof(CPU_QuadU
, l
.upper
));
157 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
158 offsetof(CPU_QuadU
, l
.lower
));
159 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
160 offsetof(CPU_QuadU
, l
.lowest
));
163 static void gen_op_store_QT0_fpr(unsigned int dst
)
165 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
166 offsetof(CPU_QuadU
, l
.upmost
));
167 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
168 offsetof(CPU_QuadU
, l
.upper
));
169 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
170 offsetof(CPU_QuadU
, l
.lower
));
171 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
172 offsetof(CPU_QuadU
, l
.lowest
));
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
189 #ifdef TARGET_SPARC64
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
193 #define AM_CHECK(dc) (1)
197 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
199 #ifdef TARGET_SPARC64
201 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
205 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
208 tcg_gen_movi_tl(tn
, 0);
210 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
212 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
216 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
221 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
223 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
227 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
228 target_ulong pc
, target_ulong npc
)
230 TranslationBlock
*tb
;
233 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
234 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num
);
237 tcg_gen_movi_tl(cpu_pc
, pc
);
238 tcg_gen_movi_tl(cpu_npc
, npc
);
239 tcg_gen_exit_tb((long)tb
+ tb_num
);
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc
, pc
);
243 tcg_gen_movi_tl(cpu_npc
, npc
);
249 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
251 tcg_gen_extu_i32_tl(reg
, src
);
252 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
253 tcg_gen_andi_tl(reg
, reg
, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
258 tcg_gen_extu_i32_tl(reg
, src
);
259 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
260 tcg_gen_andi_tl(reg
, reg
, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
265 tcg_gen_extu_i32_tl(reg
, src
);
266 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
267 tcg_gen_andi_tl(reg
, reg
, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
272 tcg_gen_extu_i32_tl(reg
, src
);
273 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
274 tcg_gen_andi_tl(reg
, reg
, 0x1);
277 static inline void gen_cc_clear_icc(void)
279 tcg_gen_movi_i32(cpu_psr
, 0);
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
285 tcg_gen_movi_i32(cpu_xcc
, 0);
291 env->psr |= PSR_ZERO;
292 if ((int32_t) T0 < 0)
295 static inline void gen_cc_NZ_icc(TCGv dst
)
300 l1
= gen_new_label();
301 l2
= gen_new_label();
302 r_temp
= tcg_temp_new();
303 tcg_gen_andi_tl(r_temp
, dst
, 0xffffffffULL
);
304 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
305 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
307 tcg_gen_ext32s_tl(r_temp
, dst
);
308 tcg_gen_brcondi_tl(TCG_COND_GE
, r_temp
, 0, l2
);
309 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
311 tcg_temp_free(r_temp
);
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst
)
319 l1
= gen_new_label();
320 l2
= gen_new_label();
321 tcg_gen_brcondi_tl(TCG_COND_NE
, dst
, 0, l1
);
322 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
324 tcg_gen_brcondi_tl(TCG_COND_GE
, dst
, 0, l2
);
325 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
332 env->psr |= PSR_CARRY;
334 static inline void gen_cc_C_add_icc(TCGv dst
, TCGv src1
)
336 TCGv r_temp1
, r_temp2
;
339 l1
= gen_new_label();
340 r_temp1
= tcg_temp_new();
341 r_temp2
= tcg_temp_new();
342 tcg_gen_andi_tl(r_temp1
, dst
, 0xffffffffULL
);
343 tcg_gen_andi_tl(r_temp2
, src1
, 0xffffffffULL
);
344 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
345 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
347 tcg_temp_free(r_temp1
);
348 tcg_temp_free(r_temp2
);
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst
, TCGv src1
)
356 l1
= gen_new_label();
357 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l1
);
358 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
364 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
367 static inline void gen_cc_V_add_icc(TCGv dst
, TCGv src1
, TCGv src2
)
371 r_temp
= tcg_temp_new();
372 tcg_gen_xor_tl(r_temp
, src1
, src2
);
373 tcg_gen_not_tl(r_temp
, r_temp
);
374 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
375 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
376 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
377 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
378 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
379 tcg_temp_free(r_temp
);
380 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
388 r_temp
= tcg_temp_new();
389 tcg_gen_xor_tl(r_temp
, src1
, src2
);
390 tcg_gen_not_tl(r_temp
, r_temp
);
391 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
392 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
393 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
394 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
395 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
396 tcg_temp_free(r_temp
);
397 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
401 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
407 l1
= gen_new_label();
409 r_temp
= tcg_temp_new();
410 tcg_gen_xor_tl(r_temp
, src1
, src2
);
411 tcg_gen_not_tl(r_temp
, r_temp
);
412 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
413 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
414 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
415 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
416 r_const
= tcg_const_i32(TT_TOVF
);
417 gen_helper_raise_exception(r_const
);
418 tcg_temp_free_i32(r_const
);
420 tcg_temp_free(r_temp
);
423 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
428 l1
= gen_new_label();
429 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
430 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
431 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
432 r_const
= tcg_const_i32(TT_TOVF
);
433 gen_helper_raise_exception(r_const
);
434 tcg_temp_free_i32(r_const
);
438 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
440 tcg_gen_mov_tl(cpu_cc_src
, src1
);
441 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
442 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
443 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
446 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
448 tcg_gen_mov_tl(cpu_cc_src
, src1
);
449 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
450 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
451 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
454 static inline void gen_op_addxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
456 tcg_gen_mov_tl(cpu_cc_src
, src1
);
457 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
458 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
459 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
460 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
461 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
464 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
466 tcg_gen_mov_tl(cpu_cc_src
, src1
);
467 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
468 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
469 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
470 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
471 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
474 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
476 tcg_gen_mov_tl(cpu_cc_src
, src1
);
477 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
478 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
479 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
482 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
484 tcg_gen_mov_tl(cpu_cc_src
, src1
);
485 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
486 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
487 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
488 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
489 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
492 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
498 l1
= gen_new_label();
500 r_temp
= tcg_temp_new();
501 tcg_gen_xor_tl(r_temp
, src1
, src2
);
502 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
503 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
504 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
505 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
506 r_const
= tcg_const_i32(TT_TOVF
);
507 gen_helper_raise_exception(r_const
);
508 tcg_temp_free_i32(r_const
);
510 tcg_temp_free(r_temp
);
513 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
515 tcg_gen_mov_tl(cpu_cc_src
, src1
);
516 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
518 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
519 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
520 dc
->cc_op
= CC_OP_LOGIC
;
522 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
523 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
524 dc
->cc_op
= CC_OP_SUB
;
526 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
529 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
531 tcg_gen_mov_tl(cpu_cc_src
, src1
);
532 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
533 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
534 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
537 static inline void gen_op_subxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
539 tcg_gen_mov_tl(cpu_cc_src
, src1
);
540 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
541 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
542 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
543 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
544 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
547 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
549 tcg_gen_mov_tl(cpu_cc_src
, src1
);
550 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
551 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
552 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
553 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
554 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
557 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
559 tcg_gen_mov_tl(cpu_cc_src
, src1
);
560 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
561 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
562 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
565 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
567 tcg_gen_mov_tl(cpu_cc_src
, src1
);
568 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
569 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
570 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
571 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
572 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
575 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
580 l1
= gen_new_label();
581 r_temp
= tcg_temp_new();
587 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
588 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
589 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
590 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
591 tcg_gen_movi_tl(cpu_cc_src2
, 0);
595 // env->y = (b2 << 31) | (env->y >> 1);
596 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
597 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
598 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
599 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
600 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
601 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
604 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
605 gen_mov_reg_V(r_temp
, cpu_psr
);
606 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
607 tcg_temp_free(r_temp
);
609 // T0 = (b1 << 31) | (T0 >> 1);
611 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
612 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
613 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
615 /* do addition and update flags */
616 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
619 gen_cc_NZ_icc(cpu_cc_dst
);
620 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
621 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
622 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
625 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
627 TCGv_i64 r_temp
, r_temp2
;
629 r_temp
= tcg_temp_new_i64();
630 r_temp2
= tcg_temp_new_i64();
632 tcg_gen_extu_tl_i64(r_temp
, src2
);
633 tcg_gen_extu_tl_i64(r_temp2
, src1
);
634 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
636 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
637 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
638 tcg_temp_free_i64(r_temp
);
639 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
640 #ifdef TARGET_SPARC64
641 tcg_gen_mov_i64(dst
, r_temp2
);
643 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
645 tcg_temp_free_i64(r_temp2
);
648 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
650 TCGv_i64 r_temp
, r_temp2
;
652 r_temp
= tcg_temp_new_i64();
653 r_temp2
= tcg_temp_new_i64();
655 tcg_gen_ext_tl_i64(r_temp
, src2
);
656 tcg_gen_ext_tl_i64(r_temp2
, src1
);
657 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
659 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
660 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
661 tcg_temp_free_i64(r_temp
);
662 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
663 #ifdef TARGET_SPARC64
664 tcg_gen_mov_i64(dst
, r_temp2
);
666 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
668 tcg_temp_free_i64(r_temp2
);
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if divisor is zero.  (Label restored.) */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* 64-bit signed divide; INT64_MIN / -1 is pinned to INT64_MIN since the
   host div instruction would trap on that overflow case.
   (Labels, branch and #endif restored.) */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
705 static inline void gen_op_eval_ba(TCGv dst
)
707 tcg_gen_movi_tl(dst
, 1);
711 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
713 gen_mov_reg_Z(dst
, src
);
717 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
719 gen_mov_reg_N(cpu_tmp0
, src
);
720 gen_mov_reg_V(dst
, src
);
721 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
722 gen_mov_reg_Z(cpu_tmp0
, src
);
723 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
727 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
729 gen_mov_reg_V(cpu_tmp0
, src
);
730 gen_mov_reg_N(dst
, src
);
731 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
735 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
737 gen_mov_reg_Z(cpu_tmp0
, src
);
738 gen_mov_reg_C(dst
, src
);
739 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
743 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
745 gen_mov_reg_C(dst
, src
);
749 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
751 gen_mov_reg_V(dst
, src
);
755 static inline void gen_op_eval_bn(TCGv dst
)
757 tcg_gen_movi_tl(dst
, 0);
761 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
763 gen_mov_reg_N(dst
, src
);
767 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
769 gen_mov_reg_Z(dst
, src
);
770 tcg_gen_xori_tl(dst
, dst
, 0x1);
774 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
776 gen_mov_reg_N(cpu_tmp0
, src
);
777 gen_mov_reg_V(dst
, src
);
778 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
779 gen_mov_reg_Z(cpu_tmp0
, src
);
780 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
781 tcg_gen_xori_tl(dst
, dst
, 0x1);
785 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
787 gen_mov_reg_V(cpu_tmp0
, src
);
788 gen_mov_reg_N(dst
, src
);
789 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
790 tcg_gen_xori_tl(dst
, dst
, 0x1);
794 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
796 gen_mov_reg_Z(cpu_tmp0
, src
);
797 gen_mov_reg_C(dst
, src
);
798 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
799 tcg_gen_xori_tl(dst
, dst
, 0x1);
803 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
805 gen_mov_reg_C(dst
, src
);
806 tcg_gen_xori_tl(dst
, dst
, 0x1);
810 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
812 gen_mov_reg_N(dst
, src
);
813 tcg_gen_xori_tl(dst
, dst
, 0x1);
817 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
819 gen_mov_reg_V(dst
, src
);
820 tcg_gen_xori_tl(dst
, dst
, 0x1);
824 FPSR bit field FCC1 | FCC0:
830 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
831 unsigned int fcc_offset
)
833 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
834 tcg_gen_andi_tl(reg
, reg
, 0x1);
837 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
838 unsigned int fcc_offset
)
840 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
841 tcg_gen_andi_tl(reg
, reg
, 0x1);
845 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
846 unsigned int fcc_offset
)
848 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
849 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
850 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
853 // 1 or 2: FCC0 ^ FCC1
854 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
855 unsigned int fcc_offset
)
857 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
858 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
859 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
863 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
864 unsigned int fcc_offset
)
866 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
870 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
871 unsigned int fcc_offset
)
873 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
874 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
875 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
876 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
880 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
881 unsigned int fcc_offset
)
883 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
887 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
888 unsigned int fcc_offset
)
890 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
891 tcg_gen_xori_tl(dst
, dst
, 0x1);
892 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
893 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
897 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
898 unsigned int fcc_offset
)
900 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
901 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
902 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
906 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
907 unsigned int fcc_offset
)
909 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
910 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
911 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
912 tcg_gen_xori_tl(dst
, dst
, 0x1);
915 // 0 or 3: !(FCC0 ^ FCC1)
916 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
917 unsigned int fcc_offset
)
919 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
920 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
921 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
922 tcg_gen_xori_tl(dst
, dst
, 0x1);
926 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
927 unsigned int fcc_offset
)
929 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
930 tcg_gen_xori_tl(dst
, dst
, 0x1);
933 // !1: !(FCC0 & !FCC1)
934 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
935 unsigned int fcc_offset
)
937 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
938 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
939 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
940 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
941 tcg_gen_xori_tl(dst
, dst
, 0x1);
945 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
946 unsigned int fcc_offset
)
948 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
949 tcg_gen_xori_tl(dst
, dst
, 0x1);
952 // !2: !(!FCC0 & FCC1)
953 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
954 unsigned int fcc_offset
)
956 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
957 tcg_gen_xori_tl(dst
, dst
, 0x1);
958 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
959 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
960 tcg_gen_xori_tl(dst
, dst
, 0x1);
963 // !3: !(FCC0 & FCC1)
964 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
965 unsigned int fcc_offset
)
967 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
968 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
969 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
970 tcg_gen_xori_tl(dst
, dst
, 0x1);
973 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
974 target_ulong pc2
, TCGv r_cond
)
978 l1
= gen_new_label();
980 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
982 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
985 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
988 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
989 target_ulong pc2
, TCGv r_cond
)
993 l1
= gen_new_label();
995 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
997 gen_goto_tb(dc
, 0, pc2
, pc1
);
1000 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1003 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1008 l1
= gen_new_label();
1009 l2
= gen_new_label();
1011 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1013 tcg_gen_movi_tl(cpu_npc
, npc1
);
1017 tcg_gen_movi_tl(cpu_npc
, npc2
);
1021 /* call this function before using the condition register as it may
1022 have been set for a jump */
1023 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1025 if (dc
->npc
== JUMP_PC
) {
1026 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1027 dc
->npc
= DYNAMIC_PC
;
1031 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1033 if (dc
->npc
== JUMP_PC
) {
1034 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1035 dc
->npc
= DYNAMIC_PC
;
1036 } else if (dc
->npc
!= DYNAMIC_PC
) {
1037 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1041 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1043 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1047 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1049 if (dc
->npc
== JUMP_PC
) {
1050 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1051 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1052 dc
->pc
= DYNAMIC_PC
;
1053 } else if (dc
->npc
== DYNAMIC_PC
) {
1054 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1055 dc
->pc
= DYNAMIC_PC
;
1061 static inline void gen_op_next_insn(void)
1063 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1064 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1067 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
1072 #ifdef TARGET_SPARC64
1080 switch (dc
->cc_op
) {
1084 gen_helper_compute_psr();
1085 dc
->cc_op
= CC_OP_FLAGS
;
1090 gen_op_eval_bn(r_dst
);
1093 gen_op_eval_be(r_dst
, r_src
);
1096 gen_op_eval_ble(r_dst
, r_src
);
1099 gen_op_eval_bl(r_dst
, r_src
);
1102 gen_op_eval_bleu(r_dst
, r_src
);
1105 gen_op_eval_bcs(r_dst
, r_src
);
1108 gen_op_eval_bneg(r_dst
, r_src
);
1111 gen_op_eval_bvs(r_dst
, r_src
);
1114 gen_op_eval_ba(r_dst
);
1117 gen_op_eval_bne(r_dst
, r_src
);
1120 gen_op_eval_bg(r_dst
, r_src
);
1123 gen_op_eval_bge(r_dst
, r_src
);
1126 gen_op_eval_bgu(r_dst
, r_src
);
1129 gen_op_eval_bcc(r_dst
, r_src
);
1132 gen_op_eval_bpos(r_dst
, r_src
);
1135 gen_op_eval_bvc(r_dst
, r_src
);
1140 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1142 unsigned int offset
;
1162 gen_op_eval_bn(r_dst
);
1165 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1168 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1171 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1174 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1177 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1180 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1183 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1186 gen_op_eval_ba(r_dst
);
1189 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1192 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1195 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1198 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1201 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1204 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1207 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1212 #ifdef TARGET_SPARC64
1214 static const int gen_tcg_cond_reg
[8] = {
1225 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1229 l1
= gen_new_label();
1230 tcg_gen_movi_tl(r_dst
, 0);
1231 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1232 tcg_gen_movi_tl(r_dst
, 1);
1237 /* XXX: potentially incorrect if dynamic npc */
1238 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1241 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1242 target_ulong target
= dc
->pc
+ offset
;
1245 /* unconditional not taken */
1247 dc
->pc
= dc
->npc
+ 4;
1248 dc
->npc
= dc
->pc
+ 4;
1251 dc
->npc
= dc
->pc
+ 4;
1253 } else if (cond
== 0x8) {
1254 /* unconditional taken */
1257 dc
->npc
= dc
->pc
+ 4;
1263 flush_cond(dc
, r_cond
);
1264 gen_cond(r_cond
, cc
, cond
, dc
);
1266 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1270 dc
->jump_pc
[0] = target
;
1271 dc
->jump_pc
[1] = dc
->npc
+ 4;
1277 /* XXX: potentially incorrect if dynamic npc */
1278 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1281 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1282 target_ulong target
= dc
->pc
+ offset
;
1285 /* unconditional not taken */
1287 dc
->pc
= dc
->npc
+ 4;
1288 dc
->npc
= dc
->pc
+ 4;
1291 dc
->npc
= dc
->pc
+ 4;
1293 } else if (cond
== 0x8) {
1294 /* unconditional taken */
1297 dc
->npc
= dc
->pc
+ 4;
1303 flush_cond(dc
, r_cond
);
1304 gen_fcond(r_cond
, cc
, cond
);
1306 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1310 dc
->jump_pc
[0] = target
;
1311 dc
->jump_pc
[1] = dc
->npc
+ 4;
1317 #ifdef TARGET_SPARC64
1318 /* XXX: potentially incorrect if dynamic npc */
1319 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1320 TCGv r_cond
, TCGv r_reg
)
1322 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1323 target_ulong target
= dc
->pc
+ offset
;
1325 flush_cond(dc
, r_cond
);
1326 gen_cond_reg(r_cond
, cond
, r_reg
);
1328 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1332 dc
->jump_pc
[0] = target
;
1333 dc
->jump_pc
[1] = dc
->npc
+ 4;
1338 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1342 gen_helper_fcmps(r_rs1
, r_rs2
);
1345 gen_helper_fcmps_fcc1(r_rs1
, r_rs2
);
1348 gen_helper_fcmps_fcc2(r_rs1
, r_rs2
);
1351 gen_helper_fcmps_fcc3(r_rs1
, r_rs2
);
1356 static inline void gen_op_fcmpd(int fccno
)
1363 gen_helper_fcmpd_fcc1();
1366 gen_helper_fcmpd_fcc2();
1369 gen_helper_fcmpd_fcc3();
1374 static inline void gen_op_fcmpq(int fccno
)
1381 gen_helper_fcmpq_fcc1();
1384 gen_helper_fcmpq_fcc2();
1387 gen_helper_fcmpq_fcc3();
1392 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1396 gen_helper_fcmpes(r_rs1
, r_rs2
);
1399 gen_helper_fcmpes_fcc1(r_rs1
, r_rs2
);
1402 gen_helper_fcmpes_fcc2(r_rs1
, r_rs2
);
1405 gen_helper_fcmpes_fcc3(r_rs1
, r_rs2
);
/* Double-precision FP compare-with-exception, per %fcc field. */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
/* Quad-precision FP compare-with-exception, per %fcc field. */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1448 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1450 gen_helper_fcmps(r_rs1
, r_rs2
);
/* SPARC32 double compare (operands in DT0/DT1); fccno ignored.
 * NOTE(review): helper call line lost by extraction; restored by analogy
 * with the sibling SPARC32 fcmp wrappers. */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* SPARC32 quad compare (operands in QT0/QT1); fccno ignored.
 * NOTE(review): helper call line lost by extraction; restored by analogy. */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
1463 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1465 gen_helper_fcmpes(r_rs1
, r_rs2
);
/* SPARC32 double compare-with-exception; fccno ignored. */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* SPARC32 quad compare-with-exception; fccno ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1479 static inline void gen_op_fpexception_im(int fsr_flags
)
1483 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1484 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1485 r_const
= tcg_const_i32(TT_FP_EXCP
);
1486 gen_helper_raise_exception(r_const
);
1487 tcg_temp_free_i32(r_const
);
1490 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1492 #if !defined(CONFIG_USER_ONLY)
1493 if (!dc
->fpu_enabled
) {
1496 save_state(dc
, r_cond
);
1497 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1498 gen_helper_raise_exception(r_const
);
1499 tcg_temp_free_i32(r_const
);
1507 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1509 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
/* Reset the softfloat accumulated exception flags before an FP op. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1518 #ifdef TARGET_SPARC64
1519 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1525 r_asi
= tcg_temp_new_i32();
1526 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1528 asi
= GET_FIELD(insn
, 19, 26);
1529 r_asi
= tcg_const_i32(asi
);
1534 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1537 TCGv_i32 r_asi
, r_size
, r_sign
;
1539 r_asi
= gen_get_asi(insn
, addr
);
1540 r_size
= tcg_const_i32(size
);
1541 r_sign
= tcg_const_i32(sign
);
1542 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1543 tcg_temp_free_i32(r_sign
);
1544 tcg_temp_free_i32(r_size
);
1545 tcg_temp_free_i32(r_asi
);
1548 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1550 TCGv_i32 r_asi
, r_size
;
1552 r_asi
= gen_get_asi(insn
, addr
);
1553 r_size
= tcg_const_i32(size
);
1554 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1555 tcg_temp_free_i32(r_size
);
1556 tcg_temp_free_i32(r_asi
);
1559 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1561 TCGv_i32 r_asi
, r_size
, r_rd
;
1563 r_asi
= gen_get_asi(insn
, addr
);
1564 r_size
= tcg_const_i32(size
);
1565 r_rd
= tcg_const_i32(rd
);
1566 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1567 tcg_temp_free_i32(r_rd
);
1568 tcg_temp_free_i32(r_size
);
1569 tcg_temp_free_i32(r_asi
);
1572 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1574 TCGv_i32 r_asi
, r_size
, r_rd
;
1576 r_asi
= gen_get_asi(insn
, addr
);
1577 r_size
= tcg_const_i32(size
);
1578 r_rd
= tcg_const_i32(rd
);
1579 gen_helper_stf_asi(addr
, r_asi
, r_size
, r_rd
);
1580 tcg_temp_free_i32(r_rd
);
1581 tcg_temp_free_i32(r_size
);
1582 tcg_temp_free_i32(r_asi
);
1585 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1587 TCGv_i32 r_asi
, r_size
, r_sign
;
1589 r_asi
= gen_get_asi(insn
, addr
);
1590 r_size
= tcg_const_i32(4);
1591 r_sign
= tcg_const_i32(0);
1592 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1593 tcg_temp_free_i32(r_sign
);
1594 gen_helper_st_asi(addr
, dst
, r_asi
, r_size
);
1595 tcg_temp_free_i32(r_size
);
1596 tcg_temp_free_i32(r_asi
);
1597 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1600 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1602 TCGv_i32 r_asi
, r_rd
;
1604 r_asi
= gen_get_asi(insn
, addr
);
1605 r_rd
= tcg_const_i32(rd
);
1606 gen_helper_ldda_asi(addr
, r_asi
, r_rd
);
1607 tcg_temp_free_i32(r_rd
);
1608 tcg_temp_free_i32(r_asi
);
1611 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1613 TCGv_i32 r_asi
, r_size
;
1615 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1616 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1617 r_asi
= gen_get_asi(insn
, addr
);
1618 r_size
= tcg_const_i32(8);
1619 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1620 tcg_temp_free_i32(r_size
);
1621 tcg_temp_free_i32(r_asi
);
1624 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1630 r_val1
= tcg_temp_new();
1631 gen_movl_reg_TN(rd
, r_val1
);
1632 r_asi
= gen_get_asi(insn
, addr
);
1633 gen_helper_cas_asi(dst
, addr
, r_val1
, val2
, r_asi
);
1634 tcg_temp_free_i32(r_asi
);
1635 tcg_temp_free(r_val1
);
1638 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1643 gen_movl_reg_TN(rd
, cpu_tmp64
);
1644 r_asi
= gen_get_asi(insn
, addr
);
1645 gen_helper_casx_asi(dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1646 tcg_temp_free_i32(r_asi
);
1649 #elif !defined(CONFIG_USER_ONLY)
1651 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1654 TCGv_i32 r_asi
, r_size
, r_sign
;
1656 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1657 r_size
= tcg_const_i32(size
);
1658 r_sign
= tcg_const_i32(sign
);
1659 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1660 tcg_temp_free(r_sign
);
1661 tcg_temp_free(r_size
);
1662 tcg_temp_free(r_asi
);
1663 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1666 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1668 TCGv_i32 r_asi
, r_size
;
1670 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1671 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1672 r_size
= tcg_const_i32(size
);
1673 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1674 tcg_temp_free(r_size
);
1675 tcg_temp_free(r_asi
);
1678 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1680 TCGv_i32 r_asi
, r_size
, r_sign
;
1683 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1684 r_size
= tcg_const_i32(4);
1685 r_sign
= tcg_const_i32(0);
1686 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1687 tcg_temp_free(r_sign
);
1688 r_val
= tcg_temp_new_i64();
1689 tcg_gen_extu_tl_i64(r_val
, dst
);
1690 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1691 tcg_temp_free_i64(r_val
);
1692 tcg_temp_free(r_size
);
1693 tcg_temp_free(r_asi
);
1694 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1697 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1699 TCGv_i32 r_asi
, r_size
, r_sign
;
1701 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1702 r_size
= tcg_const_i32(8);
1703 r_sign
= tcg_const_i32(0);
1704 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1705 tcg_temp_free(r_sign
);
1706 tcg_temp_free(r_size
);
1707 tcg_temp_free(r_asi
);
1708 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1709 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1710 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1711 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1712 gen_movl_TN_reg(rd
, hi
);
1715 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1717 TCGv_i32 r_asi
, r_size
;
1719 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1720 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1721 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1722 r_size
= tcg_const_i32(8);
1723 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1724 tcg_temp_free(r_size
);
1725 tcg_temp_free(r_asi
);
1729 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1730 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1733 TCGv_i32 r_asi
, r_size
;
1735 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1737 r_val
= tcg_const_i64(0xffULL
);
1738 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1739 r_size
= tcg_const_i32(1);
1740 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1741 tcg_temp_free_i32(r_size
);
1742 tcg_temp_free_i32(r_asi
);
1743 tcg_temp_free_i64(r_val
);
1747 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1752 rs1
= GET_FIELD(insn
, 13, 17);
1754 r_rs1
= tcg_const_tl(0); // XXX how to free?
1756 r_rs1
= cpu_gregs
[rs1
];
1758 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1762 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1766 if (IS_IMM
) { /* immediate */
1769 simm
= GET_FIELDs(insn
, 19, 31);
1770 r_rs2
= tcg_const_tl(simm
); // XXX how to free?
1771 } else { /* register */
1774 rs2
= GET_FIELD(insn
, 27, 31);
1776 r_rs2
= tcg_const_tl(0); // XXX how to free?
1778 r_rs2
= cpu_gregs
[rs2
];
1780 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
/* Bail out of disas_sparc_insn when the CPU model lacks a feature.
 * NOTE(review): the goto-target lines were lost by extraction; restored
 * to the conventional illegal_insn / nfpu_insn labels — confirm. */
#define CHECK_IU_FEATURE(dc, FEATURE)                       \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))   \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))   \
        goto nfpu_insn;
1792 /* before an instruction, dc->pc must be static */
1793 static void disas_sparc_insn(DisasContext
* dc
)
1795 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1798 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
)))
1799 tcg_gen_debug_insn_start(dc
->pc
);
1800 insn
= ldl_code(dc
->pc
);
1801 opc
= GET_FIELD(insn
, 0, 1);
1803 rd
= GET_FIELD(insn
, 2, 6);
1805 cpu_src1
= tcg_temp_new(); // const
1806 cpu_src2
= tcg_temp_new(); // const
1809 case 0: /* branches/sethi */
1811 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1814 #ifdef TARGET_SPARC64
1815 case 0x1: /* V9 BPcc */
1819 target
= GET_FIELD_SP(insn
, 0, 18);
1820 target
= sign_extend(target
, 18);
1822 cc
= GET_FIELD_SP(insn
, 20, 21);
1824 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1826 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1831 case 0x3: /* V9 BPr */
1833 target
= GET_FIELD_SP(insn
, 0, 13) |
1834 (GET_FIELD_SP(insn
, 20, 21) << 14);
1835 target
= sign_extend(target
, 16);
1837 cpu_src1
= get_src1(insn
, cpu_src1
);
1838 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1841 case 0x5: /* V9 FBPcc */
1843 int cc
= GET_FIELD_SP(insn
, 20, 21);
1844 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1846 target
= GET_FIELD_SP(insn
, 0, 18);
1847 target
= sign_extend(target
, 19);
1849 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1853 case 0x7: /* CBN+x */
1858 case 0x2: /* BN+x */
1860 target
= GET_FIELD(insn
, 10, 31);
1861 target
= sign_extend(target
, 22);
1863 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1866 case 0x6: /* FBN+x */
1868 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1870 target
= GET_FIELD(insn
, 10, 31);
1871 target
= sign_extend(target
, 22);
1873 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1876 case 0x4: /* SETHI */
1878 uint32_t value
= GET_FIELD(insn
, 10, 31);
1881 r_const
= tcg_const_tl(value
<< 10);
1882 gen_movl_TN_reg(rd
, r_const
);
1883 tcg_temp_free(r_const
);
1886 case 0x0: /* UNIMPL */
1895 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1898 r_const
= tcg_const_tl(dc
->pc
);
1899 gen_movl_TN_reg(15, r_const
);
1900 tcg_temp_free(r_const
);
1902 gen_mov_pc_npc(dc
, cpu_cond
);
1906 case 2: /* FPU & Logical Operations */
1908 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1909 if (xop
== 0x3a) { /* generate trap */
1912 cpu_src1
= get_src1(insn
, cpu_src1
);
1914 rs2
= GET_FIELD(insn
, 25, 31);
1915 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
1917 rs2
= GET_FIELD(insn
, 27, 31);
1919 gen_movl_reg_TN(rs2
, cpu_src2
);
1920 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
1922 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
1924 cond
= GET_FIELD(insn
, 3, 6);
1926 save_state(dc
, cpu_cond
);
1927 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
1929 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
1931 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
1932 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
1933 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
1934 gen_helper_raise_exception(cpu_tmp32
);
1935 } else if (cond
!= 0) {
1936 TCGv r_cond
= tcg_temp_new();
1938 #ifdef TARGET_SPARC64
1940 int cc
= GET_FIELD_SP(insn
, 11, 12);
1942 save_state(dc
, cpu_cond
);
1944 gen_cond(r_cond
, 0, cond
, dc
);
1946 gen_cond(r_cond
, 1, cond
, dc
);
1950 save_state(dc
, cpu_cond
);
1951 gen_cond(r_cond
, 0, cond
, dc
);
1953 l1
= gen_new_label();
1954 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1956 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
1958 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
1960 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
1961 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
1962 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
1963 gen_helper_raise_exception(cpu_tmp32
);
1966 tcg_temp_free(r_cond
);
1972 } else if (xop
== 0x28) {
1973 rs1
= GET_FIELD(insn
, 13, 17);
1976 #ifndef TARGET_SPARC64
1977 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1978 manual, rdy on the microSPARC
1980 case 0x0f: /* stbar in the SPARCv8 manual,
1981 rdy on the microSPARC II */
1982 case 0x10 ... 0x1f: /* implementation-dependent in the
1983 SPARCv8 manual, rdy on the
1986 gen_movl_TN_reg(rd
, cpu_y
);
1988 #ifdef TARGET_SPARC64
1989 case 0x2: /* V9 rdccr */
1990 gen_helper_compute_psr();
1991 gen_helper_rdccr(cpu_dst
);
1992 gen_movl_TN_reg(rd
, cpu_dst
);
1994 case 0x3: /* V9 rdasi */
1995 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
1996 gen_movl_TN_reg(rd
, cpu_dst
);
1998 case 0x4: /* V9 rdtick */
2002 r_tickptr
= tcg_temp_new_ptr();
2003 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2004 offsetof(CPUState
, tick
));
2005 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2006 tcg_temp_free_ptr(r_tickptr
);
2007 gen_movl_TN_reg(rd
, cpu_dst
);
2010 case 0x5: /* V9 rdpc */
2014 r_const
= tcg_const_tl(dc
->pc
);
2015 gen_movl_TN_reg(rd
, r_const
);
2016 tcg_temp_free(r_const
);
2019 case 0x6: /* V9 rdfprs */
2020 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2021 gen_movl_TN_reg(rd
, cpu_dst
);
2023 case 0xf: /* V9 membar */
2024 break; /* no effect */
2025 case 0x13: /* Graphics Status */
2026 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2028 gen_movl_TN_reg(rd
, cpu_gsr
);
2030 case 0x16: /* Softint */
2031 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2032 gen_movl_TN_reg(rd
, cpu_dst
);
2034 case 0x17: /* Tick compare */
2035 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2037 case 0x18: /* System tick */
2041 r_tickptr
= tcg_temp_new_ptr();
2042 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2043 offsetof(CPUState
, stick
));
2044 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2045 tcg_temp_free_ptr(r_tickptr
);
2046 gen_movl_TN_reg(rd
, cpu_dst
);
2049 case 0x19: /* System tick compare */
2050 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2052 case 0x10: /* Performance Control */
2053 case 0x11: /* Performance Instrumentation Counter */
2054 case 0x12: /* Dispatch Control */
2055 case 0x14: /* Softint set, WO */
2056 case 0x15: /* Softint clear, WO */
2061 #if !defined(CONFIG_USER_ONLY)
2062 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2063 #ifndef TARGET_SPARC64
2064 if (!supervisor(dc
))
2066 gen_helper_compute_psr();
2067 dc
->cc_op
= CC_OP_FLAGS
;
2068 gen_helper_rdpsr(cpu_dst
);
2070 CHECK_IU_FEATURE(dc
, HYPV
);
2071 if (!hypervisor(dc
))
2073 rs1
= GET_FIELD(insn
, 13, 17);
2076 // gen_op_rdhpstate();
2079 // gen_op_rdhtstate();
2082 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2085 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2088 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2090 case 31: // hstick_cmpr
2091 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2097 gen_movl_TN_reg(rd
, cpu_dst
);
2099 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2100 if (!supervisor(dc
))
2102 #ifdef TARGET_SPARC64
2103 rs1
= GET_FIELD(insn
, 13, 17);
2109 r_tsptr
= tcg_temp_new_ptr();
2110 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2111 offsetof(CPUState
, tsptr
));
2112 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2113 offsetof(trap_state
, tpc
));
2114 tcg_temp_free_ptr(r_tsptr
);
2121 r_tsptr
= tcg_temp_new_ptr();
2122 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2123 offsetof(CPUState
, tsptr
));
2124 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2125 offsetof(trap_state
, tnpc
));
2126 tcg_temp_free_ptr(r_tsptr
);
2133 r_tsptr
= tcg_temp_new_ptr();
2134 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2135 offsetof(CPUState
, tsptr
));
2136 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2137 offsetof(trap_state
, tstate
));
2138 tcg_temp_free_ptr(r_tsptr
);
2145 r_tsptr
= tcg_temp_new_ptr();
2146 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2147 offsetof(CPUState
, tsptr
));
2148 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2149 offsetof(trap_state
, tt
));
2150 tcg_temp_free_ptr(r_tsptr
);
2151 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2158 r_tickptr
= tcg_temp_new_ptr();
2159 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2160 offsetof(CPUState
, tick
));
2161 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2162 gen_movl_TN_reg(rd
, cpu_tmp0
);
2163 tcg_temp_free_ptr(r_tickptr
);
2167 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2170 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2171 offsetof(CPUSPARCState
, pstate
));
2172 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2175 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2176 offsetof(CPUSPARCState
, tl
));
2177 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2180 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2181 offsetof(CPUSPARCState
, psrpil
));
2182 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2185 gen_helper_rdcwp(cpu_tmp0
);
2188 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2189 offsetof(CPUSPARCState
, cansave
));
2190 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2192 case 11: // canrestore
2193 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2194 offsetof(CPUSPARCState
, canrestore
));
2195 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2197 case 12: // cleanwin
2198 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2199 offsetof(CPUSPARCState
, cleanwin
));
2200 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2202 case 13: // otherwin
2203 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2204 offsetof(CPUSPARCState
, otherwin
));
2205 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2208 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2209 offsetof(CPUSPARCState
, wstate
));
2210 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2212 case 16: // UA2005 gl
2213 CHECK_IU_FEATURE(dc
, GL
);
2214 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2215 offsetof(CPUSPARCState
, gl
));
2216 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2218 case 26: // UA2005 strand status
2219 CHECK_IU_FEATURE(dc
, HYPV
);
2220 if (!hypervisor(dc
))
2222 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2225 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2232 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2234 gen_movl_TN_reg(rd
, cpu_tmp0
);
2236 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2237 #ifdef TARGET_SPARC64
2238 save_state(dc
, cpu_cond
);
2239 gen_helper_flushw();
2241 if (!supervisor(dc
))
2243 gen_movl_TN_reg(rd
, cpu_tbr
);
2247 } else if (xop
== 0x34) { /* FPU Operations */
2248 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2250 gen_op_clear_ieee_excp_and_FTT();
2251 rs1
= GET_FIELD(insn
, 13, 17);
2252 rs2
= GET_FIELD(insn
, 27, 31);
2253 xop
= GET_FIELD(insn
, 18, 26);
2255 case 0x1: /* fmovs */
2256 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2258 case 0x5: /* fnegs */
2259 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2261 case 0x9: /* fabss */
2262 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2264 case 0x29: /* fsqrts */
2265 CHECK_FPU_FEATURE(dc
, FSQRT
);
2266 gen_clear_float_exceptions();
2267 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2268 gen_helper_check_ieee_exceptions();
2269 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2271 case 0x2a: /* fsqrtd */
2272 CHECK_FPU_FEATURE(dc
, FSQRT
);
2273 gen_op_load_fpr_DT1(DFPREG(rs2
));
2274 gen_clear_float_exceptions();
2275 gen_helper_fsqrtd();
2276 gen_helper_check_ieee_exceptions();
2277 gen_op_store_DT0_fpr(DFPREG(rd
));
2279 case 0x2b: /* fsqrtq */
2280 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2281 gen_op_load_fpr_QT1(QFPREG(rs2
));
2282 gen_clear_float_exceptions();
2283 gen_helper_fsqrtq();
2284 gen_helper_check_ieee_exceptions();
2285 gen_op_store_QT0_fpr(QFPREG(rd
));
2287 case 0x41: /* fadds */
2288 gen_clear_float_exceptions();
2289 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2290 gen_helper_check_ieee_exceptions();
2291 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2293 case 0x42: /* faddd */
2294 gen_op_load_fpr_DT0(DFPREG(rs1
));
2295 gen_op_load_fpr_DT1(DFPREG(rs2
));
2296 gen_clear_float_exceptions();
2298 gen_helper_check_ieee_exceptions();
2299 gen_op_store_DT0_fpr(DFPREG(rd
));
2301 case 0x43: /* faddq */
2302 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2303 gen_op_load_fpr_QT0(QFPREG(rs1
));
2304 gen_op_load_fpr_QT1(QFPREG(rs2
));
2305 gen_clear_float_exceptions();
2307 gen_helper_check_ieee_exceptions();
2308 gen_op_store_QT0_fpr(QFPREG(rd
));
2310 case 0x45: /* fsubs */
2311 gen_clear_float_exceptions();
2312 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2313 gen_helper_check_ieee_exceptions();
2314 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2316 case 0x46: /* fsubd */
2317 gen_op_load_fpr_DT0(DFPREG(rs1
));
2318 gen_op_load_fpr_DT1(DFPREG(rs2
));
2319 gen_clear_float_exceptions();
2321 gen_helper_check_ieee_exceptions();
2322 gen_op_store_DT0_fpr(DFPREG(rd
));
2324 case 0x47: /* fsubq */
2325 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2326 gen_op_load_fpr_QT0(QFPREG(rs1
));
2327 gen_op_load_fpr_QT1(QFPREG(rs2
));
2328 gen_clear_float_exceptions();
2330 gen_helper_check_ieee_exceptions();
2331 gen_op_store_QT0_fpr(QFPREG(rd
));
2333 case 0x49: /* fmuls */
2334 CHECK_FPU_FEATURE(dc
, FMUL
);
2335 gen_clear_float_exceptions();
2336 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2337 gen_helper_check_ieee_exceptions();
2338 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2340 case 0x4a: /* fmuld */
2341 CHECK_FPU_FEATURE(dc
, FMUL
);
2342 gen_op_load_fpr_DT0(DFPREG(rs1
));
2343 gen_op_load_fpr_DT1(DFPREG(rs2
));
2344 gen_clear_float_exceptions();
2346 gen_helper_check_ieee_exceptions();
2347 gen_op_store_DT0_fpr(DFPREG(rd
));
2349 case 0x4b: /* fmulq */
2350 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2351 CHECK_FPU_FEATURE(dc
, FMUL
);
2352 gen_op_load_fpr_QT0(QFPREG(rs1
));
2353 gen_op_load_fpr_QT1(QFPREG(rs2
));
2354 gen_clear_float_exceptions();
2356 gen_helper_check_ieee_exceptions();
2357 gen_op_store_QT0_fpr(QFPREG(rd
));
2359 case 0x4d: /* fdivs */
2360 gen_clear_float_exceptions();
2361 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2362 gen_helper_check_ieee_exceptions();
2363 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2365 case 0x4e: /* fdivd */
2366 gen_op_load_fpr_DT0(DFPREG(rs1
));
2367 gen_op_load_fpr_DT1(DFPREG(rs2
));
2368 gen_clear_float_exceptions();
2370 gen_helper_check_ieee_exceptions();
2371 gen_op_store_DT0_fpr(DFPREG(rd
));
2373 case 0x4f: /* fdivq */
2374 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2375 gen_op_load_fpr_QT0(QFPREG(rs1
));
2376 gen_op_load_fpr_QT1(QFPREG(rs2
));
2377 gen_clear_float_exceptions();
2379 gen_helper_check_ieee_exceptions();
2380 gen_op_store_QT0_fpr(QFPREG(rd
));
2382 case 0x69: /* fsmuld */
2383 CHECK_FPU_FEATURE(dc
, FSMULD
);
2384 gen_clear_float_exceptions();
2385 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2386 gen_helper_check_ieee_exceptions();
2387 gen_op_store_DT0_fpr(DFPREG(rd
));
2389 case 0x6e: /* fdmulq */
2390 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2391 gen_op_load_fpr_DT0(DFPREG(rs1
));
2392 gen_op_load_fpr_DT1(DFPREG(rs2
));
2393 gen_clear_float_exceptions();
2394 gen_helper_fdmulq();
2395 gen_helper_check_ieee_exceptions();
2396 gen_op_store_QT0_fpr(QFPREG(rd
));
2398 case 0xc4: /* fitos */
2399 gen_clear_float_exceptions();
2400 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2401 gen_helper_check_ieee_exceptions();
2402 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2404 case 0xc6: /* fdtos */
2405 gen_op_load_fpr_DT1(DFPREG(rs2
));
2406 gen_clear_float_exceptions();
2407 gen_helper_fdtos(cpu_tmp32
);
2408 gen_helper_check_ieee_exceptions();
2409 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2411 case 0xc7: /* fqtos */
2412 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2413 gen_op_load_fpr_QT1(QFPREG(rs2
));
2414 gen_clear_float_exceptions();
2415 gen_helper_fqtos(cpu_tmp32
);
2416 gen_helper_check_ieee_exceptions();
2417 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2419 case 0xc8: /* fitod */
2420 gen_helper_fitod(cpu_fpr
[rs2
]);
2421 gen_op_store_DT0_fpr(DFPREG(rd
));
2423 case 0xc9: /* fstod */
2424 gen_helper_fstod(cpu_fpr
[rs2
]);
2425 gen_op_store_DT0_fpr(DFPREG(rd
));
2427 case 0xcb: /* fqtod */
2428 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2429 gen_op_load_fpr_QT1(QFPREG(rs2
));
2430 gen_clear_float_exceptions();
2432 gen_helper_check_ieee_exceptions();
2433 gen_op_store_DT0_fpr(DFPREG(rd
));
2435 case 0xcc: /* fitoq */
2436 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2437 gen_helper_fitoq(cpu_fpr
[rs2
]);
2438 gen_op_store_QT0_fpr(QFPREG(rd
));
2440 case 0xcd: /* fstoq */
2441 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2442 gen_helper_fstoq(cpu_fpr
[rs2
]);
2443 gen_op_store_QT0_fpr(QFPREG(rd
));
2445 case 0xce: /* fdtoq */
2446 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2447 gen_op_load_fpr_DT1(DFPREG(rs2
));
2449 gen_op_store_QT0_fpr(QFPREG(rd
));
2451 case 0xd1: /* fstoi */
2452 gen_clear_float_exceptions();
2453 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2454 gen_helper_check_ieee_exceptions();
2455 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2457 case 0xd2: /* fdtoi */
2458 gen_op_load_fpr_DT1(DFPREG(rs2
));
2459 gen_clear_float_exceptions();
2460 gen_helper_fdtoi(cpu_tmp32
);
2461 gen_helper_check_ieee_exceptions();
2462 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2464 case 0xd3: /* fqtoi */
2465 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2466 gen_op_load_fpr_QT1(QFPREG(rs2
));
2467 gen_clear_float_exceptions();
2468 gen_helper_fqtoi(cpu_tmp32
);
2469 gen_helper_check_ieee_exceptions();
2470 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2472 #ifdef TARGET_SPARC64
2473 case 0x2: /* V9 fmovd */
2474 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2475 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2476 cpu_fpr
[DFPREG(rs2
) + 1]);
2478 case 0x3: /* V9 fmovq */
2479 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2480 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2481 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2482 cpu_fpr
[QFPREG(rs2
) + 1]);
2483 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2484 cpu_fpr
[QFPREG(rs2
) + 2]);
2485 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2486 cpu_fpr
[QFPREG(rs2
) + 3]);
2488 case 0x6: /* V9 fnegd */
2489 gen_op_load_fpr_DT1(DFPREG(rs2
));
2491 gen_op_store_DT0_fpr(DFPREG(rd
));
2493 case 0x7: /* V9 fnegq */
2494 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2495 gen_op_load_fpr_QT1(QFPREG(rs2
));
2497 gen_op_store_QT0_fpr(QFPREG(rd
));
2499 case 0xa: /* V9 fabsd */
2500 gen_op_load_fpr_DT1(DFPREG(rs2
));
2502 gen_op_store_DT0_fpr(DFPREG(rd
));
2504 case 0xb: /* V9 fabsq */
2505 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2506 gen_op_load_fpr_QT1(QFPREG(rs2
));
2508 gen_op_store_QT0_fpr(QFPREG(rd
));
2510 case 0x81: /* V9 fstox */
2511 gen_clear_float_exceptions();
2512 gen_helper_fstox(cpu_fpr
[rs2
]);
2513 gen_helper_check_ieee_exceptions();
2514 gen_op_store_DT0_fpr(DFPREG(rd
));
2516 case 0x82: /* V9 fdtox */
2517 gen_op_load_fpr_DT1(DFPREG(rs2
));
2518 gen_clear_float_exceptions();
2520 gen_helper_check_ieee_exceptions();
2521 gen_op_store_DT0_fpr(DFPREG(rd
));
2523 case 0x83: /* V9 fqtox */
2524 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2525 gen_op_load_fpr_QT1(QFPREG(rs2
));
2526 gen_clear_float_exceptions();
2528 gen_helper_check_ieee_exceptions();
2529 gen_op_store_DT0_fpr(DFPREG(rd
));
2531 case 0x84: /* V9 fxtos */
2532 gen_op_load_fpr_DT1(DFPREG(rs2
));
2533 gen_clear_float_exceptions();
2534 gen_helper_fxtos(cpu_tmp32
);
2535 gen_helper_check_ieee_exceptions();
2536 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2538 case 0x88: /* V9 fxtod */
2539 gen_op_load_fpr_DT1(DFPREG(rs2
));
2540 gen_clear_float_exceptions();
2542 gen_helper_check_ieee_exceptions();
2543 gen_op_store_DT0_fpr(DFPREG(rd
));
2545 case 0x8c: /* V9 fxtoq */
2546 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2547 gen_op_load_fpr_DT1(DFPREG(rs2
));
2548 gen_clear_float_exceptions();
2550 gen_helper_check_ieee_exceptions();
2551 gen_op_store_QT0_fpr(QFPREG(rd
));
2557 } else if (xop
== 0x35) { /* FPU Operations */
2558 #ifdef TARGET_SPARC64
2561 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2563 gen_op_clear_ieee_excp_and_FTT();
2564 rs1
= GET_FIELD(insn
, 13, 17);
2565 rs2
= GET_FIELD(insn
, 27, 31);
2566 xop
= GET_FIELD(insn
, 18, 26);
2567 #ifdef TARGET_SPARC64
2568 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2571 l1
= gen_new_label();
2572 cond
= GET_FIELD_SP(insn
, 14, 17);
2573 cpu_src1
= get_src1(insn
, cpu_src1
);
2574 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2576 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2579 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2582 l1
= gen_new_label();
2583 cond
= GET_FIELD_SP(insn
, 14, 17);
2584 cpu_src1
= get_src1(insn
, cpu_src1
);
2585 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2587 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2588 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2591 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2594 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2595 l1
= gen_new_label();
2596 cond
= GET_FIELD_SP(insn
, 14, 17);
2597 cpu_src1
= get_src1(insn
, cpu_src1
);
2598 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2600 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2601 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2602 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2603 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2609 #ifdef TARGET_SPARC64
2610 #define FMOVSCC(fcc) \
2615 l1 = gen_new_label(); \
2616 r_cond = tcg_temp_new(); \
2617 cond = GET_FIELD_SP(insn, 14, 17); \
2618 gen_fcond(r_cond, fcc, cond); \
2619 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2621 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2622 gen_set_label(l1); \
2623 tcg_temp_free(r_cond); \
2625 #define FMOVDCC(fcc) \
2630 l1 = gen_new_label(); \
2631 r_cond = tcg_temp_new(); \
2632 cond = GET_FIELD_SP(insn, 14, 17); \
2633 gen_fcond(r_cond, fcc, cond); \
2634 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2636 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2637 cpu_fpr[DFPREG(rs2)]); \
2638 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2639 cpu_fpr[DFPREG(rs2) + 1]); \
2640 gen_set_label(l1); \
2641 tcg_temp_free(r_cond); \
2643 #define FMOVQCC(fcc) \
2648 l1 = gen_new_label(); \
2649 r_cond = tcg_temp_new(); \
2650 cond = GET_FIELD_SP(insn, 14, 17); \
2651 gen_fcond(r_cond, fcc, cond); \
2652 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2654 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2655 cpu_fpr[QFPREG(rs2)]); \
2656 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2657 cpu_fpr[QFPREG(rs2) + 1]); \
2658 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2659 cpu_fpr[QFPREG(rs2) + 2]); \
2660 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2661 cpu_fpr[QFPREG(rs2) + 3]); \
2662 gen_set_label(l1); \
2663 tcg_temp_free(r_cond); \
2665 case 0x001: /* V9 fmovscc %fcc0 */
2668 case 0x002: /* V9 fmovdcc %fcc0 */
2671 case 0x003: /* V9 fmovqcc %fcc0 */
2672 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2675 case 0x041: /* V9 fmovscc %fcc1 */
2678 case 0x042: /* V9 fmovdcc %fcc1 */
2681 case 0x043: /* V9 fmovqcc %fcc1 */
2682 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2685 case 0x081: /* V9 fmovscc %fcc2 */
2688 case 0x082: /* V9 fmovdcc %fcc2 */
2691 case 0x083: /* V9 fmovqcc %fcc2 */
2692 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2695 case 0x0c1: /* V9 fmovscc %fcc3 */
2698 case 0x0c2: /* V9 fmovdcc %fcc3 */
2701 case 0x0c3: /* V9 fmovqcc %fcc3 */
2702 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2708 #define FMOVSCC(icc) \
2713 l1 = gen_new_label(); \
2714 r_cond = tcg_temp_new(); \
2715 cond = GET_FIELD_SP(insn, 14, 17); \
2716 gen_cond(r_cond, icc, cond, dc); \
2717 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2719 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2720 gen_set_label(l1); \
2721 tcg_temp_free(r_cond); \
2723 #define FMOVDCC(icc) \
2728 l1 = gen_new_label(); \
2729 r_cond = tcg_temp_new(); \
2730 cond = GET_FIELD_SP(insn, 14, 17); \
2731 gen_cond(r_cond, icc, cond, dc); \
2732 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2734 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2735 cpu_fpr[DFPREG(rs2)]); \
2736 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2737 cpu_fpr[DFPREG(rs2) + 1]); \
2738 gen_set_label(l1); \
2739 tcg_temp_free(r_cond); \
2741 #define FMOVQCC(icc) \
2746 l1 = gen_new_label(); \
2747 r_cond = tcg_temp_new(); \
2748 cond = GET_FIELD_SP(insn, 14, 17); \
2749 gen_cond(r_cond, icc, cond, dc); \
2750 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2752 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2753 cpu_fpr[QFPREG(rs2)]); \
2754 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2755 cpu_fpr[QFPREG(rs2) + 1]); \
2756 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2757 cpu_fpr[QFPREG(rs2) + 2]); \
2758 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2759 cpu_fpr[QFPREG(rs2) + 3]); \
2760 gen_set_label(l1); \
2761 tcg_temp_free(r_cond); \
2764 case 0x101: /* V9 fmovscc %icc */
2767 case 0x102: /* V9 fmovdcc %icc */
2769 case 0x103: /* V9 fmovqcc %icc */
2770 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2773 case 0x181: /* V9 fmovscc %xcc */
2776 case 0x182: /* V9 fmovdcc %xcc */
2779 case 0x183: /* V9 fmovqcc %xcc */
2780 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2787 case 0x51: /* fcmps, V9 %fcc */
2788 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2790 case 0x52: /* fcmpd, V9 %fcc */
2791 gen_op_load_fpr_DT0(DFPREG(rs1
));
2792 gen_op_load_fpr_DT1(DFPREG(rs2
));
2793 gen_op_fcmpd(rd
& 3);
2795 case 0x53: /* fcmpq, V9 %fcc */
2796 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2797 gen_op_load_fpr_QT0(QFPREG(rs1
));
2798 gen_op_load_fpr_QT1(QFPREG(rs2
));
2799 gen_op_fcmpq(rd
& 3);
2801 case 0x55: /* fcmpes, V9 %fcc */
2802 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2804 case 0x56: /* fcmped, V9 %fcc */
2805 gen_op_load_fpr_DT0(DFPREG(rs1
));
2806 gen_op_load_fpr_DT1(DFPREG(rs2
));
2807 gen_op_fcmped(rd
& 3);
2809 case 0x57: /* fcmpeq, V9 %fcc */
2810 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2811 gen_op_load_fpr_QT0(QFPREG(rs1
));
2812 gen_op_load_fpr_QT1(QFPREG(rs2
));
2813 gen_op_fcmpeq(rd
& 3);
2818 } else if (xop
== 0x2) {
2821 rs1
= GET_FIELD(insn
, 13, 17);
2823 // or %g0, x, y -> mov T0, x; mov y, T0
2824 if (IS_IMM
) { /* immediate */
2827 simm
= GET_FIELDs(insn
, 19, 31);
2828 r_const
= tcg_const_tl(simm
);
2829 gen_movl_TN_reg(rd
, r_const
);
2830 tcg_temp_free(r_const
);
2831 } else { /* register */
2832 rs2
= GET_FIELD(insn
, 27, 31);
2833 gen_movl_reg_TN(rs2
, cpu_dst
);
2834 gen_movl_TN_reg(rd
, cpu_dst
);
2837 cpu_src1
= get_src1(insn
, cpu_src1
);
2838 if (IS_IMM
) { /* immediate */
2839 simm
= GET_FIELDs(insn
, 19, 31);
2840 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2841 gen_movl_TN_reg(rd
, cpu_dst
);
2842 } else { /* register */
2843 // or x, %g0, y -> mov T1, x; mov y, T1
2844 rs2
= GET_FIELD(insn
, 27, 31);
2846 gen_movl_reg_TN(rs2
, cpu_src2
);
2847 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2848 gen_movl_TN_reg(rd
, cpu_dst
);
2850 gen_movl_TN_reg(rd
, cpu_src1
);
2853 #ifdef TARGET_SPARC64
2854 } else if (xop
== 0x25) { /* sll, V9 sllx */
2855 cpu_src1
= get_src1(insn
, cpu_src1
);
2856 if (IS_IMM
) { /* immediate */
2857 simm
= GET_FIELDs(insn
, 20, 31);
2858 if (insn
& (1 << 12)) {
2859 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2861 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
2863 } else { /* register */
2864 rs2
= GET_FIELD(insn
, 27, 31);
2865 gen_movl_reg_TN(rs2
, cpu_src2
);
2866 if (insn
& (1 << 12)) {
2867 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2869 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2871 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2873 gen_movl_TN_reg(rd
, cpu_dst
);
2874 } else if (xop
== 0x26) { /* srl, V9 srlx */
2875 cpu_src1
= get_src1(insn
, cpu_src1
);
2876 if (IS_IMM
) { /* immediate */
2877 simm
= GET_FIELDs(insn
, 20, 31);
2878 if (insn
& (1 << 12)) {
2879 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2881 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2882 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
2884 } else { /* register */
2885 rs2
= GET_FIELD(insn
, 27, 31);
2886 gen_movl_reg_TN(rs2
, cpu_src2
);
2887 if (insn
& (1 << 12)) {
2888 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2889 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2891 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2892 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2893 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2896 gen_movl_TN_reg(rd
, cpu_dst
);
2897 } else if (xop
== 0x27) { /* sra, V9 srax */
2898 cpu_src1
= get_src1(insn
, cpu_src1
);
2899 if (IS_IMM
) { /* immediate */
2900 simm
= GET_FIELDs(insn
, 20, 31);
2901 if (insn
& (1 << 12)) {
2902 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2904 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2905 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
2906 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
2908 } else { /* register */
2909 rs2
= GET_FIELD(insn
, 27, 31);
2910 gen_movl_reg_TN(rs2
, cpu_src2
);
2911 if (insn
& (1 << 12)) {
2912 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2913 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2915 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2916 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2917 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
2918 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2921 gen_movl_TN_reg(rd
, cpu_dst
);
2923 } else if (xop
< 0x36) {
2925 cpu_src1
= get_src1(insn
, cpu_src1
);
2926 cpu_src2
= get_src2(insn
, cpu_src2
);
2927 switch (xop
& ~0x10) {
2930 simm
= GET_FIELDs(insn
, 19, 31);
2932 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
2933 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2934 dc
->cc_op
= CC_OP_ADD
;
2936 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
2940 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2941 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2942 dc
->cc_op
= CC_OP_ADD
;
2944 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2950 simm
= GET_FIELDs(insn
, 19, 31);
2951 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
2953 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2956 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2957 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2958 dc
->cc_op
= CC_OP_LOGIC
;
2963 simm
= GET_FIELDs(insn
, 19, 31);
2964 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2966 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2969 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2970 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2971 dc
->cc_op
= CC_OP_LOGIC
;
2976 simm
= GET_FIELDs(insn
, 19, 31);
2977 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
2979 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2982 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2983 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2984 dc
->cc_op
= CC_OP_LOGIC
;
2989 simm
= GET_FIELDs(insn
, 19, 31);
2991 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
2993 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
2997 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2998 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
2999 dc
->cc_op
= CC_OP_SUB
;
3001 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3005 case 0x5: /* andn */
3007 simm
= GET_FIELDs(insn
, 19, 31);
3008 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3010 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3013 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3014 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3015 dc
->cc_op
= CC_OP_LOGIC
;
3020 simm
= GET_FIELDs(insn
, 19, 31);
3021 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3023 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3026 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3027 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3028 dc
->cc_op
= CC_OP_LOGIC
;
3031 case 0x7: /* xorn */
3033 simm
= GET_FIELDs(insn
, 19, 31);
3034 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3036 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3037 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3040 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3041 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3042 dc
->cc_op
= CC_OP_LOGIC
;
3045 case 0x8: /* addx, V9 addc */
3047 simm
= GET_FIELDs(insn
, 19, 31);
3049 gen_helper_compute_psr();
3050 gen_op_addxi_cc(cpu_dst
, cpu_src1
, simm
);
3051 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
3052 dc
->cc_op
= CC_OP_ADDX
;
3054 gen_helper_compute_psr();
3055 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3056 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
3057 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3061 gen_helper_compute_psr();
3062 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3063 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
3064 dc
->cc_op
= CC_OP_ADDX
;
3066 gen_helper_compute_psr();
3067 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3068 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3069 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3073 #ifdef TARGET_SPARC64
3074 case 0x9: /* V9 mulx */
3076 simm
= GET_FIELDs(insn
, 19, 31);
3077 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3079 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3083 case 0xa: /* umul */
3084 CHECK_IU_FEATURE(dc
, MUL
);
3085 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3087 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3088 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3089 dc
->cc_op
= CC_OP_LOGIC
;
3092 case 0xb: /* smul */
3093 CHECK_IU_FEATURE(dc
, MUL
);
3094 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3096 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3097 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3098 dc
->cc_op
= CC_OP_LOGIC
;
3101 case 0xc: /* subx, V9 subc */
3103 simm
= GET_FIELDs(insn
, 19, 31);
3105 gen_helper_compute_psr();
3106 gen_op_subxi_cc(cpu_dst
, cpu_src1
, simm
);
3107 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
3108 dc
->cc_op
= CC_OP_SUBX
;
3110 gen_helper_compute_psr();
3111 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3112 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
3113 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3117 gen_helper_compute_psr();
3118 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3119 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
3120 dc
->cc_op
= CC_OP_SUBX
;
3122 gen_helper_compute_psr();
3123 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3124 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3125 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3129 #ifdef TARGET_SPARC64
3130 case 0xd: /* V9 udivx */
3131 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3132 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3133 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3134 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3137 case 0xe: /* udiv */
3138 CHECK_IU_FEATURE(dc
, DIV
);
3139 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3141 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3142 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3143 dc
->cc_op
= CC_OP_DIV
;
3146 case 0xf: /* sdiv */
3147 CHECK_IU_FEATURE(dc
, DIV
);
3148 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3150 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3151 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3152 dc
->cc_op
= CC_OP_DIV
;
3158 gen_movl_TN_reg(rd
, cpu_dst
);
3160 cpu_src1
= get_src1(insn
, cpu_src1
);
3161 cpu_src2
= get_src2(insn
, cpu_src2
);
3163 case 0x20: /* taddcc */
3164 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3165 gen_movl_TN_reg(rd
, cpu_dst
);
3166 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3167 dc
->cc_op
= CC_OP_TADD
;
3169 case 0x21: /* tsubcc */
3170 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3171 gen_movl_TN_reg(rd
, cpu_dst
);
3172 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3173 dc
->cc_op
= CC_OP_TSUB
;
3175 case 0x22: /* taddcctv */
3176 save_state(dc
, cpu_cond
);
3177 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3178 gen_movl_TN_reg(rd
, cpu_dst
);
3179 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3180 dc
->cc_op
= CC_OP_TADDTV
;
3182 case 0x23: /* tsubcctv */
3183 save_state(dc
, cpu_cond
);
3184 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3185 gen_movl_TN_reg(rd
, cpu_dst
);
3186 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3187 dc
->cc_op
= CC_OP_TSUBTV
;
3189 case 0x24: /* mulscc */
3190 gen_helper_compute_psr();
3191 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3192 gen_movl_TN_reg(rd
, cpu_dst
);
3193 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3194 dc
->cc_op
= CC_OP_FLAGS
;
3196 #ifndef TARGET_SPARC64
3197 case 0x25: /* sll */
3198 if (IS_IMM
) { /* immediate */
3199 simm
= GET_FIELDs(insn
, 20, 31);
3200 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3201 } else { /* register */
3202 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3203 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3205 gen_movl_TN_reg(rd
, cpu_dst
);
3207 case 0x26: /* srl */
3208 if (IS_IMM
) { /* immediate */
3209 simm
= GET_FIELDs(insn
, 20, 31);
3210 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3211 } else { /* register */
3212 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3213 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3215 gen_movl_TN_reg(rd
, cpu_dst
);
3217 case 0x27: /* sra */
3218 if (IS_IMM
) { /* immediate */
3219 simm
= GET_FIELDs(insn
, 20, 31);
3220 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3221 } else { /* register */
3222 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3223 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3225 gen_movl_TN_reg(rd
, cpu_dst
);
3232 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3233 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3235 #ifndef TARGET_SPARC64
3236 case 0x01 ... 0x0f: /* undefined in the
3240 case 0x10 ... 0x1f: /* implementation-dependent
3246 case 0x2: /* V9 wrccr */
3247 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3248 gen_helper_wrccr(cpu_dst
);
3249 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3250 dc
->cc_op
= CC_OP_FLAGS
;
3252 case 0x3: /* V9 wrasi */
3253 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3254 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3256 case 0x6: /* V9 wrfprs */
3257 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3258 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3259 save_state(dc
, cpu_cond
);
3264 case 0xf: /* V9 sir, nop if user */
3265 #if !defined(CONFIG_USER_ONLY)
3270 case 0x13: /* Graphics Status */
3271 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3273 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3275 case 0x14: /* Softint set */
3276 if (!supervisor(dc
))
3278 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3279 gen_helper_set_softint(cpu_tmp64
);
3281 case 0x15: /* Softint clear */
3282 if (!supervisor(dc
))
3284 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3285 gen_helper_clear_softint(cpu_tmp64
);
3287 case 0x16: /* Softint write */
3288 if (!supervisor(dc
))
3290 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3291 gen_helper_write_softint(cpu_tmp64
);
3293 case 0x17: /* Tick compare */
3294 #if !defined(CONFIG_USER_ONLY)
3295 if (!supervisor(dc
))
3301 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3303 r_tickptr
= tcg_temp_new_ptr();
3304 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3305 offsetof(CPUState
, tick
));
3306 gen_helper_tick_set_limit(r_tickptr
,
3308 tcg_temp_free_ptr(r_tickptr
);
3311 case 0x18: /* System tick */
3312 #if !defined(CONFIG_USER_ONLY)
3313 if (!supervisor(dc
))
3319 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3321 r_tickptr
= tcg_temp_new_ptr();
3322 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3323 offsetof(CPUState
, stick
));
3324 gen_helper_tick_set_count(r_tickptr
,
3326 tcg_temp_free_ptr(r_tickptr
);
3329 case 0x19: /* System tick compare */
3330 #if !defined(CONFIG_USER_ONLY)
3331 if (!supervisor(dc
))
3337 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3339 r_tickptr
= tcg_temp_new_ptr();
3340 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3341 offsetof(CPUState
, stick
));
3342 gen_helper_tick_set_limit(r_tickptr
,
3344 tcg_temp_free_ptr(r_tickptr
);
3348 case 0x10: /* Performance Control */
3349 case 0x11: /* Performance Instrumentation
3351 case 0x12: /* Dispatch Control */
3358 #if !defined(CONFIG_USER_ONLY)
3359 case 0x31: /* wrpsr, V9 saved, restored */
3361 if (!supervisor(dc
))
3363 #ifdef TARGET_SPARC64
3369 gen_helper_restored();
3371 case 2: /* UA2005 allclean */
3372 case 3: /* UA2005 otherw */
3373 case 4: /* UA2005 normalw */
3374 case 5: /* UA2005 invalw */
3380 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3381 gen_helper_wrpsr(cpu_dst
);
3382 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3383 dc
->cc_op
= CC_OP_FLAGS
;
3384 save_state(dc
, cpu_cond
);
3391 case 0x32: /* wrwim, V9 wrpr */
3393 if (!supervisor(dc
))
3395 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3396 #ifdef TARGET_SPARC64
3402 r_tsptr
= tcg_temp_new_ptr();
3403 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3404 offsetof(CPUState
, tsptr
));
3405 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3406 offsetof(trap_state
, tpc
));
3407 tcg_temp_free_ptr(r_tsptr
);
3414 r_tsptr
= tcg_temp_new_ptr();
3415 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3416 offsetof(CPUState
, tsptr
));
3417 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3418 offsetof(trap_state
, tnpc
));
3419 tcg_temp_free_ptr(r_tsptr
);
3426 r_tsptr
= tcg_temp_new_ptr();
3427 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3428 offsetof(CPUState
, tsptr
));
3429 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3430 offsetof(trap_state
,
3432 tcg_temp_free_ptr(r_tsptr
);
3439 r_tsptr
= tcg_temp_new_ptr();
3440 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3441 offsetof(CPUState
, tsptr
));
3442 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3443 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3444 offsetof(trap_state
, tt
));
3445 tcg_temp_free_ptr(r_tsptr
);
3452 r_tickptr
= tcg_temp_new_ptr();
3453 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3454 offsetof(CPUState
, tick
));
3455 gen_helper_tick_set_count(r_tickptr
,
3457 tcg_temp_free_ptr(r_tickptr
);
3461 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3464 save_state(dc
, cpu_cond
);
3465 gen_helper_wrpstate(cpu_tmp0
);
3471 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3472 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3473 offsetof(CPUSPARCState
, tl
));
3476 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3477 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3478 offsetof(CPUSPARCState
,
3482 gen_helper_wrcwp(cpu_tmp0
);
3485 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3486 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3487 offsetof(CPUSPARCState
,
3490 case 11: // canrestore
3491 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3492 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3493 offsetof(CPUSPARCState
,
3496 case 12: // cleanwin
3497 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3498 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3499 offsetof(CPUSPARCState
,
3502 case 13: // otherwin
3503 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3504 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3505 offsetof(CPUSPARCState
,
3509 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3510 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3511 offsetof(CPUSPARCState
,
3514 case 16: // UA2005 gl
3515 CHECK_IU_FEATURE(dc
, GL
);
3516 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3517 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3518 offsetof(CPUSPARCState
, gl
));
3520 case 26: // UA2005 strand status
3521 CHECK_IU_FEATURE(dc
, HYPV
);
3522 if (!hypervisor(dc
))
3524 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3530 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3531 if (dc
->def
->nwindows
!= 32)
3532 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3533 (1 << dc
->def
->nwindows
) - 1);
3534 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3538 case 0x33: /* wrtbr, UA2005 wrhpr */
3540 #ifndef TARGET_SPARC64
3541 if (!supervisor(dc
))
3543 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3545 CHECK_IU_FEATURE(dc
, HYPV
);
3546 if (!hypervisor(dc
))
3548 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3551 // XXX gen_op_wrhpstate();
3552 save_state(dc
, cpu_cond
);
3558 // XXX gen_op_wrhtstate();
3561 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3564 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3566 case 31: // hstick_cmpr
3570 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3571 r_tickptr
= tcg_temp_new_ptr();
3572 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3573 offsetof(CPUState
, hstick
));
3574 gen_helper_tick_set_limit(r_tickptr
,
3576 tcg_temp_free_ptr(r_tickptr
);
3579 case 6: // hver readonly
3587 #ifdef TARGET_SPARC64
3588 case 0x2c: /* V9 movcc */
3590 int cc
= GET_FIELD_SP(insn
, 11, 12);
3591 int cond
= GET_FIELD_SP(insn
, 14, 17);
3595 r_cond
= tcg_temp_new();
3596 if (insn
& (1 << 18)) {
3598 gen_cond(r_cond
, 0, cond
, dc
);
3600 gen_cond(r_cond
, 1, cond
, dc
);
3604 gen_fcond(r_cond
, cc
, cond
);
3607 l1
= gen_new_label();
3609 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3610 if (IS_IMM
) { /* immediate */
3613 simm
= GET_FIELD_SPs(insn
, 0, 10);
3614 r_const
= tcg_const_tl(simm
);
3615 gen_movl_TN_reg(rd
, r_const
);
3616 tcg_temp_free(r_const
);
3618 rs2
= GET_FIELD_SP(insn
, 0, 4);
3619 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3620 gen_movl_TN_reg(rd
, cpu_tmp0
);
3623 tcg_temp_free(r_cond
);
3626 case 0x2d: /* V9 sdivx */
3627 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3628 gen_movl_TN_reg(rd
, cpu_dst
);
3630 case 0x2e: /* V9 popc */
3632 cpu_src2
= get_src2(insn
, cpu_src2
);
3633 gen_helper_popc(cpu_dst
, cpu_src2
);
3634 gen_movl_TN_reg(rd
, cpu_dst
);
3636 case 0x2f: /* V9 movr */
3638 int cond
= GET_FIELD_SP(insn
, 10, 12);
3641 cpu_src1
= get_src1(insn
, cpu_src1
);
3643 l1
= gen_new_label();
3645 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3647 if (IS_IMM
) { /* immediate */
3650 simm
= GET_FIELD_SPs(insn
, 0, 9);
3651 r_const
= tcg_const_tl(simm
);
3652 gen_movl_TN_reg(rd
, r_const
);
3653 tcg_temp_free(r_const
);
3655 rs2
= GET_FIELD_SP(insn
, 0, 4);
3656 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3657 gen_movl_TN_reg(rd
, cpu_tmp0
);
3667 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3668 #ifdef TARGET_SPARC64
3669 int opf
= GET_FIELD_SP(insn
, 5, 13);
3670 rs1
= GET_FIELD(insn
, 13, 17);
3671 rs2
= GET_FIELD(insn
, 27, 31);
3672 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3676 case 0x000: /* VIS I edge8cc */
3677 case 0x001: /* VIS II edge8n */
3678 case 0x002: /* VIS I edge8lcc */
3679 case 0x003: /* VIS II edge8ln */
3680 case 0x004: /* VIS I edge16cc */
3681 case 0x005: /* VIS II edge16n */
3682 case 0x006: /* VIS I edge16lcc */
3683 case 0x007: /* VIS II edge16ln */
3684 case 0x008: /* VIS I edge32cc */
3685 case 0x009: /* VIS II edge32n */
3686 case 0x00a: /* VIS I edge32lcc */
3687 case 0x00b: /* VIS II edge32ln */
3690 case 0x010: /* VIS I array8 */
3691 CHECK_FPU_FEATURE(dc
, VIS1
);
3692 cpu_src1
= get_src1(insn
, cpu_src1
);
3693 gen_movl_reg_TN(rs2
, cpu_src2
);
3694 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3695 gen_movl_TN_reg(rd
, cpu_dst
);
3697 case 0x012: /* VIS I array16 */
3698 CHECK_FPU_FEATURE(dc
, VIS1
);
3699 cpu_src1
= get_src1(insn
, cpu_src1
);
3700 gen_movl_reg_TN(rs2
, cpu_src2
);
3701 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3702 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3703 gen_movl_TN_reg(rd
, cpu_dst
);
3705 case 0x014: /* VIS I array32 */
3706 CHECK_FPU_FEATURE(dc
, VIS1
);
3707 cpu_src1
= get_src1(insn
, cpu_src1
);
3708 gen_movl_reg_TN(rs2
, cpu_src2
);
3709 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3710 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3711 gen_movl_TN_reg(rd
, cpu_dst
);
3713 case 0x018: /* VIS I alignaddr */
3714 CHECK_FPU_FEATURE(dc
, VIS1
);
3715 cpu_src1
= get_src1(insn
, cpu_src1
);
3716 gen_movl_reg_TN(rs2
, cpu_src2
);
3717 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3718 gen_movl_TN_reg(rd
, cpu_dst
);
3720 case 0x019: /* VIS II bmask */
3721 case 0x01a: /* VIS I alignaddrl */
3724 case 0x020: /* VIS I fcmple16 */
3725 CHECK_FPU_FEATURE(dc
, VIS1
);
3726 gen_op_load_fpr_DT0(DFPREG(rs1
));
3727 gen_op_load_fpr_DT1(DFPREG(rs2
));
3728 gen_helper_fcmple16();
3729 gen_op_store_DT0_fpr(DFPREG(rd
));
3731 case 0x022: /* VIS I fcmpne16 */
3732 CHECK_FPU_FEATURE(dc
, VIS1
);
3733 gen_op_load_fpr_DT0(DFPREG(rs1
));
3734 gen_op_load_fpr_DT1(DFPREG(rs2
));
3735 gen_helper_fcmpne16();
3736 gen_op_store_DT0_fpr(DFPREG(rd
));
3738 case 0x024: /* VIS I fcmple32 */
3739 CHECK_FPU_FEATURE(dc
, VIS1
);
3740 gen_op_load_fpr_DT0(DFPREG(rs1
));
3741 gen_op_load_fpr_DT1(DFPREG(rs2
));
3742 gen_helper_fcmple32();
3743 gen_op_store_DT0_fpr(DFPREG(rd
));
3745 case 0x026: /* VIS I fcmpne32 */
3746 CHECK_FPU_FEATURE(dc
, VIS1
);
3747 gen_op_load_fpr_DT0(DFPREG(rs1
));
3748 gen_op_load_fpr_DT1(DFPREG(rs2
));
3749 gen_helper_fcmpne32();
3750 gen_op_store_DT0_fpr(DFPREG(rd
));
3752 case 0x028: /* VIS I fcmpgt16 */
3753 CHECK_FPU_FEATURE(dc
, VIS1
);
3754 gen_op_load_fpr_DT0(DFPREG(rs1
));
3755 gen_op_load_fpr_DT1(DFPREG(rs2
));
3756 gen_helper_fcmpgt16();
3757 gen_op_store_DT0_fpr(DFPREG(rd
));
3759 case 0x02a: /* VIS I fcmpeq16 */
3760 CHECK_FPU_FEATURE(dc
, VIS1
);
3761 gen_op_load_fpr_DT0(DFPREG(rs1
));
3762 gen_op_load_fpr_DT1(DFPREG(rs2
));
3763 gen_helper_fcmpeq16();
3764 gen_op_store_DT0_fpr(DFPREG(rd
));
3766 case 0x02c: /* VIS I fcmpgt32 */
3767 CHECK_FPU_FEATURE(dc
, VIS1
);
3768 gen_op_load_fpr_DT0(DFPREG(rs1
));
3769 gen_op_load_fpr_DT1(DFPREG(rs2
));
3770 gen_helper_fcmpgt32();
3771 gen_op_store_DT0_fpr(DFPREG(rd
));
3773 case 0x02e: /* VIS I fcmpeq32 */
3774 CHECK_FPU_FEATURE(dc
, VIS1
);
3775 gen_op_load_fpr_DT0(DFPREG(rs1
));
3776 gen_op_load_fpr_DT1(DFPREG(rs2
));
3777 gen_helper_fcmpeq32();
3778 gen_op_store_DT0_fpr(DFPREG(rd
));
3780 case 0x031: /* VIS I fmul8x16 */
3781 CHECK_FPU_FEATURE(dc
, VIS1
);
3782 gen_op_load_fpr_DT0(DFPREG(rs1
));
3783 gen_op_load_fpr_DT1(DFPREG(rs2
));
3784 gen_helper_fmul8x16();
3785 gen_op_store_DT0_fpr(DFPREG(rd
));
3787 case 0x033: /* VIS I fmul8x16au */
3788 CHECK_FPU_FEATURE(dc
, VIS1
);
3789 gen_op_load_fpr_DT0(DFPREG(rs1
));
3790 gen_op_load_fpr_DT1(DFPREG(rs2
));
3791 gen_helper_fmul8x16au();
3792 gen_op_store_DT0_fpr(DFPREG(rd
));
3794 case 0x035: /* VIS I fmul8x16al */
3795 CHECK_FPU_FEATURE(dc
, VIS1
);
3796 gen_op_load_fpr_DT0(DFPREG(rs1
));
3797 gen_op_load_fpr_DT1(DFPREG(rs2
));
3798 gen_helper_fmul8x16al();
3799 gen_op_store_DT0_fpr(DFPREG(rd
));
3801 case 0x036: /* VIS I fmul8sux16 */
3802 CHECK_FPU_FEATURE(dc
, VIS1
);
3803 gen_op_load_fpr_DT0(DFPREG(rs1
));
3804 gen_op_load_fpr_DT1(DFPREG(rs2
));
3805 gen_helper_fmul8sux16();
3806 gen_op_store_DT0_fpr(DFPREG(rd
));
3808 case 0x037: /* VIS I fmul8ulx16 */
3809 CHECK_FPU_FEATURE(dc
, VIS1
);
3810 gen_op_load_fpr_DT0(DFPREG(rs1
));
3811 gen_op_load_fpr_DT1(DFPREG(rs2
));
3812 gen_helper_fmul8ulx16();
3813 gen_op_store_DT0_fpr(DFPREG(rd
));
3815 case 0x038: /* VIS I fmuld8sux16 */
3816 CHECK_FPU_FEATURE(dc
, VIS1
);
3817 gen_op_load_fpr_DT0(DFPREG(rs1
));
3818 gen_op_load_fpr_DT1(DFPREG(rs2
));
3819 gen_helper_fmuld8sux16();
3820 gen_op_store_DT0_fpr(DFPREG(rd
));
3822 case 0x039: /* VIS I fmuld8ulx16 */
3823 CHECK_FPU_FEATURE(dc
, VIS1
);
3824 gen_op_load_fpr_DT0(DFPREG(rs1
));
3825 gen_op_load_fpr_DT1(DFPREG(rs2
));
3826 gen_helper_fmuld8ulx16();
3827 gen_op_store_DT0_fpr(DFPREG(rd
));
3829 case 0x03a: /* VIS I fpack32 */
3830 case 0x03b: /* VIS I fpack16 */
3831 case 0x03d: /* VIS I fpackfix */
3832 case 0x03e: /* VIS I pdist */
3835 case 0x048: /* VIS I faligndata */
3836 CHECK_FPU_FEATURE(dc
, VIS1
);
3837 gen_op_load_fpr_DT0(DFPREG(rs1
));
3838 gen_op_load_fpr_DT1(DFPREG(rs2
));
3839 gen_helper_faligndata();
3840 gen_op_store_DT0_fpr(DFPREG(rd
));
3842 case 0x04b: /* VIS I fpmerge */
3843 CHECK_FPU_FEATURE(dc
, VIS1
);
3844 gen_op_load_fpr_DT0(DFPREG(rs1
));
3845 gen_op_load_fpr_DT1(DFPREG(rs2
));
3846 gen_helper_fpmerge();
3847 gen_op_store_DT0_fpr(DFPREG(rd
));
3849 case 0x04c: /* VIS II bshuffle */
3852 case 0x04d: /* VIS I fexpand */
3853 CHECK_FPU_FEATURE(dc
, VIS1
);
3854 gen_op_load_fpr_DT0(DFPREG(rs1
));
3855 gen_op_load_fpr_DT1(DFPREG(rs2
));
3856 gen_helper_fexpand();
3857 gen_op_store_DT0_fpr(DFPREG(rd
));
3859 case 0x050: /* VIS I fpadd16 */
3860 CHECK_FPU_FEATURE(dc
, VIS1
);
3861 gen_op_load_fpr_DT0(DFPREG(rs1
));
3862 gen_op_load_fpr_DT1(DFPREG(rs2
));
3863 gen_helper_fpadd16();
3864 gen_op_store_DT0_fpr(DFPREG(rd
));
3866 case 0x051: /* VIS I fpadd16s */
3867 CHECK_FPU_FEATURE(dc
, VIS1
);
3868 gen_helper_fpadd16s(cpu_fpr
[rd
],
3869 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3871 case 0x052: /* VIS I fpadd32 */
3872 CHECK_FPU_FEATURE(dc
, VIS1
);
3873 gen_op_load_fpr_DT0(DFPREG(rs1
));
3874 gen_op_load_fpr_DT1(DFPREG(rs2
));
3875 gen_helper_fpadd32();
3876 gen_op_store_DT0_fpr(DFPREG(rd
));
3878 case 0x053: /* VIS I fpadd32s */
3879 CHECK_FPU_FEATURE(dc
, VIS1
);
3880 gen_helper_fpadd32s(cpu_fpr
[rd
],
3881 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3883 case 0x054: /* VIS I fpsub16 */
3884 CHECK_FPU_FEATURE(dc
, VIS1
);
3885 gen_op_load_fpr_DT0(DFPREG(rs1
));
3886 gen_op_load_fpr_DT1(DFPREG(rs2
));
3887 gen_helper_fpsub16();
3888 gen_op_store_DT0_fpr(DFPREG(rd
));
3890 case 0x055: /* VIS I fpsub16s */
3891 CHECK_FPU_FEATURE(dc
, VIS1
);
3892 gen_helper_fpsub16s(cpu_fpr
[rd
],
3893 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3895 case 0x056: /* VIS I fpsub32 */
3896 CHECK_FPU_FEATURE(dc
, VIS1
);
3897 gen_op_load_fpr_DT0(DFPREG(rs1
));
3898 gen_op_load_fpr_DT1(DFPREG(rs2
));
3899 gen_helper_fpsub32();
3900 gen_op_store_DT0_fpr(DFPREG(rd
));
3902 case 0x057: /* VIS I fpsub32s */
3903 CHECK_FPU_FEATURE(dc
, VIS1
);
3904 gen_helper_fpsub32s(cpu_fpr
[rd
],
3905 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3907 case 0x060: /* VIS I fzero */
3908 CHECK_FPU_FEATURE(dc
, VIS1
);
3909 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3910 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3912 case 0x061: /* VIS I fzeros */
3913 CHECK_FPU_FEATURE(dc
, VIS1
);
3914 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3916 case 0x062: /* VIS I fnor */
3917 CHECK_FPU_FEATURE(dc
, VIS1
);
3918 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3919 cpu_fpr
[DFPREG(rs2
)]);
3920 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3921 cpu_fpr
[DFPREG(rs2
) + 1]);
3923 case 0x063: /* VIS I fnors */
3924 CHECK_FPU_FEATURE(dc
, VIS1
);
3925 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3927 case 0x064: /* VIS I fandnot2 */
3928 CHECK_FPU_FEATURE(dc
, VIS1
);
3929 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3930 cpu_fpr
[DFPREG(rs2
)]);
3931 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3932 cpu_fpr
[DFPREG(rs1
) + 1],
3933 cpu_fpr
[DFPREG(rs2
) + 1]);
3935 case 0x065: /* VIS I fandnot2s */
3936 CHECK_FPU_FEATURE(dc
, VIS1
);
3937 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3939 case 0x066: /* VIS I fnot2 */
3940 CHECK_FPU_FEATURE(dc
, VIS1
);
3941 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
3942 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3943 cpu_fpr
[DFPREG(rs2
) + 1]);
3945 case 0x067: /* VIS I fnot2s */
3946 CHECK_FPU_FEATURE(dc
, VIS1
);
3947 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3949 case 0x068: /* VIS I fandnot1 */
3950 CHECK_FPU_FEATURE(dc
, VIS1
);
3951 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3952 cpu_fpr
[DFPREG(rs1
)]);
3953 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3954 cpu_fpr
[DFPREG(rs2
) + 1],
3955 cpu_fpr
[DFPREG(rs1
) + 1]);
3957 case 0x069: /* VIS I fandnot1s */
3958 CHECK_FPU_FEATURE(dc
, VIS1
);
3959 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3961 case 0x06a: /* VIS I fnot1 */
3962 CHECK_FPU_FEATURE(dc
, VIS1
);
3963 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3964 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3965 cpu_fpr
[DFPREG(rs1
) + 1]);
3967 case 0x06b: /* VIS I fnot1s */
3968 CHECK_FPU_FEATURE(dc
, VIS1
);
3969 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3971 case 0x06c: /* VIS I fxor */
3972 CHECK_FPU_FEATURE(dc
, VIS1
);
3973 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3974 cpu_fpr
[DFPREG(rs2
)]);
3975 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3976 cpu_fpr
[DFPREG(rs1
) + 1],
3977 cpu_fpr
[DFPREG(rs2
) + 1]);
3979 case 0x06d: /* VIS I fxors */
3980 CHECK_FPU_FEATURE(dc
, VIS1
);
3981 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3983 case 0x06e: /* VIS I fnand */
3984 CHECK_FPU_FEATURE(dc
, VIS1
);
3985 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3986 cpu_fpr
[DFPREG(rs2
)]);
3987 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3988 cpu_fpr
[DFPREG(rs2
) + 1]);
3990 case 0x06f: /* VIS I fnands */
3991 CHECK_FPU_FEATURE(dc
, VIS1
);
3992 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3994 case 0x070: /* VIS I fand */
3995 CHECK_FPU_FEATURE(dc
, VIS1
);
3996 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3997 cpu_fpr
[DFPREG(rs2
)]);
3998 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3999 cpu_fpr
[DFPREG(rs1
) + 1],
4000 cpu_fpr
[DFPREG(rs2
) + 1]);
4002 case 0x071: /* VIS I fands */
4003 CHECK_FPU_FEATURE(dc
, VIS1
);
4004 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4006 case 0x072: /* VIS I fxnor */
4007 CHECK_FPU_FEATURE(dc
, VIS1
);
4008 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4009 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4010 cpu_fpr
[DFPREG(rs1
)]);
4011 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4012 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4013 cpu_fpr
[DFPREG(rs1
) + 1]);
4015 case 0x073: /* VIS I fxnors */
4016 CHECK_FPU_FEATURE(dc
, VIS1
);
4017 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4018 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4020 case 0x074: /* VIS I fsrc1 */
4021 CHECK_FPU_FEATURE(dc
, VIS1
);
4022 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4023 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4024 cpu_fpr
[DFPREG(rs1
) + 1]);
4026 case 0x075: /* VIS I fsrc1s */
4027 CHECK_FPU_FEATURE(dc
, VIS1
);
4028 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4030 case 0x076: /* VIS I fornot2 */
4031 CHECK_FPU_FEATURE(dc
, VIS1
);
4032 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4033 cpu_fpr
[DFPREG(rs2
)]);
4034 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4035 cpu_fpr
[DFPREG(rs1
) + 1],
4036 cpu_fpr
[DFPREG(rs2
) + 1]);
4038 case 0x077: /* VIS I fornot2s */
4039 CHECK_FPU_FEATURE(dc
, VIS1
);
4040 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4042 case 0x078: /* VIS I fsrc2 */
4043 CHECK_FPU_FEATURE(dc
, VIS1
);
4044 gen_op_load_fpr_DT0(DFPREG(rs2
));
4045 gen_op_store_DT0_fpr(DFPREG(rd
));
4047 case 0x079: /* VIS I fsrc2s */
4048 CHECK_FPU_FEATURE(dc
, VIS1
);
4049 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4051 case 0x07a: /* VIS I fornot1 */
4052 CHECK_FPU_FEATURE(dc
, VIS1
);
4053 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4054 cpu_fpr
[DFPREG(rs1
)]);
4055 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4056 cpu_fpr
[DFPREG(rs2
) + 1],
4057 cpu_fpr
[DFPREG(rs1
) + 1]);
4059 case 0x07b: /* VIS I fornot1s */
4060 CHECK_FPU_FEATURE(dc
, VIS1
);
4061 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4063 case 0x07c: /* VIS I for */
4064 CHECK_FPU_FEATURE(dc
, VIS1
);
4065 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4066 cpu_fpr
[DFPREG(rs2
)]);
4067 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4068 cpu_fpr
[DFPREG(rs1
) + 1],
4069 cpu_fpr
[DFPREG(rs2
) + 1]);
4071 case 0x07d: /* VIS I fors */
4072 CHECK_FPU_FEATURE(dc
, VIS1
);
4073 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4075 case 0x07e: /* VIS I fone */
4076 CHECK_FPU_FEATURE(dc
, VIS1
);
4077 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4078 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4080 case 0x07f: /* VIS I fones */
4081 CHECK_FPU_FEATURE(dc
, VIS1
);
4082 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4084 case 0x080: /* VIS I shutdown */
4085 case 0x081: /* VIS II siam */
4094 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4095 #ifdef TARGET_SPARC64
4100 #ifdef TARGET_SPARC64
4101 } else if (xop
== 0x39) { /* V9 return */
4104 save_state(dc
, cpu_cond
);
4105 cpu_src1
= get_src1(insn
, cpu_src1
);
4106 if (IS_IMM
) { /* immediate */
4107 simm
= GET_FIELDs(insn
, 19, 31);
4108 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4109 } else { /* register */
4110 rs2
= GET_FIELD(insn
, 27, 31);
4112 gen_movl_reg_TN(rs2
, cpu_src2
);
4113 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4115 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4117 gen_helper_restore();
4118 gen_mov_pc_npc(dc
, cpu_cond
);
4119 r_const
= tcg_const_i32(3);
4120 gen_helper_check_align(cpu_dst
, r_const
);
4121 tcg_temp_free_i32(r_const
);
4122 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4123 dc
->npc
= DYNAMIC_PC
;
4127 cpu_src1
= get_src1(insn
, cpu_src1
);
4128 if (IS_IMM
) { /* immediate */
4129 simm
= GET_FIELDs(insn
, 19, 31);
4130 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4131 } else { /* register */
4132 rs2
= GET_FIELD(insn
, 27, 31);
4134 gen_movl_reg_TN(rs2
, cpu_src2
);
4135 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4137 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4140 case 0x38: /* jmpl */
4145 r_pc
= tcg_const_tl(dc
->pc
);
4146 gen_movl_TN_reg(rd
, r_pc
);
4147 tcg_temp_free(r_pc
);
4148 gen_mov_pc_npc(dc
, cpu_cond
);
4149 r_const
= tcg_const_i32(3);
4150 gen_helper_check_align(cpu_dst
, r_const
);
4151 tcg_temp_free_i32(r_const
);
4152 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4153 dc
->npc
= DYNAMIC_PC
;
4156 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4157 case 0x39: /* rett, V9 return */
4161 if (!supervisor(dc
))
4163 gen_mov_pc_npc(dc
, cpu_cond
);
4164 r_const
= tcg_const_i32(3);
4165 gen_helper_check_align(cpu_dst
, r_const
);
4166 tcg_temp_free_i32(r_const
);
4167 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4168 dc
->npc
= DYNAMIC_PC
;
4173 case 0x3b: /* flush */
4174 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4176 gen_helper_flush(cpu_dst
);
4178 case 0x3c: /* save */
4179 save_state(dc
, cpu_cond
);
4181 gen_movl_TN_reg(rd
, cpu_dst
);
4183 case 0x3d: /* restore */
4184 save_state(dc
, cpu_cond
);
4185 gen_helper_restore();
4186 gen_movl_TN_reg(rd
, cpu_dst
);
4188 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4189 case 0x3e: /* V9 done/retry */
4193 if (!supervisor(dc
))
4195 dc
->npc
= DYNAMIC_PC
;
4196 dc
->pc
= DYNAMIC_PC
;
4200 if (!supervisor(dc
))
4202 dc
->npc
= DYNAMIC_PC
;
4203 dc
->pc
= DYNAMIC_PC
;
4219 case 3: /* load/store instructions */
4221 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4223 cpu_src1
= get_src1(insn
, cpu_src1
);
4224 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4225 rs2
= GET_FIELD(insn
, 27, 31);
4226 gen_movl_reg_TN(rs2
, cpu_src2
);
4227 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4228 } else if (IS_IMM
) { /* immediate */
4229 simm
= GET_FIELDs(insn
, 19, 31);
4230 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4231 } else { /* register */
4232 rs2
= GET_FIELD(insn
, 27, 31);
4234 gen_movl_reg_TN(rs2
, cpu_src2
);
4235 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4237 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4239 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4240 (xop
> 0x17 && xop
<= 0x1d ) ||
4241 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4243 case 0x0: /* ld, V9 lduw, load unsigned word */
4244 gen_address_mask(dc
, cpu_addr
);
4245 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4247 case 0x1: /* ldub, load unsigned byte */
4248 gen_address_mask(dc
, cpu_addr
);
4249 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4251 case 0x2: /* lduh, load unsigned halfword */
4252 gen_address_mask(dc
, cpu_addr
);
4253 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4255 case 0x3: /* ldd, load double word */
4261 save_state(dc
, cpu_cond
);
4262 r_const
= tcg_const_i32(7);
4263 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4264 tcg_temp_free_i32(r_const
);
4265 gen_address_mask(dc
, cpu_addr
);
4266 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4267 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4268 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4269 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4270 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4271 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4272 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4275 case 0x9: /* ldsb, load signed byte */
4276 gen_address_mask(dc
, cpu_addr
);
4277 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4279 case 0xa: /* ldsh, load signed halfword */
4280 gen_address_mask(dc
, cpu_addr
);
4281 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4283 case 0xd: /* ldstub -- XXX: should be atomically */
4287 gen_address_mask(dc
, cpu_addr
);
4288 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4289 r_const
= tcg_const_tl(0xff);
4290 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4291 tcg_temp_free(r_const
);
4294 case 0x0f: /* swap, swap register with memory. Also
4296 CHECK_IU_FEATURE(dc
, SWAP
);
4297 gen_movl_reg_TN(rd
, cpu_val
);
4298 gen_address_mask(dc
, cpu_addr
);
4299 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4300 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4301 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4303 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4304 case 0x10: /* lda, V9 lduwa, load word alternate */
4305 #ifndef TARGET_SPARC64
4308 if (!supervisor(dc
))
4311 save_state(dc
, cpu_cond
);
4312 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4314 case 0x11: /* lduba, load unsigned byte alternate */
4315 #ifndef TARGET_SPARC64
4318 if (!supervisor(dc
))
4321 save_state(dc
, cpu_cond
);
4322 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4324 case 0x12: /* lduha, load unsigned halfword alternate */
4325 #ifndef TARGET_SPARC64
4328 if (!supervisor(dc
))
4331 save_state(dc
, cpu_cond
);
4332 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4334 case 0x13: /* ldda, load double word alternate */
4335 #ifndef TARGET_SPARC64
4338 if (!supervisor(dc
))
4343 save_state(dc
, cpu_cond
);
4344 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4346 case 0x19: /* ldsba, load signed byte alternate */
4347 #ifndef TARGET_SPARC64
4350 if (!supervisor(dc
))
4353 save_state(dc
, cpu_cond
);
4354 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4356 case 0x1a: /* ldsha, load signed halfword alternate */
4357 #ifndef TARGET_SPARC64
4360 if (!supervisor(dc
))
4363 save_state(dc
, cpu_cond
);
4364 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4366 case 0x1d: /* ldstuba -- XXX: should be atomically */
4367 #ifndef TARGET_SPARC64
4370 if (!supervisor(dc
))
4373 save_state(dc
, cpu_cond
);
4374 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4376 case 0x1f: /* swapa, swap reg with alt. memory. Also
4378 CHECK_IU_FEATURE(dc
, SWAP
);
4379 #ifndef TARGET_SPARC64
4382 if (!supervisor(dc
))
4385 save_state(dc
, cpu_cond
);
4386 gen_movl_reg_TN(rd
, cpu_val
);
4387 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4390 #ifndef TARGET_SPARC64
4391 case 0x30: /* ldc */
4392 case 0x31: /* ldcsr */
4393 case 0x33: /* lddc */
4397 #ifdef TARGET_SPARC64
4398 case 0x08: /* V9 ldsw */
4399 gen_address_mask(dc
, cpu_addr
);
4400 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4402 case 0x0b: /* V9 ldx */
4403 gen_address_mask(dc
, cpu_addr
);
4404 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4406 case 0x18: /* V9 ldswa */
4407 save_state(dc
, cpu_cond
);
4408 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4410 case 0x1b: /* V9 ldxa */
4411 save_state(dc
, cpu_cond
);
4412 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4414 case 0x2d: /* V9 prefetch, no effect */
4416 case 0x30: /* V9 ldfa */
4417 save_state(dc
, cpu_cond
);
4418 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4420 case 0x33: /* V9 lddfa */
4421 save_state(dc
, cpu_cond
);
4422 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4424 case 0x3d: /* V9 prefetcha, no effect */
4426 case 0x32: /* V9 ldqfa */
4427 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4428 save_state(dc
, cpu_cond
);
4429 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4435 gen_movl_TN_reg(rd
, cpu_val
);
4436 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4439 } else if (xop
>= 0x20 && xop
< 0x24) {
4440 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4442 save_state(dc
, cpu_cond
);
4444 case 0x20: /* ldf, load fpreg */
4445 gen_address_mask(dc
, cpu_addr
);
4446 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4447 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4449 case 0x21: /* ldfsr, V9 ldxfsr */
4450 #ifdef TARGET_SPARC64
4451 gen_address_mask(dc
, cpu_addr
);
4453 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4454 gen_helper_ldxfsr(cpu_tmp64
);
4458 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4459 gen_helper_ldfsr(cpu_tmp32
);
4463 case 0x22: /* ldqf, load quad fpreg */
4467 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4468 r_const
= tcg_const_i32(dc
->mem_idx
);
4469 gen_helper_ldqf(cpu_addr
, r_const
);
4470 tcg_temp_free_i32(r_const
);
4471 gen_op_store_QT0_fpr(QFPREG(rd
));
4474 case 0x23: /* lddf, load double fpreg */
4478 r_const
= tcg_const_i32(dc
->mem_idx
);
4479 gen_helper_lddf(cpu_addr
, r_const
);
4480 tcg_temp_free_i32(r_const
);
4481 gen_op_store_DT0_fpr(DFPREG(rd
));
4487 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4488 xop
== 0xe || xop
== 0x1e) {
4489 gen_movl_reg_TN(rd
, cpu_val
);
4491 case 0x4: /* st, store word */
4492 gen_address_mask(dc
, cpu_addr
);
4493 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4495 case 0x5: /* stb, store byte */
4496 gen_address_mask(dc
, cpu_addr
);
4497 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4499 case 0x6: /* sth, store halfword */
4500 gen_address_mask(dc
, cpu_addr
);
4501 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4503 case 0x7: /* std, store double word */
4509 save_state(dc
, cpu_cond
);
4510 gen_address_mask(dc
, cpu_addr
);
4511 r_const
= tcg_const_i32(7);
4512 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4513 tcg_temp_free_i32(r_const
);
4514 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4515 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4516 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4519 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4520 case 0x14: /* sta, V9 stwa, store word alternate */
4521 #ifndef TARGET_SPARC64
4524 if (!supervisor(dc
))
4527 save_state(dc
, cpu_cond
);
4528 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4530 case 0x15: /* stba, store byte alternate */
4531 #ifndef TARGET_SPARC64
4534 if (!supervisor(dc
))
4537 save_state(dc
, cpu_cond
);
4538 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4540 case 0x16: /* stha, store halfword alternate */
4541 #ifndef TARGET_SPARC64
4544 if (!supervisor(dc
))
4547 save_state(dc
, cpu_cond
);
4548 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4550 case 0x17: /* stda, store double word alternate */
4551 #ifndef TARGET_SPARC64
4554 if (!supervisor(dc
))
4560 save_state(dc
, cpu_cond
);
4561 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4565 #ifdef TARGET_SPARC64
4566 case 0x0e: /* V9 stx */
4567 gen_address_mask(dc
, cpu_addr
);
4568 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4570 case 0x1e: /* V9 stxa */
4571 save_state(dc
, cpu_cond
);
4572 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4578 } else if (xop
> 0x23 && xop
< 0x28) {
4579 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4581 save_state(dc
, cpu_cond
);
4583 case 0x24: /* stf, store fpreg */
4584 gen_address_mask(dc
, cpu_addr
);
4585 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4586 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4588 case 0x25: /* stfsr, V9 stxfsr */
4589 #ifdef TARGET_SPARC64
4590 gen_address_mask(dc
, cpu_addr
);
4591 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4593 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4595 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4597 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4598 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4602 #ifdef TARGET_SPARC64
4603 /* V9 stqf, store quad fpreg */
4607 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4608 gen_op_load_fpr_QT0(QFPREG(rd
));
4609 r_const
= tcg_const_i32(dc
->mem_idx
);
4610 gen_helper_stqf(cpu_addr
, r_const
);
4611 tcg_temp_free_i32(r_const
);
4614 #else /* !TARGET_SPARC64 */
4615 /* stdfq, store floating point queue */
4616 #if defined(CONFIG_USER_ONLY)
4619 if (!supervisor(dc
))
4621 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4626 case 0x27: /* stdf, store double fpreg */
4630 gen_op_load_fpr_DT0(DFPREG(rd
));
4631 r_const
= tcg_const_i32(dc
->mem_idx
);
4632 gen_helper_stdf(cpu_addr
, r_const
);
4633 tcg_temp_free_i32(r_const
);
4639 } else if (xop
> 0x33 && xop
< 0x3f) {
4640 save_state(dc
, cpu_cond
);
4642 #ifdef TARGET_SPARC64
4643 case 0x34: /* V9 stfa */
4644 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4646 case 0x36: /* V9 stqfa */
4650 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4651 r_const
= tcg_const_i32(7);
4652 gen_helper_check_align(cpu_addr
, r_const
);
4653 tcg_temp_free_i32(r_const
);
4654 gen_op_load_fpr_QT0(QFPREG(rd
));
4655 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4658 case 0x37: /* V9 stdfa */
4659 gen_op_load_fpr_DT0(DFPREG(rd
));
4660 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4662 case 0x3c: /* V9 casa */
4663 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4664 gen_movl_TN_reg(rd
, cpu_val
);
4666 case 0x3e: /* V9 casxa */
4667 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4668 gen_movl_TN_reg(rd
, cpu_val
);
4671 case 0x34: /* stc */
4672 case 0x35: /* stcsr */
4673 case 0x36: /* stdcq */
4674 case 0x37: /* stdc */
4685 /* default case for non jump instructions */
4686 if (dc
->npc
== DYNAMIC_PC
) {
4687 dc
->pc
= DYNAMIC_PC
;
4689 } else if (dc
->npc
== JUMP_PC
) {
4690 /* we can do a static jump */
4691 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4695 dc
->npc
= dc
->npc
+ 4;
4703 save_state(dc
, cpu_cond
);
4704 r_const
= tcg_const_i32(TT_ILL_INSN
);
4705 gen_helper_raise_exception(r_const
);
4706 tcg_temp_free_i32(r_const
);
4714 save_state(dc
, cpu_cond
);
4715 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4716 gen_helper_raise_exception(r_const
);
4717 tcg_temp_free_i32(r_const
);
4721 #if !defined(CONFIG_USER_ONLY)
4726 save_state(dc
, cpu_cond
);
4727 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4728 gen_helper_raise_exception(r_const
);
4729 tcg_temp_free_i32(r_const
);
4735 save_state(dc
, cpu_cond
);
4736 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4739 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4741 save_state(dc
, cpu_cond
);
4742 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4746 #ifndef TARGET_SPARC64
4751 save_state(dc
, cpu_cond
);
4752 r_const
= tcg_const_i32(TT_NCP_INSN
);
4753 gen_helper_raise_exception(r_const
);
4754 tcg_temp_free(r_const
);
4761 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4762 int spc
, CPUSPARCState
*env
)
4764 target_ulong pc_start
, last_pc
;
4765 uint16_t *gen_opc_end
;
4766 DisasContext dc1
, *dc
= &dc1
;
4772 memset(dc
, 0, sizeof(DisasContext
));
4777 dc
->npc
= (target_ulong
) tb
->cs_base
;
4778 dc
->cc_op
= CC_OP_DYNAMIC
;
4779 dc
->mem_idx
= cpu_mmu_index(env
);
4781 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4782 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4784 dc
->fpu_enabled
= 0;
4785 #ifdef TARGET_SPARC64
4786 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4788 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4790 cpu_tmp0
= tcg_temp_new();
4791 cpu_tmp32
= tcg_temp_new_i32();
4792 cpu_tmp64
= tcg_temp_new_i64();
4794 cpu_dst
= tcg_temp_local_new();
4797 cpu_val
= tcg_temp_local_new();
4798 cpu_addr
= tcg_temp_local_new();
4801 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4803 max_insns
= CF_COUNT_MASK
;
4806 if (unlikely(!TAILQ_EMPTY(&env
->breakpoints
))) {
4807 TAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4808 if (bp
->pc
== dc
->pc
) {
4809 if (dc
->pc
!= pc_start
)
4810 save_state(dc
, cpu_cond
);
4819 qemu_log("Search PC...\n");
4820 j
= gen_opc_ptr
- gen_opc_buf
;
4824 gen_opc_instr_start
[lj
++] = 0;
4825 gen_opc_pc
[lj
] = dc
->pc
;
4826 gen_opc_npc
[lj
] = dc
->npc
;
4827 gen_opc_instr_start
[lj
] = 1;
4828 gen_opc_icount
[lj
] = num_insns
;
4831 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4834 disas_sparc_insn(dc
);
4839 /* if the next PC is different, we abort now */
4840 if (dc
->pc
!= (last_pc
+ 4))
4842 /* if we reach a page boundary, we stop generation so that the
4843 PC of a TT_TFAULT exception is always in the right page */
4844 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4846 /* if single step mode, we generate only one instruction and
4847 generate an exception */
4848 if (env
->singlestep_enabled
|| singlestep
) {
4849 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4853 } while ((gen_opc_ptr
< gen_opc_end
) &&
4854 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4855 num_insns
< max_insns
);
4858 tcg_temp_free(cpu_addr
);
4859 tcg_temp_free(cpu_val
);
4860 tcg_temp_free(cpu_dst
);
4861 tcg_temp_free_i64(cpu_tmp64
);
4862 tcg_temp_free_i32(cpu_tmp32
);
4863 tcg_temp_free(cpu_tmp0
);
4864 if (tb
->cflags
& CF_LAST_IO
)
4867 if (dc
->pc
!= DYNAMIC_PC
&&
4868 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4869 /* static PC and NPC: we can use direct chaining */
4870 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4872 if (dc
->pc
!= DYNAMIC_PC
)
4873 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4874 save_npc(dc
, cpu_cond
);
4878 gen_icount_end(tb
, num_insns
);
4879 *gen_opc_ptr
= INDEX_op_end
;
4881 j
= gen_opc_ptr
- gen_opc_buf
;
4884 gen_opc_instr_start
[lj
++] = 0;
4888 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4889 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4891 tb
->size
= last_pc
+ 4 - pc_start
;
4892 tb
->icount
= num_insns
;
4895 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
4896 qemu_log("--------------\n");
4897 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
4898 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
4904 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4906 gen_intermediate_code_internal(tb
, 0, env
);
4909 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4911 gen_intermediate_code_internal(tb
, 1, env
);
4914 void gen_intermediate_code_init(CPUSPARCState
*env
)
4918 static const char * const gregnames
[8] = {
4919 NULL
, // g0 not used
4928 static const char * const fregnames
[64] = {
4929 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4930 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4931 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4932 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4933 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4934 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4935 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4936 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4939 /* init various static tables */
4943 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
4944 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
4945 offsetof(CPUState
, regwptr
),
4947 #ifdef TARGET_SPARC64
4948 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
4950 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
4952 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
4954 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
4956 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4957 offsetof(CPUState
, tick_cmpr
),
4959 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4960 offsetof(CPUState
, stick_cmpr
),
4962 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4963 offsetof(CPUState
, hstick_cmpr
),
4965 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
4967 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
4969 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
4971 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
4972 offsetof(CPUState
, ssr
), "ssr");
4973 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
4974 offsetof(CPUState
, version
), "ver");
4975 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
4976 offsetof(CPUState
, softint
),
4979 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
4982 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
4984 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
4986 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
4987 offsetof(CPUState
, cc_src2
),
4989 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4991 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
4993 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
4995 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
4997 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
4999 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
5001 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
5002 #ifndef CONFIG_USER_ONLY
5003 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
5006 for (i
= 1; i
< 8; i
++)
5007 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5008 offsetof(CPUState
, gregs
[i
]),
5010 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5011 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
5012 offsetof(CPUState
, fpr
[i
]),
5015 /* register helpers */
5017 #define GEN_HELPER 2
5022 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
5023 unsigned long searched_pc
, int pc_pos
, void *puc
)
5026 env
->pc
= gen_opc_pc
[pc_pos
];
5027 npc
= gen_opc_npc
[pc_pos
];
5029 /* dynamic NPC: already stored */
5030 } else if (npc
== 2) {
5031 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
5032 /* jump PC: use T2 and the jump targets of the translation */
5034 env
->npc
= gen_opc_jump_pc
[0];
5036 env
->npc
= gen_opc_jump_pc
[1];