 *  Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
 *  Copyright (C) 2003-2005 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
#ifndef CONFIG_USER_ONLY
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
/* local register indexes (only used inside old micro ops) */
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int address_mask_32bit;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
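
/*
 * Worked example: GET_FIELD() numbers bits from the MSB, so
 * GET_FIELD(insn, 2, 6) is ((insn >> 25) & 0x1f), i.e. the rd field,
 * while GET_FIELD_SP() uses the manual's numbering, so
 * GET_FIELD_SP(insn, 0, 18) is the low 19 bits (insn & 0x7ffff).
 */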
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
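
/*
 * Note: on V9 the 5-bit register fields encode double/quad FP register
 * numbers 0..62 with bit 5 of the register number stored in bit 0 of the
 * field; DFPREG()/QFPREG() undo that encoding (e.g. DFPREG(0x03) == 34,
 * i.e. %f34).  Pre-V9 CPUs only have %f0..%f31, hence the simpler masks.
 */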
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
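
/*
 * Example: sign_extend(0x3ffffe, 22) treats the 22-bit branch displacement
 * 0x3ffffe as the signed value -2; callers then shift the result left by 2
 * before adding it to the PC.
 */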
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));

static void gen_op_load_fpr_DT1(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));

static void gen_op_store_DT0_fpr(unsigned int dst)
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));

static void gen_op_load_fpr_QT0(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));

static void gen_op_load_fpr_QT1(unsigned int src)
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));

static void gen_op_store_QT0_fpr(unsigned int dst)
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
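
/*
 * The DT0/DT1 and QT0/QT1 slots in the CPU state act as staging buffers:
 * double and quad operands are assembled there from 2 or 4 consecutive
 * 32-bit cpu_fpr[] registers before calling a helper, and results are
 * copied back the same way.
 */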
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)

#ifdef TARGET_SPARC64
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#define AM_CHECK(dc) (1)
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
#ifdef TARGET_SPARC64
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);

static inline void gen_movl_reg_TN(int reg, TCGv tn)
        tcg_gen_movi_tl(tn, 0);
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));

static inline void gen_movl_TN_reg(int reg, TCGv tn)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
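
/*
 * Register access convention: %g0..%g7 live in cpu_gregs[] (%g0 reads as
 * zero), while the windowed registers (regs 8..31, %o/%l/%i) are accessed
 * indirectly through cpu_regwptr at offset (reg - 8) * sizeof(target_ulong),
 * so a window change only has to adjust the pointer.
 */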
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
    TranslationBlock *tb;

    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
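
/*
 * TB chaining: a direct tcg_gen_goto_tb() link is only emitted when the
 * branch target stays on the same guest page as the current TB, so the
 * link can be invalidated together with the page.  The value passed to
 * tcg_gen_exit_tb() ((long)tb + tb_num) identifies which of the two jump
 * slots of this TB should be patched once the target TB is known.
 */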
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
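
/*
 * Each helper above isolates one PSR/CCR flag (negative, zero, overflow,
 * carry) as a 0/1 value in a target-long; the gen_op_eval_* functions
 * below combine these bits to evaluate the Bicc conditions.
 */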
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    tcg_temp_free(r_temp);
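
/*
 * Signed-overflow check: overflow occurred iff src1 and src2 have the same
 * sign but the result does not, i.e. bit 31 of (~(src1 ^ src2)) & (src1 ^ dst)
 * is set; in that case a tag-overflow trap (TT_TOVF) is raised.
 */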
static inline void gen_tag_tv(TCGv src1, TCGv src2)
    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
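
/*
 * Tagged arithmetic: the two low-order tag bits of both operands must be
 * zero, otherwise the trapping TADDccTV/TSUBccTV forms raise TT_TOVF.
 */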
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
    gen_helper_compute_C_icc(cpu_tmp0);
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
    gen_helper_compute_C_icc(cpu_tmp0);
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
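
/*
 * Condition codes are evaluated lazily: the *_cc helpers only record the
 * operands and result in cpu_cc_src/cpu_cc_src2/cpu_cc_dst (plus the CC_OP
 * kind); the actual NZVC flags are materialised later by
 * gen_helper_compute_psr() when something needs them.
 */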
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    tcg_temp_free(r_temp);
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
    gen_helper_compute_C_icc(cpu_tmp0);
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
    gen_helper_compute_C_icc(cpu_tmp0);
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);

    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
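
/*
 * MULScc implements one step of the V8 iterative multiply: the addend is
 * src2 or 0 depending on the LSB of %y, src1 is shifted right by one with
 * (N xor V) inserted at bit 31, and %y is shifted right with src1's old
 * LSB moving into its top bit.
 */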
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
    tcg_gen_trunc_i64_tl(dst, r_temp2);
    tcg_temp_free_i64(r_temp2);

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
    tcg_gen_trunc_i64_tl(dst, r_temp2);
    tcg_temp_free_i64(r_temp2);
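
/*
 * UMUL/SMUL form a 64-bit product; the high 32 bits are written to %y and
 * the destination receives the low word (or, on SPARC64, the whole 64-bit
 * value).
 */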
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
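
/*
 * SDIVX special cases: a zero divisor raises TT_DIV_ZERO, and
 * INT64_MIN / -1 (the only overflowing signed division) is pinned to
 * INT64_MIN instead of being handed to the host divide.
 */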
static inline void gen_op_eval_ba(TCGv dst)
    tcg_gen_movi_tl(dst, 1);

static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(dst, src);

static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
    gen_mov_reg_C(dst, src);

static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(dst, src);

static inline void gen_op_eval_bn(TCGv dst)
    tcg_gen_movi_tl(dst, 0);

static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(dst, src);

static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
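
/*
 * Summary of the integer condition formulas used above: "equal" is Z,
 * "less" is N ^ V, "less or equal" is Z | (N ^ V), "less or equal
 * unsigned" is C | Z; the remaining conditions are the complements
 * (xori with 1) of these.
 */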
/* FPSR bit field FCC1 | FCC0: */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
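
/*
 * Each fcc field is two bits wide and encodes 0 '=', 1 '<', 2 '>',
 * 3 'unordered'; fcc_offset selects which of the FSR's fcc fields
 * (only fcc0 before V9, fcc0..fcc3 on V9) is examined.
 */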
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);

static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC1(dst, src, fcc_offset);

static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);

static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_goto_tb(dc, 1, pc2, pc2 + 4);

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
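
/*
 * gen_branch_a() handles conditional branches with the annul bit set:
 * on the taken path the delay slot (pc2) is executed and npc becomes the
 * branch target (pc1); on the not-taken path the delay slot is annulled
 * and execution continues at pc2 + 4.
 */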
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);

    tcg_gen_movi_tl(cpu_npc, npc2);
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;

static inline void save_npc(DisasContext *dc, TCGv cond)
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);

static inline void save_state(DisasContext *dc, TCGv cond)
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);

static inline void gen_op_next_insn(void)
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
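
/*
 * pc/npc handling: dc->pc and dc->npc are kept as compile-time constants
 * whenever possible, DYNAMIC_PC when the value is only known at run time,
 * and JUMP_PC when npc is one of the two values in dc->jump_pc[] selected
 * by a pending branch condition; the helpers above materialise cpu_pc and
 * cpu_npc before anything that might need the architectural state.
 */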
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
#ifdef TARGET_SPARC64
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        gen_op_eval_bn(r_dst);
        gen_op_eval_be(r_dst, r_src);
        gen_op_eval_ble(r_dst, r_src);
        gen_op_eval_bl(r_dst, r_src);
        gen_op_eval_bleu(r_dst, r_src);
        gen_op_eval_bcs(r_dst, r_src);
        gen_op_eval_bneg(r_dst, r_src);
        gen_op_eval_bvs(r_dst, r_src);
        gen_op_eval_ba(r_dst);
        gen_op_eval_bne(r_dst, r_src);
        gen_op_eval_bg(r_dst, r_src);
        gen_op_eval_bge(r_dst, r_src);
        gen_op_eval_bgu(r_dst, r_src);
        gen_op_eval_bcc(r_dst, r_src);
        gen_op_eval_bpos(r_dst, r_src);
        gen_op_eval_bvc(r_dst, r_src);
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
    unsigned int offset;

        gen_op_eval_bn(r_dst);
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        gen_op_eval_ba(r_dst);
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
#ifdef TARGET_SPARC64

static const int gen_tcg_cond_reg[8] = {

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
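
/*
 * gen_tcg_cond_reg[] (contents elided above) holds the *inverted* TCG
 * comparison for each BPr/MOVr rcond value: r_dst is first set to 0 and
 * the "set to 1" is skipped when the inverse condition on r_src holds.
 */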
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

        /* unconditional not taken */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
            dc->npc = dc->pc + 4;
    } else if (cond == 0x8) {
        /* unconditional taken */
            dc->npc = dc->pc + 4;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

        /* unconditional not taken */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
            dc->npc = dc->pc + 4;
    } else if (cond == 0x8) {
        /* unconditional taken */
            dc->npc = dc->pc + 4;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
        gen_helper_fcmps(r_rs1, r_rs2);
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);

static inline void gen_op_fcmpd(int fccno)
        gen_helper_fcmpd_fcc1();
        gen_helper_fcmpd_fcc2();
        gen_helper_fcmpd_fcc3();

static inline void gen_op_fcmpq(int fccno)
        gen_helper_fcmpq_fcc1();
        gen_helper_fcmpq_fcc2();
        gen_helper_fcmpq_fcc3();

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
        gen_helper_fcmpes(r_rs1, r_rs2);
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);

static inline void gen_op_fcmped(int fccno)
        gen_helper_fcmped();
        gen_helper_fcmped_fcc1();
        gen_helper_fcmped_fcc2();
        gen_helper_fcmped_fcc3();

static inline void gen_op_fcmpeq(int fccno)
        gen_helper_fcmpeq();
        gen_helper_fcmpeq_fcc1();
        gen_helper_fcmpeq_fcc2();
        gen_helper_fcmpeq_fcc3();

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
    gen_helper_fcmps(r_rs1, r_rs2);

static inline void gen_op_fcmpd(int fccno)

static inline void gen_op_fcmpq(int fccno)

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
    gen_helper_fcmpes(r_rs1, r_rs2);

static inline void gen_op_fcmped(int fccno)
    gen_helper_fcmped();

static inline void gen_op_fcmpeq(int fccno)
    gen_helper_fcmpeq();
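
/*
 * fcmps/fcmpd/fcmpq compare single/double/quad operands and deposit the
 * result in the fcc field selected by fccno; the fcmpe* variants also
 * signal an IEEE invalid exception on unordered operands.  Pre-V9 CPUs
 * only have fcc0, hence the simpler fallback definitions above.
 */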
static inline void gen_op_fpexception_im(int fsr_flags)
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);

static inline void gen_op_clear_ieee_excp_and_FTT(void)
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);

static inline void gen_clear_float_exceptions(void)
    gen_helper_clear_float_exceptions();
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
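
/*
 * ASI selection: with the i bit set the alternate space identifier is
 * taken from the %asi register (first pair of statements above); otherwise
 * the 8-bit immediate asi field of the instruction (bits 12..5, extracted
 * here as GET_FIELD(insn, 19, 26)) is used.
 */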
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
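
/*
 * LDSTUB(A) is modelled as a load of one byte through the ASI followed by
 * a store of 0xff to the same address, matching its "load then set to
 * all-ones" semantics.
 */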
static inline TCGv get_src1(unsigned int insn, TCGv def)
    rs1 = GET_FIELD(insn, 13, 17);
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));

static inline TCGv get_src2(unsigned int insn, TCGv def)
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
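
/*
 * Operand fetch: global registers (rs < 8) can be used directly as TCG
 * values from cpu_gregs[], windowed registers are loaded through
 * cpu_regwptr into the caller-provided temporary, and with the i bit set
 * src2 is the sign-extended 13-bit immediate GET_FIELDs(insn, 19, 31).
 */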
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);

    tcg_temp_free_i32(r_tl);
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \

#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
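
    /*
     * Decode starts from the top-level op field (instruction bits 31:30,
     * GET_FIELD(insn, 0, 1)): 0 = branches/sethi, 1 = call,
     * 2 = arithmetic/logical/FPU, 3 = loads and stores.
     */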
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 18);
                    cc = GET_FIELD_SP(insn, 20, 21);
                        do_branch(dc, target, insn, 0, cpu_cond);
                        do_branch(dc, target, insn, 1, cpu_cond);
            case 0x3:           /* V9 BPr */
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
            case 0x5:           /* V9 FBPcc */
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    do_fbranch(dc, target, insn, cc, cpu_cond);
            case 0x7:           /* CBN+x */
            case 0x2:           /* BN+x */
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    do_branch(dc, target, insn, 0, cpu_cond);
            case 0x6:           /* FBN+x */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    do_fbranch(dc, target, insn, 0, cpu_cond);
            case 0x4:           /* SETHI */
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
            case 0x0:           /* UNIMPL */
        target_long target = GET_FIELDs(insn, 2, 31) << 2;
            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
        gen_mov_pc_npc(dc, cpu_cond);
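
        /*
         * SETHI places its 22-bit immediate in the upper bits of rd
         * (value << 10).  The unlabeled block just above corresponds to
         * the CALL opcode: the 30-bit word displacement is shifted left
         * by 2 and the return address (dc->pc) is written to %o7
         * (register 15).
         */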
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                cpu_src1 = get_src1(insn, cpu_src1);
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                    rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                cond = GET_FIELD(insn, 3, 6);
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
#ifdef TARGET_SPARC64
                    int cc = GET_FIELD_SP(insn, 11, 12);
                    save_state(dc, cpu_cond);
                        gen_cond(r_cond, 0, cond, dc);
                        gen_cond(r_cond, 1, cond, dc);
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                    tcg_temp_free(r_cond);
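
            /*
             * Tcc: the trap number is rs1 plus rs2/imm, masked to 7 bits
             * (8 bits when the hypervisor feature applies), biased by
             * TT_TRAP, and raised either unconditionally (cond 0x8) or
             * under the condition evaluated into r_cond.
             */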
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    gen_movl_TN_reg(rd, cpu_y);
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                case 0x4: /* V9 rdtick */
                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                case 0x5: /* V9 rdpc */
                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                    gen_movl_TN_reg(rd, cpu_gsr);
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                case 0x18: /* System tick */
                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
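
                /*
                 * This block handles the RDASR/RDY encodings (xop 0x28):
                 * rs1 selects which ancillary state register is read.
                 * rs1 0 (and, on pre-V9 parts, several implementation-
                 * defined values) reads %y, while the V9 cases above read
                 * %ccr, %asi, %tick, %pc, %fprs, %gsr, %softint and the
                 * tick compare registers.
                 */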
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                rs1 = GET_FIELD(insn, 13, 17);
                    // gen_op_rdhpstate();
                    // gen_op_rdhtstate();
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    gen_helper_rdcwp(cpu_tmp0);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
                gen_movl_TN_reg(rd, cpu_tmp0);
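
                /*
                 * rdpr: rs1 selects the V9 privileged register (tpc, tnpc,
                 * tstate and tt come from the trap-state entry for the
                 * current trap level; the others are read straight from
                 * the CPU state), the value is left in cpu_tmp0 and then
                 * written to rd.  The pre-V9 path reads %wim instead.
                 */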
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
                if (!supervisor(dc))
                gen_movl_TN_reg(rd, cpu_tbr);
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_op_store_QT0_fpr(QFPREG(rd));
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
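
                /*
                 * Pattern of the FPop1 cases above: single-precision
                 * operands are taken directly from cpu_fpr[], double/quad
                 * operands are staged into DT0/DT1 or QT0/QT1 first;
                 * accrued IEEE exceptions are cleared before the helper
                 * and checked afterwards, then the result is copied from
                 * cpu_tmp32 or DT0/QT0 back into the destination registers.
                 */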
2378 #ifdef TARGET_SPARC64
2379 case 0x2: /* V9 fmovd */
2380 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2381 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2382 cpu_fpr
[DFPREG(rs2
) + 1]);
2384 case 0x3: /* V9 fmovq */
2385 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2386 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2387 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2388 cpu_fpr
[QFPREG(rs2
) + 1]);
2389 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2390 cpu_fpr
[QFPREG(rs2
) + 2]);
2391 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2392 cpu_fpr
[QFPREG(rs2
) + 3]);
2394 case 0x6: /* V9 fnegd */
2395 gen_op_load_fpr_DT1(DFPREG(rs2
));
2397 gen_op_store_DT0_fpr(DFPREG(rd
));
2399 case 0x7: /* V9 fnegq */
2400 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2401 gen_op_load_fpr_QT1(QFPREG(rs2
));
2403 gen_op_store_QT0_fpr(QFPREG(rd
));
2405 case 0xa: /* V9 fabsd */
2406 gen_op_load_fpr_DT1(DFPREG(rs2
));
2408 gen_op_store_DT0_fpr(DFPREG(rd
));
2410 case 0xb: /* V9 fabsq */
2411 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2412 gen_op_load_fpr_QT1(QFPREG(rs2
));
2414 gen_op_store_QT0_fpr(QFPREG(rd
));
2416 case 0x81: /* V9 fstox */
2417 gen_clear_float_exceptions();
2418 gen_helper_fstox(cpu_fpr
[rs2
]);
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_DT0_fpr(DFPREG(rd
));
2422 case 0x82: /* V9 fdtox */
2423 gen_op_load_fpr_DT1(DFPREG(rs2
));
2424 gen_clear_float_exceptions();
2426 gen_helper_check_ieee_exceptions();
2427 gen_op_store_DT0_fpr(DFPREG(rd
));
2429 case 0x83: /* V9 fqtox */
2430 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2431 gen_op_load_fpr_QT1(QFPREG(rs2
));
2432 gen_clear_float_exceptions();
2434 gen_helper_check_ieee_exceptions();
2435 gen_op_store_DT0_fpr(DFPREG(rd
));
2437 case 0x84: /* V9 fxtos */
2438 gen_op_load_fpr_DT1(DFPREG(rs2
));
2439 gen_clear_float_exceptions();
2440 gen_helper_fxtos(cpu_tmp32
);
2441 gen_helper_check_ieee_exceptions();
2442 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2444 case 0x88: /* V9 fxtod */
2445 gen_op_load_fpr_DT1(DFPREG(rs2
));
2446 gen_clear_float_exceptions();
2448 gen_helper_check_ieee_exceptions();
2449 gen_op_store_DT0_fpr(DFPREG(rd
));
2451 case 0x8c: /* V9 fxtoq */
2452 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2453 gen_op_load_fpr_DT1(DFPREG(rs2
));
2454 gen_clear_float_exceptions();
2456 gen_helper_check_ieee_exceptions();
2457 gen_op_store_QT0_fpr(QFPREG(rd
));
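        /* xop == 0x35 (FPop2): conditional floating-point moves, either on
           an integer register value (fmovr*) or on %fccN/%icc/%xcc
           (fmovcc*), plus the floating-point compares handled below. */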
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
            save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_fcond(r_cond, fcc, cond);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVDCC(fcc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_fcond(r_cond, fcc, cond);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                cpu_fpr[DFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                cpu_fpr[DFPREG(rs2) + 1]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVQCC(fcc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_fcond(r_cond, fcc, cond);                   \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                cpu_fpr[QFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                cpu_fpr[QFPREG(rs2) + 1]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                cpu_fpr[QFPREG(rs2) + 2]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                cpu_fpr[QFPREG(rs2) + 3]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVSCC(0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVDCC(0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVSCC(1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVDCC(1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVSCC(2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVDCC(2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(2);
                break;
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVSCC(3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVDCC(3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(3);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_cond(r_cond, icc, cond, dc);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVDCC(icc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_cond(r_cond, icc, cond, dc);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                cpu_fpr[DFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                cpu_fpr[DFPREG(rs2) + 1]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
#define FMOVQCC(icc)                                            \
            {                                                   \
                TCGv r_cond;                                    \
                int l1;                                         \
                                                                \
                l1 = gen_new_label();                           \
                r_cond = tcg_temp_new();                        \
                cond = GET_FIELD_SP(insn, 14, 17);              \
                gen_cond(r_cond, icc, cond, dc);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                   0, l1);                      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                cpu_fpr[QFPREG(rs2)]);          \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                cpu_fpr[QFPREG(rs2) + 1]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                cpu_fpr[QFPREG(rs2) + 2]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                cpu_fpr[QFPREG(rs2) + 3]);      \
                gen_set_label(l1);                              \
                tcg_temp_free(r_cond);                          \
            }
            case 0x101: /* V9 fmovscc %icc */
                FMOVSCC(0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVDCC(0);
                break;
            case 0x103: /* V9 fmovqcc %icc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x181: /* V9 fmovscc %xcc */
                FMOVSCC(1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVDCC(1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x2) {
            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {       /* immediate */
                    TCGv r_const;

                    simm = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl(simm);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {            /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {       /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {            /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
#endif
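        /* The remaining xop values below 0x36 are the integer ALU
           operations; their results go through cpu_dst and the -cc forms
           also update the lazy condition-code state (cpu_cc_op and
           dc->cc_op), as seen in the cases below. */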
2830 } else if (xop
< 0x36) {
2832 cpu_src1
= get_src1(insn
, cpu_src1
);
2833 cpu_src2
= get_src2(insn
, cpu_src2
);
2834 switch (xop
& ~0x10) {
2837 simm
= GET_FIELDs(insn
, 19, 31);
2839 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
2840 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2841 dc
->cc_op
= CC_OP_ADD
;
2843 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
2847 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2848 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2849 dc
->cc_op
= CC_OP_ADD
;
2851 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2857 simm
= GET_FIELDs(insn
, 19, 31);
2858 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
2860 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2863 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2864 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2865 dc
->cc_op
= CC_OP_LOGIC
;
2870 simm
= GET_FIELDs(insn
, 19, 31);
2871 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2873 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2876 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2877 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2878 dc
->cc_op
= CC_OP_LOGIC
;
2883 simm
= GET_FIELDs(insn
, 19, 31);
2884 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
2886 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2889 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2890 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2891 dc
->cc_op
= CC_OP_LOGIC
;
2896 simm
= GET_FIELDs(insn
, 19, 31);
2898 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
2900 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
2904 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2905 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
2906 dc
->cc_op
= CC_OP_SUB
;
2908 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2912 case 0x5: /* andn */
2914 simm
= GET_FIELDs(insn
, 19, 31);
2915 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
2917 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2920 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2921 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2922 dc
->cc_op
= CC_OP_LOGIC
;
2927 simm
= GET_FIELDs(insn
, 19, 31);
2928 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
2930 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2933 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2934 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2935 dc
->cc_op
= CC_OP_LOGIC
;
2938 case 0x7: /* xorn */
2940 simm
= GET_FIELDs(insn
, 19, 31);
2941 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
2943 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
2944 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2947 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2948 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2949 dc
->cc_op
= CC_OP_LOGIC
;
2952 case 0x8: /* addx, V9 addc */
2954 simm
= GET_FIELDs(insn
, 19, 31);
2956 gen_op_addxi_cc(cpu_dst
, cpu_src1
, simm
);
2957 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
2958 dc
->cc_op
= CC_OP_ADDX
;
2960 gen_helper_compute_C_icc(cpu_tmp0
);
2961 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
2962 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2966 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2967 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
2968 dc
->cc_op
= CC_OP_ADDX
;
2970 gen_helper_compute_C_icc(cpu_tmp0
);
2971 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
2972 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2976 #ifdef TARGET_SPARC64
2977 case 0x9: /* V9 mulx */
2979 simm
= GET_FIELDs(insn
, 19, 31);
2980 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
2982 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
2986 case 0xa: /* umul */
2987 CHECK_IU_FEATURE(dc
, MUL
);
2988 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
2990 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2991 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2992 dc
->cc_op
= CC_OP_LOGIC
;
2995 case 0xb: /* smul */
2996 CHECK_IU_FEATURE(dc
, MUL
);
2997 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
2999 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3000 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3001 dc
->cc_op
= CC_OP_LOGIC
;
3004 case 0xc: /* subx, V9 subc */
3006 simm
= GET_FIELDs(insn
, 19, 31);
3008 gen_op_subxi_cc(cpu_dst
, cpu_src1
, simm
);
3009 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
3010 dc
->cc_op
= CC_OP_SUBX
;
3012 gen_helper_compute_C_icc(cpu_tmp0
);
3013 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
3014 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3018 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3019 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
3020 dc
->cc_op
= CC_OP_SUBX
;
3022 gen_helper_compute_C_icc(cpu_tmp0
);
3023 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3024 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3028 #ifdef TARGET_SPARC64
3029 case 0xd: /* V9 udivx */
3030 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3031 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3032 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3033 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3036 case 0xe: /* udiv */
3037 CHECK_IU_FEATURE(dc
, DIV
);
3038 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3040 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3041 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3042 dc
->cc_op
= CC_OP_DIV
;
3045 case 0xf: /* sdiv */
3046 CHECK_IU_FEATURE(dc
, DIV
);
3047 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3049 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3050 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3051 dc
->cc_op
= CC_OP_DIV
;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {            /* remaining xop values below 0x36 */
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                    dc->cc_op = CC_OP_TADD;
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                    dc->cc_op = CC_OP_TSUB;
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                    dc->cc_op = CC_OP_TADDTV;
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                    dc->cc_op = CC_OP_TSUBTV;
                    break;
                case 0x24: /* mulscc */
                    gen_helper_compute_psr();
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                    dc->cc_op = CC_OP_ADD;
                    break;
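                /* Pre-V9 sll/srl/sra: only the low five bits of the shift
                   count (immediate or register form) are used. */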
#ifndef TARGET_SPARC64
                case 0x25:      /* sll */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26:      /* srl */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27:      /* sra */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
3131 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3132 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3134 #ifndef TARGET_SPARC64
3135 case 0x01 ... 0x0f: /* undefined in the
3139 case 0x10 ... 0x1f: /* implementation-dependent
3145 case 0x2: /* V9 wrccr */
3146 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3147 gen_helper_wrccr(cpu_dst
);
3148 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3149 dc
->cc_op
= CC_OP_FLAGS
;
3151 case 0x3: /* V9 wrasi */
3152 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3153 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3154 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3156 case 0x6: /* V9 wrfprs */
3157 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3158 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3159 save_state(dc
, cpu_cond
);
3164 case 0xf: /* V9 sir, nop if user */
3165 #if !defined(CONFIG_USER_ONLY)
3166 if (supervisor(dc
)) {
3171 case 0x13: /* Graphics Status */
3172 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3174 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3176 case 0x14: /* Softint set */
3177 if (!supervisor(dc
))
3179 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3180 gen_helper_set_softint(cpu_tmp64
);
3182 case 0x15: /* Softint clear */
3183 if (!supervisor(dc
))
3185 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3186 gen_helper_clear_softint(cpu_tmp64
);
3188 case 0x16: /* Softint write */
3189 if (!supervisor(dc
))
3191 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3192 gen_helper_write_softint(cpu_tmp64
);
3194 case 0x17: /* Tick compare */
3195 #if !defined(CONFIG_USER_ONLY)
3196 if (!supervisor(dc
))
3202 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3204 r_tickptr
= tcg_temp_new_ptr();
3205 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3206 offsetof(CPUState
, tick
));
3207 gen_helper_tick_set_limit(r_tickptr
,
3209 tcg_temp_free_ptr(r_tickptr
);
3212 case 0x18: /* System tick */
3213 #if !defined(CONFIG_USER_ONLY)
3214 if (!supervisor(dc
))
3220 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3222 r_tickptr
= tcg_temp_new_ptr();
3223 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3224 offsetof(CPUState
, stick
));
3225 gen_helper_tick_set_count(r_tickptr
,
3227 tcg_temp_free_ptr(r_tickptr
);
3230 case 0x19: /* System tick compare */
3231 #if !defined(CONFIG_USER_ONLY)
3232 if (!supervisor(dc
))
3238 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3240 r_tickptr
= tcg_temp_new_ptr();
3241 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3242 offsetof(CPUState
, stick
));
3243 gen_helper_tick_set_limit(r_tickptr
,
3245 tcg_temp_free_ptr(r_tickptr
);
3249 case 0x10: /* Performance Control */
3250 case 0x11: /* Performance Instrumentation
3252 case 0x12: /* Dispatch Control */
3259 #if !defined(CONFIG_USER_ONLY)
3260 case 0x31: /* wrpsr, V9 saved, restored */
3262 if (!supervisor(dc
))
3264 #ifdef TARGET_SPARC64
3270 gen_helper_restored();
3272 case 2: /* UA2005 allclean */
3273 case 3: /* UA2005 otherw */
3274 case 4: /* UA2005 normalw */
3275 case 5: /* UA2005 invalw */
3281 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3282 gen_helper_wrpsr(cpu_dst
);
3283 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3284 dc
->cc_op
= CC_OP_FLAGS
;
3285 save_state(dc
, cpu_cond
);
3292 case 0x32: /* wrwim, V9 wrpr */
3294 if (!supervisor(dc
))
3296 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3297 #ifdef TARGET_SPARC64
3303 r_tsptr
= tcg_temp_new_ptr();
3304 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3305 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3306 offsetof(trap_state
, tpc
));
3307 tcg_temp_free_ptr(r_tsptr
);
3314 r_tsptr
= tcg_temp_new_ptr();
3315 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3316 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3317 offsetof(trap_state
, tnpc
));
3318 tcg_temp_free_ptr(r_tsptr
);
3325 r_tsptr
= tcg_temp_new_ptr();
3326 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3327 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3328 offsetof(trap_state
,
3330 tcg_temp_free_ptr(r_tsptr
);
3337 r_tsptr
= tcg_temp_new_ptr();
3338 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3339 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3340 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3341 offsetof(trap_state
, tt
));
3342 tcg_temp_free_ptr(r_tsptr
);
3349 r_tickptr
= tcg_temp_new_ptr();
3350 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3351 offsetof(CPUState
, tick
));
3352 gen_helper_tick_set_count(r_tickptr
,
3354 tcg_temp_free_ptr(r_tickptr
);
3358 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3361 save_state(dc
, cpu_cond
);
3362 gen_helper_wrpstate(cpu_tmp0
);
3368 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3369 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3370 offsetof(CPUSPARCState
, tl
));
3373 gen_helper_wrpil(cpu_tmp0
);
3376 gen_helper_wrcwp(cpu_tmp0
);
3379 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3380 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3381 offsetof(CPUSPARCState
,
3384 case 11: // canrestore
3385 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3386 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3387 offsetof(CPUSPARCState
,
3390 case 12: // cleanwin
3391 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3392 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3393 offsetof(CPUSPARCState
,
3396 case 13: // otherwin
3397 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3398 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3399 offsetof(CPUSPARCState
,
3403 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3404 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3405 offsetof(CPUSPARCState
,
3408 case 16: // UA2005 gl
3409 CHECK_IU_FEATURE(dc
, GL
);
3410 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3411 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3412 offsetof(CPUSPARCState
, gl
));
3414 case 26: // UA2005 strand status
3415 CHECK_IU_FEATURE(dc
, HYPV
);
3416 if (!hypervisor(dc
))
3418 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3424 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3425 if (dc
->def
->nwindows
!= 32)
3426 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3427 (1 << dc
->def
->nwindows
) - 1);
3428 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3432 case 0x33: /* wrtbr, UA2005 wrhpr */
3434 #ifndef TARGET_SPARC64
3435 if (!supervisor(dc
))
3437 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3439 CHECK_IU_FEATURE(dc
, HYPV
);
3440 if (!hypervisor(dc
))
3442 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3445 // XXX gen_op_wrhpstate();
3446 save_state(dc
, cpu_cond
);
3452 // XXX gen_op_wrhtstate();
3455 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3458 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3460 case 31: // hstick_cmpr
3464 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3465 r_tickptr
= tcg_temp_new_ptr();
3466 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3467 offsetof(CPUState
, hstick
));
3468 gen_helper_tick_set_limit(r_tickptr
,
3470 tcg_temp_free_ptr(r_tickptr
);
3473 case 6: // hver readonly
3481 #ifdef TARGET_SPARC64
3482 case 0x2c: /* V9 movcc */
3484 int cc
= GET_FIELD_SP(insn
, 11, 12);
3485 int cond
= GET_FIELD_SP(insn
, 14, 17);
3489 r_cond
= tcg_temp_new();
3490 if (insn
& (1 << 18)) {
3492 gen_cond(r_cond
, 0, cond
, dc
);
3494 gen_cond(r_cond
, 1, cond
, dc
);
3498 gen_fcond(r_cond
, cc
, cond
);
3501 l1
= gen_new_label();
3503 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3504 if (IS_IMM
) { /* immediate */
3507 simm
= GET_FIELD_SPs(insn
, 0, 10);
3508 r_const
= tcg_const_tl(simm
);
3509 gen_movl_TN_reg(rd
, r_const
);
3510 tcg_temp_free(r_const
);
3512 rs2
= GET_FIELD_SP(insn
, 0, 4);
3513 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3514 gen_movl_TN_reg(rd
, cpu_tmp0
);
3517 tcg_temp_free(r_cond
);
3520 case 0x2d: /* V9 sdivx */
3521 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3522 gen_movl_TN_reg(rd
, cpu_dst
);
3524 case 0x2e: /* V9 popc */
3526 cpu_src2
= get_src2(insn
, cpu_src2
);
3527 gen_helper_popc(cpu_dst
, cpu_src2
);
3528 gen_movl_TN_reg(rd
, cpu_dst
);
3530 case 0x2f: /* V9 movr */
3532 int cond
= GET_FIELD_SP(insn
, 10, 12);
3535 cpu_src1
= get_src1(insn
, cpu_src1
);
3537 l1
= gen_new_label();
3539 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3541 if (IS_IMM
) { /* immediate */
3544 simm
= GET_FIELD_SPs(insn
, 0, 9);
3545 r_const
= tcg_const_tl(simm
);
3546 gen_movl_TN_reg(rd
, r_const
);
3547 tcg_temp_free(r_const
);
3549 rs2
= GET_FIELD_SP(insn
, 0, 4);
3550 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3551 gen_movl_TN_reg(rd
, cpu_tmp0
);
3561 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3562 #ifdef TARGET_SPARC64
3563 int opf
= GET_FIELD_SP(insn
, 5, 13);
3564 rs1
= GET_FIELD(insn
, 13, 17);
3565 rs2
= GET_FIELD(insn
, 27, 31);
3566 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3570 case 0x000: /* VIS I edge8cc */
3571 case 0x001: /* VIS II edge8n */
3572 case 0x002: /* VIS I edge8lcc */
3573 case 0x003: /* VIS II edge8ln */
3574 case 0x004: /* VIS I edge16cc */
3575 case 0x005: /* VIS II edge16n */
3576 case 0x006: /* VIS I edge16lcc */
3577 case 0x007: /* VIS II edge16ln */
3578 case 0x008: /* VIS I edge32cc */
3579 case 0x009: /* VIS II edge32n */
3580 case 0x00a: /* VIS I edge32lcc */
3581 case 0x00b: /* VIS II edge32ln */
3584 case 0x010: /* VIS I array8 */
3585 CHECK_FPU_FEATURE(dc
, VIS1
);
3586 cpu_src1
= get_src1(insn
, cpu_src1
);
3587 gen_movl_reg_TN(rs2
, cpu_src2
);
3588 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3589 gen_movl_TN_reg(rd
, cpu_dst
);
3591 case 0x012: /* VIS I array16 */
3592 CHECK_FPU_FEATURE(dc
, VIS1
);
3593 cpu_src1
= get_src1(insn
, cpu_src1
);
3594 gen_movl_reg_TN(rs2
, cpu_src2
);
3595 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3596 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3597 gen_movl_TN_reg(rd
, cpu_dst
);
3599 case 0x014: /* VIS I array32 */
3600 CHECK_FPU_FEATURE(dc
, VIS1
);
3601 cpu_src1
= get_src1(insn
, cpu_src1
);
3602 gen_movl_reg_TN(rs2
, cpu_src2
);
3603 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3604 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3605 gen_movl_TN_reg(rd
, cpu_dst
);
3607 case 0x018: /* VIS I alignaddr */
3608 CHECK_FPU_FEATURE(dc
, VIS1
);
3609 cpu_src1
= get_src1(insn
, cpu_src1
);
3610 gen_movl_reg_TN(rs2
, cpu_src2
);
3611 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3612 gen_movl_TN_reg(rd
, cpu_dst
);
3614 case 0x019: /* VIS II bmask */
3615 case 0x01a: /* VIS I alignaddrl */
3618 case 0x020: /* VIS I fcmple16 */
3619 CHECK_FPU_FEATURE(dc
, VIS1
);
3620 gen_op_load_fpr_DT0(DFPREG(rs1
));
3621 gen_op_load_fpr_DT1(DFPREG(rs2
));
3622 gen_helper_fcmple16();
3623 gen_op_store_DT0_fpr(DFPREG(rd
));
3625 case 0x022: /* VIS I fcmpne16 */
3626 CHECK_FPU_FEATURE(dc
, VIS1
);
3627 gen_op_load_fpr_DT0(DFPREG(rs1
));
3628 gen_op_load_fpr_DT1(DFPREG(rs2
));
3629 gen_helper_fcmpne16();
3630 gen_op_store_DT0_fpr(DFPREG(rd
));
3632 case 0x024: /* VIS I fcmple32 */
3633 CHECK_FPU_FEATURE(dc
, VIS1
);
3634 gen_op_load_fpr_DT0(DFPREG(rs1
));
3635 gen_op_load_fpr_DT1(DFPREG(rs2
));
3636 gen_helper_fcmple32();
3637 gen_op_store_DT0_fpr(DFPREG(rd
));
3639 case 0x026: /* VIS I fcmpne32 */
3640 CHECK_FPU_FEATURE(dc
, VIS1
);
3641 gen_op_load_fpr_DT0(DFPREG(rs1
));
3642 gen_op_load_fpr_DT1(DFPREG(rs2
));
3643 gen_helper_fcmpne32();
3644 gen_op_store_DT0_fpr(DFPREG(rd
));
3646 case 0x028: /* VIS I fcmpgt16 */
3647 CHECK_FPU_FEATURE(dc
, VIS1
);
3648 gen_op_load_fpr_DT0(DFPREG(rs1
));
3649 gen_op_load_fpr_DT1(DFPREG(rs2
));
3650 gen_helper_fcmpgt16();
3651 gen_op_store_DT0_fpr(DFPREG(rd
));
3653 case 0x02a: /* VIS I fcmpeq16 */
3654 CHECK_FPU_FEATURE(dc
, VIS1
);
3655 gen_op_load_fpr_DT0(DFPREG(rs1
));
3656 gen_op_load_fpr_DT1(DFPREG(rs2
));
3657 gen_helper_fcmpeq16();
3658 gen_op_store_DT0_fpr(DFPREG(rd
));
3660 case 0x02c: /* VIS I fcmpgt32 */
3661 CHECK_FPU_FEATURE(dc
, VIS1
);
3662 gen_op_load_fpr_DT0(DFPREG(rs1
));
3663 gen_op_load_fpr_DT1(DFPREG(rs2
));
3664 gen_helper_fcmpgt32();
3665 gen_op_store_DT0_fpr(DFPREG(rd
));
3667 case 0x02e: /* VIS I fcmpeq32 */
3668 CHECK_FPU_FEATURE(dc
, VIS1
);
3669 gen_op_load_fpr_DT0(DFPREG(rs1
));
3670 gen_op_load_fpr_DT1(DFPREG(rs2
));
3671 gen_helper_fcmpeq32();
3672 gen_op_store_DT0_fpr(DFPREG(rd
));
3674 case 0x031: /* VIS I fmul8x16 */
3675 CHECK_FPU_FEATURE(dc
, VIS1
);
3676 gen_op_load_fpr_DT0(DFPREG(rs1
));
3677 gen_op_load_fpr_DT1(DFPREG(rs2
));
3678 gen_helper_fmul8x16();
3679 gen_op_store_DT0_fpr(DFPREG(rd
));
3681 case 0x033: /* VIS I fmul8x16au */
3682 CHECK_FPU_FEATURE(dc
, VIS1
);
3683 gen_op_load_fpr_DT0(DFPREG(rs1
));
3684 gen_op_load_fpr_DT1(DFPREG(rs2
));
3685 gen_helper_fmul8x16au();
3686 gen_op_store_DT0_fpr(DFPREG(rd
));
3688 case 0x035: /* VIS I fmul8x16al */
3689 CHECK_FPU_FEATURE(dc
, VIS1
);
3690 gen_op_load_fpr_DT0(DFPREG(rs1
));
3691 gen_op_load_fpr_DT1(DFPREG(rs2
));
3692 gen_helper_fmul8x16al();
3693 gen_op_store_DT0_fpr(DFPREG(rd
));
3695 case 0x036: /* VIS I fmul8sux16 */
3696 CHECK_FPU_FEATURE(dc
, VIS1
);
3697 gen_op_load_fpr_DT0(DFPREG(rs1
));
3698 gen_op_load_fpr_DT1(DFPREG(rs2
));
3699 gen_helper_fmul8sux16();
3700 gen_op_store_DT0_fpr(DFPREG(rd
));
3702 case 0x037: /* VIS I fmul8ulx16 */
3703 CHECK_FPU_FEATURE(dc
, VIS1
);
3704 gen_op_load_fpr_DT0(DFPREG(rs1
));
3705 gen_op_load_fpr_DT1(DFPREG(rs2
));
3706 gen_helper_fmul8ulx16();
3707 gen_op_store_DT0_fpr(DFPREG(rd
));
3709 case 0x038: /* VIS I fmuld8sux16 */
3710 CHECK_FPU_FEATURE(dc
, VIS1
);
3711 gen_op_load_fpr_DT0(DFPREG(rs1
));
3712 gen_op_load_fpr_DT1(DFPREG(rs2
));
3713 gen_helper_fmuld8sux16();
3714 gen_op_store_DT0_fpr(DFPREG(rd
));
3716 case 0x039: /* VIS I fmuld8ulx16 */
3717 CHECK_FPU_FEATURE(dc
, VIS1
);
3718 gen_op_load_fpr_DT0(DFPREG(rs1
));
3719 gen_op_load_fpr_DT1(DFPREG(rs2
));
3720 gen_helper_fmuld8ulx16();
3721 gen_op_store_DT0_fpr(DFPREG(rd
));
3723 case 0x03a: /* VIS I fpack32 */
3724 case 0x03b: /* VIS I fpack16 */
3725 case 0x03d: /* VIS I fpackfix */
3726 case 0x03e: /* VIS I pdist */
3729 case 0x048: /* VIS I faligndata */
3730 CHECK_FPU_FEATURE(dc
, VIS1
);
3731 gen_op_load_fpr_DT0(DFPREG(rs1
));
3732 gen_op_load_fpr_DT1(DFPREG(rs2
));
3733 gen_helper_faligndata();
3734 gen_op_store_DT0_fpr(DFPREG(rd
));
3736 case 0x04b: /* VIS I fpmerge */
3737 CHECK_FPU_FEATURE(dc
, VIS1
);
3738 gen_op_load_fpr_DT0(DFPREG(rs1
));
3739 gen_op_load_fpr_DT1(DFPREG(rs2
));
3740 gen_helper_fpmerge();
3741 gen_op_store_DT0_fpr(DFPREG(rd
));
3743 case 0x04c: /* VIS II bshuffle */
3746 case 0x04d: /* VIS I fexpand */
3747 CHECK_FPU_FEATURE(dc
, VIS1
);
3748 gen_op_load_fpr_DT0(DFPREG(rs1
));
3749 gen_op_load_fpr_DT1(DFPREG(rs2
));
3750 gen_helper_fexpand();
3751 gen_op_store_DT0_fpr(DFPREG(rd
));
3753 case 0x050: /* VIS I fpadd16 */
3754 CHECK_FPU_FEATURE(dc
, VIS1
);
3755 gen_op_load_fpr_DT0(DFPREG(rs1
));
3756 gen_op_load_fpr_DT1(DFPREG(rs2
));
3757 gen_helper_fpadd16();
3758 gen_op_store_DT0_fpr(DFPREG(rd
));
3760 case 0x051: /* VIS I fpadd16s */
3761 CHECK_FPU_FEATURE(dc
, VIS1
);
3762 gen_helper_fpadd16s(cpu_fpr
[rd
],
3763 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3765 case 0x052: /* VIS I fpadd32 */
3766 CHECK_FPU_FEATURE(dc
, VIS1
);
3767 gen_op_load_fpr_DT0(DFPREG(rs1
));
3768 gen_op_load_fpr_DT1(DFPREG(rs2
));
3769 gen_helper_fpadd32();
3770 gen_op_store_DT0_fpr(DFPREG(rd
));
3772 case 0x053: /* VIS I fpadd32s */
3773 CHECK_FPU_FEATURE(dc
, VIS1
);
3774 gen_helper_fpadd32s(cpu_fpr
[rd
],
3775 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3777 case 0x054: /* VIS I fpsub16 */
3778 CHECK_FPU_FEATURE(dc
, VIS1
);
3779 gen_op_load_fpr_DT0(DFPREG(rs1
));
3780 gen_op_load_fpr_DT1(DFPREG(rs2
));
3781 gen_helper_fpsub16();
3782 gen_op_store_DT0_fpr(DFPREG(rd
));
3784 case 0x055: /* VIS I fpsub16s */
3785 CHECK_FPU_FEATURE(dc
, VIS1
);
3786 gen_helper_fpsub16s(cpu_fpr
[rd
],
3787 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3789 case 0x056: /* VIS I fpsub32 */
3790 CHECK_FPU_FEATURE(dc
, VIS1
);
3791 gen_op_load_fpr_DT0(DFPREG(rs1
));
3792 gen_op_load_fpr_DT1(DFPREG(rs2
));
3793 gen_helper_fpsub32();
3794 gen_op_store_DT0_fpr(DFPREG(rd
));
3796 case 0x057: /* VIS I fpsub32s */
3797 CHECK_FPU_FEATURE(dc
, VIS1
);
3798 gen_helper_fpsub32s(cpu_fpr
[rd
],
3799 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3801 case 0x060: /* VIS I fzero */
3802 CHECK_FPU_FEATURE(dc
, VIS1
);
3803 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3804 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3806 case 0x061: /* VIS I fzeros */
3807 CHECK_FPU_FEATURE(dc
, VIS1
);
3808 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3810 case 0x062: /* VIS I fnor */
3811 CHECK_FPU_FEATURE(dc
, VIS1
);
3812 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3813 cpu_fpr
[DFPREG(rs2
)]);
3814 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3815 cpu_fpr
[DFPREG(rs2
) + 1]);
3817 case 0x063: /* VIS I fnors */
3818 CHECK_FPU_FEATURE(dc
, VIS1
);
3819 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3821 case 0x064: /* VIS I fandnot2 */
3822 CHECK_FPU_FEATURE(dc
, VIS1
);
3823 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3824 cpu_fpr
[DFPREG(rs2
)]);
3825 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3826 cpu_fpr
[DFPREG(rs1
) + 1],
3827 cpu_fpr
[DFPREG(rs2
) + 1]);
3829 case 0x065: /* VIS I fandnot2s */
3830 CHECK_FPU_FEATURE(dc
, VIS1
);
3831 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3833 case 0x066: /* VIS I fnot2 */
3834 CHECK_FPU_FEATURE(dc
, VIS1
);
3835 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
3836 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3837 cpu_fpr
[DFPREG(rs2
) + 1]);
3839 case 0x067: /* VIS I fnot2s */
3840 CHECK_FPU_FEATURE(dc
, VIS1
);
3841 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3843 case 0x068: /* VIS I fandnot1 */
3844 CHECK_FPU_FEATURE(dc
, VIS1
);
3845 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3846 cpu_fpr
[DFPREG(rs1
)]);
3847 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3848 cpu_fpr
[DFPREG(rs2
) + 1],
3849 cpu_fpr
[DFPREG(rs1
) + 1]);
3851 case 0x069: /* VIS I fandnot1s */
3852 CHECK_FPU_FEATURE(dc
, VIS1
);
3853 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3855 case 0x06a: /* VIS I fnot1 */
3856 CHECK_FPU_FEATURE(dc
, VIS1
);
3857 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3858 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3859 cpu_fpr
[DFPREG(rs1
) + 1]);
3861 case 0x06b: /* VIS I fnot1s */
3862 CHECK_FPU_FEATURE(dc
, VIS1
);
3863 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3865 case 0x06c: /* VIS I fxor */
3866 CHECK_FPU_FEATURE(dc
, VIS1
);
3867 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3868 cpu_fpr
[DFPREG(rs2
)]);
3869 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3870 cpu_fpr
[DFPREG(rs1
) + 1],
3871 cpu_fpr
[DFPREG(rs2
) + 1]);
3873 case 0x06d: /* VIS I fxors */
3874 CHECK_FPU_FEATURE(dc
, VIS1
);
3875 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3877 case 0x06e: /* VIS I fnand */
3878 CHECK_FPU_FEATURE(dc
, VIS1
);
3879 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3880 cpu_fpr
[DFPREG(rs2
)]);
3881 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3882 cpu_fpr
[DFPREG(rs2
) + 1]);
3884 case 0x06f: /* VIS I fnands */
3885 CHECK_FPU_FEATURE(dc
, VIS1
);
3886 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3888 case 0x070: /* VIS I fand */
3889 CHECK_FPU_FEATURE(dc
, VIS1
);
3890 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3891 cpu_fpr
[DFPREG(rs2
)]);
3892 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3893 cpu_fpr
[DFPREG(rs1
) + 1],
3894 cpu_fpr
[DFPREG(rs2
) + 1]);
3896 case 0x071: /* VIS I fands */
3897 CHECK_FPU_FEATURE(dc
, VIS1
);
3898 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3900 case 0x072: /* VIS I fxnor */
3901 CHECK_FPU_FEATURE(dc
, VIS1
);
3902 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3903 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3904 cpu_fpr
[DFPREG(rs1
)]);
3905 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
3906 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3907 cpu_fpr
[DFPREG(rs1
) + 1]);
3909 case 0x073: /* VIS I fxnors */
3910 CHECK_FPU_FEATURE(dc
, VIS1
);
3911 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3912 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3914 case 0x074: /* VIS I fsrc1 */
3915 CHECK_FPU_FEATURE(dc
, VIS1
);
3916 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3917 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
3918 cpu_fpr
[DFPREG(rs1
) + 1]);
3920 case 0x075: /* VIS I fsrc1s */
3921 CHECK_FPU_FEATURE(dc
, VIS1
);
3922 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3924 case 0x076: /* VIS I fornot2 */
3925 CHECK_FPU_FEATURE(dc
, VIS1
);
3926 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3927 cpu_fpr
[DFPREG(rs2
)]);
3928 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3929 cpu_fpr
[DFPREG(rs1
) + 1],
3930 cpu_fpr
[DFPREG(rs2
) + 1]);
3932 case 0x077: /* VIS I fornot2s */
3933 CHECK_FPU_FEATURE(dc
, VIS1
);
3934 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3936 case 0x078: /* VIS I fsrc2 */
3937 CHECK_FPU_FEATURE(dc
, VIS1
);
3938 gen_op_load_fpr_DT0(DFPREG(rs2
));
3939 gen_op_store_DT0_fpr(DFPREG(rd
));
3941 case 0x079: /* VIS I fsrc2s */
3942 CHECK_FPU_FEATURE(dc
, VIS1
);
3943 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3945 case 0x07a: /* VIS I fornot1 */
3946 CHECK_FPU_FEATURE(dc
, VIS1
);
3947 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3948 cpu_fpr
[DFPREG(rs1
)]);
3949 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3950 cpu_fpr
[DFPREG(rs2
) + 1],
3951 cpu_fpr
[DFPREG(rs1
) + 1]);
3953 case 0x07b: /* VIS I fornot1s */
3954 CHECK_FPU_FEATURE(dc
, VIS1
);
3955 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3957 case 0x07c: /* VIS I for */
3958 CHECK_FPU_FEATURE(dc
, VIS1
);
3959 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3960 cpu_fpr
[DFPREG(rs2
)]);
3961 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
3962 cpu_fpr
[DFPREG(rs1
) + 1],
3963 cpu_fpr
[DFPREG(rs2
) + 1]);
3965 case 0x07d: /* VIS I fors */
3966 CHECK_FPU_FEATURE(dc
, VIS1
);
3967 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3969 case 0x07e: /* VIS I fone */
3970 CHECK_FPU_FEATURE(dc
, VIS1
);
3971 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
3972 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
3974 case 0x07f: /* VIS I fones */
3975 CHECK_FPU_FEATURE(dc
, VIS1
);
3976 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
3978 case 0x080: /* VIS I shutdown */
3979 case 0x081: /* VIS II siam */
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            TCGv_i32 r_const;

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38:  /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39:  /* rett, V9 return */
                {
                    TCGv_i32 r_const;

                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett();
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                gen_helper_flush(cpu_dst);
                break;
            case 0x3c:      /* save */
                save_state(dc, cpu_cond);
                gen_helper_save();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d:      /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e:      /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_done();
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_retry();
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        break;
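    /* op == 3: memory access instructions.  The effective address is
       computed into cpu_addr first, then each load/store form is
       dispatched on xop below. */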
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
4143 case 0x0: /* ld, V9 lduw, load unsigned word */
4144 gen_address_mask(dc
, cpu_addr
);
4145 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4147 case 0x1: /* ldub, load unsigned byte */
4148 gen_address_mask(dc
, cpu_addr
);
4149 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4151 case 0x2: /* lduh, load unsigned halfword */
4152 gen_address_mask(dc
, cpu_addr
);
4153 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4155 case 0x3: /* ldd, load double word */
4161 save_state(dc
, cpu_cond
);
4162 r_const
= tcg_const_i32(7);
4163 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4164 tcg_temp_free_i32(r_const
);
4165 gen_address_mask(dc
, cpu_addr
);
4166 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4167 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4168 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4169 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4170 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4171 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4172 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4175 case 0x9: /* ldsb, load signed byte */
4176 gen_address_mask(dc
, cpu_addr
);
4177 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4179 case 0xa: /* ldsh, load signed halfword */
4180 gen_address_mask(dc
, cpu_addr
);
4181 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4183 case 0xd: /* ldstub -- XXX: should be atomically */
4187 gen_address_mask(dc
, cpu_addr
);
4188 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4189 r_const
= tcg_const_tl(0xff);
4190 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4191 tcg_temp_free(r_const
);
4194 case 0x0f: /* swap, swap register with memory. Also
4196 CHECK_IU_FEATURE(dc
, SWAP
);
4197 gen_movl_reg_TN(rd
, cpu_val
);
4198 gen_address_mask(dc
, cpu_addr
);
4199 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4200 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4201 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4203 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4204 case 0x10: /* lda, V9 lduwa, load word alternate */
4205 #ifndef TARGET_SPARC64
4208 if (!supervisor(dc
))
4211 save_state(dc
, cpu_cond
);
4212 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4214 case 0x11: /* lduba, load unsigned byte alternate */
4215 #ifndef TARGET_SPARC64
4218 if (!supervisor(dc
))
4221 save_state(dc
, cpu_cond
);
4222 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4224 case 0x12: /* lduha, load unsigned halfword alternate */
4225 #ifndef TARGET_SPARC64
4228 if (!supervisor(dc
))
4231 save_state(dc
, cpu_cond
);
4232 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4234 case 0x13: /* ldda, load double word alternate */
4235 #ifndef TARGET_SPARC64
4238 if (!supervisor(dc
))
4243 save_state(dc
, cpu_cond
);
4244 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4246 case 0x19: /* ldsba, load signed byte alternate */
4247 #ifndef TARGET_SPARC64
4250 if (!supervisor(dc
))
4253 save_state(dc
, cpu_cond
);
4254 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4256 case 0x1a: /* ldsha, load signed halfword alternate */
4257 #ifndef TARGET_SPARC64
4260 if (!supervisor(dc
))
4263 save_state(dc
, cpu_cond
);
4264 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4266 case 0x1d: /* ldstuba -- XXX: should be atomically */
4267 #ifndef TARGET_SPARC64
4270 if (!supervisor(dc
))
4273 save_state(dc
, cpu_cond
);
4274 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4276 case 0x1f: /* swapa, swap reg with alt. memory. Also
4278 CHECK_IU_FEATURE(dc
, SWAP
);
4279 #ifndef TARGET_SPARC64
4282 if (!supervisor(dc
))
4285 save_state(dc
, cpu_cond
);
4286 gen_movl_reg_TN(rd
, cpu_val
);
4287 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4290 #ifndef TARGET_SPARC64
4291 case 0x30: /* ldc */
4292 case 0x31: /* ldcsr */
4293 case 0x33: /* lddc */
4297 #ifdef TARGET_SPARC64
4298 case 0x08: /* V9 ldsw */
4299 gen_address_mask(dc
, cpu_addr
);
4300 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4302 case 0x0b: /* V9 ldx */
4303 gen_address_mask(dc
, cpu_addr
);
4304 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4306 case 0x18: /* V9 ldswa */
4307 save_state(dc
, cpu_cond
);
4308 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4310 case 0x1b: /* V9 ldxa */
4311 save_state(dc
, cpu_cond
);
4312 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4314 case 0x2d: /* V9 prefetch, no effect */
4316 case 0x30: /* V9 ldfa */
4317 save_state(dc
, cpu_cond
);
4318 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4320 case 0x33: /* V9 lddfa */
4321 save_state(dc
, cpu_cond
);
4322 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4324 case 0x3d: /* V9 prefetcha, no effect */
4326 case 0x32: /* V9 ldqfa */
4327 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4328 save_state(dc
, cpu_cond
);
4329 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4335 gen_movl_TN_reg(rd
, cpu_val
);
4336 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4339 } else if (xop
>= 0x20 && xop
< 0x24) {
4340 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4342 save_state(dc
, cpu_cond
);
4344 case 0x20: /* ldf, load fpreg */
4345 gen_address_mask(dc
, cpu_addr
);
4346 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4347 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4349 case 0x21: /* ldfsr, V9 ldxfsr */
4350 #ifdef TARGET_SPARC64
4351 gen_address_mask(dc
, cpu_addr
);
4353 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4354 gen_helper_ldxfsr(cpu_tmp64
);
4358 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4359 gen_helper_ldfsr(cpu_tmp32
);
4363 case 0x22: /* ldqf, load quad fpreg */
4367 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4368 r_const
= tcg_const_i32(dc
->mem_idx
);
4369 gen_helper_ldqf(cpu_addr
, r_const
);
4370 tcg_temp_free_i32(r_const
);
4371 gen_op_store_QT0_fpr(QFPREG(rd
));
4374 case 0x23: /* lddf, load double fpreg */
4378 r_const
= tcg_const_i32(dc
->mem_idx
);
4379 gen_helper_lddf(cpu_addr
, r_const
);
4380 tcg_temp_free_i32(r_const
);
4381 gen_op_store_DT0_fpr(DFPREG(rd
));
4387 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4388 xop
== 0xe || xop
== 0x1e) {
4389 gen_movl_reg_TN(rd
, cpu_val
);
4391 case 0x4: /* st, store word */
4392 gen_address_mask(dc
, cpu_addr
);
4393 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4395 case 0x5: /* stb, store byte */
4396 gen_address_mask(dc
, cpu_addr
);
4397 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4399 case 0x6: /* sth, store halfword */
4400 gen_address_mask(dc
, cpu_addr
);
4401 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4403 case 0x7: /* std, store double word */
4409 save_state(dc
, cpu_cond
);
4410 gen_address_mask(dc
, cpu_addr
);
4411 r_const
= tcg_const_i32(7);
4412 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4413 tcg_temp_free_i32(r_const
);
4414 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4415 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4416 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4419 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4420 case 0x14: /* sta, V9 stwa, store word alternate */
4421 #ifndef TARGET_SPARC64
4424 if (!supervisor(dc
))
4427 save_state(dc
, cpu_cond
);
4428 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4430 case 0x15: /* stba, store byte alternate */
4431 #ifndef TARGET_SPARC64
4434 if (!supervisor(dc
))
4437 save_state(dc
, cpu_cond
);
4438 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4440 case 0x16: /* stha, store halfword alternate */
4441 #ifndef TARGET_SPARC64
4444 if (!supervisor(dc
))
4447 save_state(dc
, cpu_cond
);
4448 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4450 case 0x17: /* stda, store double word alternate */
4451 #ifndef TARGET_SPARC64
4454 if (!supervisor(dc
))
4460 save_state(dc
, cpu_cond
);
4461 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4465 #ifdef TARGET_SPARC64
4466 case 0x0e: /* V9 stx */
4467 gen_address_mask(dc
, cpu_addr
);
4468 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4470 case 0x1e: /* V9 stxa */
4471 save_state(dc
, cpu_cond
);
4472 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4478 } else if (xop
> 0x23 && xop
< 0x28) {
4479 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4481 save_state(dc
, cpu_cond
);
4483 case 0x24: /* stf, store fpreg */
4484 gen_address_mask(dc
, cpu_addr
);
4485 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4486 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4488 case 0x25: /* stfsr, V9 stxfsr */
4489 #ifdef TARGET_SPARC64
4490 gen_address_mask(dc
, cpu_addr
);
4491 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4493 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4495 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4497 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4498 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4502 #ifdef TARGET_SPARC64
4503 /* V9 stqf, store quad fpreg */
4507 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4508 gen_op_load_fpr_QT0(QFPREG(rd
));
4509 r_const
= tcg_const_i32(dc
->mem_idx
);
4510 gen_helper_stqf(cpu_addr
, r_const
);
4511 tcg_temp_free_i32(r_const
);
4514 #else /* !TARGET_SPARC64 */
4515 /* stdfq, store floating point queue */
4516 #if defined(CONFIG_USER_ONLY)
4519 if (!supervisor(dc
))
4521 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4526 case 0x27: /* stdf, store double fpreg */
4530 gen_op_load_fpr_DT0(DFPREG(rd
));
4531 r_const
= tcg_const_i32(dc
->mem_idx
);
4532 gen_helper_stdf(cpu_addr
, r_const
);
4533 tcg_temp_free_i32(r_const
);
4539 } else if (xop
> 0x33 && xop
< 0x3f) {
4540 save_state(dc
, cpu_cond
);
4542 #ifdef TARGET_SPARC64
4543 case 0x34: /* V9 stfa */
4544 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4546 case 0x36: /* V9 stqfa */
4550 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4551 r_const
= tcg_const_i32(7);
4552 gen_helper_check_align(cpu_addr
, r_const
);
4553 tcg_temp_free_i32(r_const
);
4554 gen_op_load_fpr_QT0(QFPREG(rd
));
4555 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4558 case 0x37: /* V9 stdfa */
4559 gen_op_load_fpr_DT0(DFPREG(rd
));
4560 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4562 case 0x3c: /* V9 casa */
4563 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4564 gen_movl_TN_reg(rd
, cpu_val
);
4566 case 0x3e: /* V9 casxa */
4567 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4568 gen_movl_TN_reg(rd
, cpu_val
);
4571 case 0x34: /* stc */
4572 case 0x35: /* stcsr */
4573 case 0x36: /* stdcq */
4574 case 0x37: /* stdc */
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
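/* Translate one block of guest code: disassemble SPARC instructions into TCG
   ops until a branch ends the block, a page boundary is reached, the opcode
   buffer fills up, or single-step mode limits the block to one instruction. */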
4664 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4665 int spc
, CPUSPARCState
*env
)
4667 target_ulong pc_start
, last_pc
;
4668 uint16_t *gen_opc_end
;
4669 DisasContext dc1
, *dc
= &dc1
;
4675 memset(dc
, 0, sizeof(DisasContext
));
4680 dc
->npc
= (target_ulong
) tb
->cs_base
;
4681 dc
->cc_op
= CC_OP_DYNAMIC
;
4682 dc
->mem_idx
= cpu_mmu_index(env
);
4684 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4685 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4687 dc
->fpu_enabled
= 0;
4688 #ifdef TARGET_SPARC64
4689 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4691 dc
->singlestep
= (env
->singlestep_enabled
|| singlestep
);
4692 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4694 cpu_tmp0
= tcg_temp_new();
4695 cpu_tmp32
= tcg_temp_new_i32();
4696 cpu_tmp64
= tcg_temp_new_i64();
4698 cpu_dst
= tcg_temp_local_new();
4701 cpu_val
= tcg_temp_local_new();
4702 cpu_addr
= tcg_temp_local_new();
4705 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4707 max_insns
= CF_COUNT_MASK
;
4710 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
4711 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4712 if (bp
->pc
== dc
->pc
) {
4713 if (dc
->pc
!= pc_start
)
4714 save_state(dc
, cpu_cond
);
4723 qemu_log("Search PC...\n");
4724 j
= gen_opc_ptr
- gen_opc_buf
;
4728 gen_opc_instr_start
[lj
++] = 0;
4729 gen_opc_pc
[lj
] = dc
->pc
;
4730 gen_opc_npc
[lj
] = dc
->npc
;
4731 gen_opc_instr_start
[lj
] = 1;
4732 gen_opc_icount
[lj
] = num_insns
;
4735 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4738 disas_sparc_insn(dc
);
4743 /* if the next PC is different, we abort now */
4744 if (dc
->pc
!= (last_pc
+ 4))
4746 /* if we reach a page boundary, we stop generation so that the
4747 PC of a TT_TFAULT exception is always in the right page */
4748 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4750 /* if single step mode, we generate only one instruction and
4751 generate an exception */
4752 if (dc
->singlestep
) {
4755 } while ((gen_opc_ptr
< gen_opc_end
) &&
4756 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4757 num_insns
< max_insns
);
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
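    /* Block epilogue: if both pc and npc are known at translation time we can
       chain directly to the next TB; otherwise write them back to the CPU
       state before leaving the block. */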
    if (dc->pc != DYNAMIC_PC &&
        (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
        /* static PC and NPC: we can use direct chaining */
        gen_goto_tb(dc, 0, dc->pc, dc->npc);
    } else {
        if (dc->pc != DYNAMIC_PC)
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        save_npc(dc, cpu_cond);
        tcg_gen_exit_tb(0);
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
    }
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
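/* One-time initialization of the TCG globals that mirror the SPARC
   architectural registers stored in CPUState. */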
void gen_intermediate_code_init(CPUSPARCState *env)
{
    int i;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
    cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
    cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                     "asi");
    cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                      "fprs");
    cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                 "gsr");
    cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, tick_cmpr),
                                       "tick_cmpr");
    cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, stick_cmpr),
                                        "stick_cmpr");
    cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, hstick_cmpr),
                                         "hstick_cmpr");
    cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                   "hintp");
    cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                  "htba");
    cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                  "hver");
    cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, ssr), "ssr");
    cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, version), "ver");
    cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                         offsetof(CPUState, softint),
                                         "softint");
#else
    cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                 "wim");
#endif
    cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                  "cond");
    cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                    "cc_src");
    cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, cc_src2),
                                     "cc_src2");
    cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                    "cc_dst");
    cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                       "cc_op");
    cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
    cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                 "fsr");
    cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                "pc");
    cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                 "npc");
    cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
    cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                 "tbr");
#endif
    for (i = 1; i < 8; i++)
        cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                          offsetof(CPUState, gregs[i]),
                                          gregnames[i]);
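    /* %g0 is architecturally hardwired to zero, so the loop above starts at 1
       and no TCG global is created for it (gregnames[0] is NULL). */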
    for (i = 0; i < TARGET_FPREGS; i++)
        cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, fpr[i]),
                                            fregnames[i]);
    /* register helpers */

#define GEN_HELPER 2
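/* Recover the guest pc/npc for the TCG op at pc_pos after a fault or
   interrupt; the values come from the gen_opc_* tables that were filled in
   during translation in PC-search mode. */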
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }