4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
38 #define DYNAMIC_PC 1 /* dynamic pc value */
39 #define JUMP_PC 2 /* dynamic pc value which takes only two values
40 according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env
, cpu_regwptr
;
44 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
45 static TCGv_i32 cpu_cc_op
;
46 static TCGv_i32 cpu_psr
;
47 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond
, cpu_dst
, cpu_addr
, cpu_val
;
54 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
56 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
57 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
58 static TCGv_i32 cpu_softint
;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32
;
65 static TCGv_i64 cpu_tmp64
;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
69 static target_ulong gen_opc_npc
[OPC_BUF_SIZE
];
70 static target_ulong gen_opc_jump_pc
[2];
72 #include "gen-icount.h"
74 typedef struct DisasContext
{
75 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
76 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
81 int address_mask_32bit
;
83 uint32_t cc_op
; /* current CC operation */
84 struct TranslationBlock
*tb
;
88 // This function uses non-native bit order
89 #define GET_FIELD(X, FROM, TO) \
90 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
92 // This function uses the order in the manuals, i.e. bit 0 is 2^0
93 #define GET_FIELD_SP(X, FROM, TO) \
94 GET_FIELD(X, 31 - (TO), 31 - (FROM))
96 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
97 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
100 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
101 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
103 #define DFPREG(r) (r & 0x1e)
104 #define QFPREG(r) (r & 0x1c)
107 #define UA2005_HTRAP_MASK 0xff
108 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X to a full int.
 * The classic "(x << (32 - len)) >> (32 - len)" idiom is undefined
 * behavior in C (left-shifting into/past the sign bit, and a shift
 * count of 32 when len == 0); this version is fully defined for
 * 1 <= len <= 32 and returns X unchanged when LEN covers the whole word.
 */
static int sign_extend(int x, int len)
{
    if (len <= 0 || len >= 32) {
        return x;
    }
    uint32_t field = (uint32_t)x & ((1u << len) - 1u); /* keep low len bits */
    uint32_t sign = 1u << (len - 1);                   /* sign bit of the field */
    /* (field ^ sign) - sign propagates the field's sign bit upward. */
    return (int)(field ^ sign) - (int)sign;
}
116 #define IS_IMM (insn & (1<<13))
118 /* floating point registers moves */
119 static void gen_op_load_fpr_DT0(unsigned int src
)
121 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
122 offsetof(CPU_DoubleU
, l
.upper
));
123 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
124 offsetof(CPU_DoubleU
, l
.lower
));
127 static void gen_op_load_fpr_DT1(unsigned int src
)
129 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
130 offsetof(CPU_DoubleU
, l
.upper
));
131 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
132 offsetof(CPU_DoubleU
, l
.lower
));
135 static void gen_op_store_DT0_fpr(unsigned int dst
)
137 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
138 offsetof(CPU_DoubleU
, l
.upper
));
139 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
140 offsetof(CPU_DoubleU
, l
.lower
));
143 static void gen_op_load_fpr_QT0(unsigned int src
)
145 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
146 offsetof(CPU_QuadU
, l
.upmost
));
147 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
148 offsetof(CPU_QuadU
, l
.upper
));
149 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
150 offsetof(CPU_QuadU
, l
.lower
));
151 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
152 offsetof(CPU_QuadU
, l
.lowest
));
155 static void gen_op_load_fpr_QT1(unsigned int src
)
157 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
158 offsetof(CPU_QuadU
, l
.upmost
));
159 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
160 offsetof(CPU_QuadU
, l
.upper
));
161 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
162 offsetof(CPU_QuadU
, l
.lower
));
163 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
164 offsetof(CPU_QuadU
, l
.lowest
));
167 static void gen_op_store_QT0_fpr(unsigned int dst
)
169 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
170 offsetof(CPU_QuadU
, l
.upmost
));
171 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
172 offsetof(CPU_QuadU
, l
.upper
));
173 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
174 offsetof(CPU_QuadU
, l
.lower
));
175 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
176 offsetof(CPU_QuadU
, l
.lowest
));
180 #ifdef CONFIG_USER_ONLY
181 #define supervisor(dc) 0
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) 0
186 #define supervisor(dc) (dc->mem_idx >= 1)
187 #ifdef TARGET_SPARC64
188 #define hypervisor(dc) (dc->mem_idx == 2)
193 #ifdef TARGET_SPARC64
195 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
197 #define AM_CHECK(dc) (1)
201 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
203 #ifdef TARGET_SPARC64
205 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
209 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
212 tcg_gen_movi_tl(tn
, 0);
214 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
216 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
220 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
225 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
227 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
231 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
232 target_ulong pc
, target_ulong npc
)
234 TranslationBlock
*tb
;
237 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
238 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
240 /* jump to same page: we can use a direct jump */
241 tcg_gen_goto_tb(tb_num
);
242 tcg_gen_movi_tl(cpu_pc
, pc
);
243 tcg_gen_movi_tl(cpu_npc
, npc
);
244 tcg_gen_exit_tb((long)tb
+ tb_num
);
246 /* jump to another page: currently not optimized */
247 tcg_gen_movi_tl(cpu_pc
, pc
);
248 tcg_gen_movi_tl(cpu_npc
, npc
);
254 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
256 tcg_gen_extu_i32_tl(reg
, src
);
257 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
258 tcg_gen_andi_tl(reg
, reg
, 0x1);
261 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
263 tcg_gen_extu_i32_tl(reg
, src
);
264 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
265 tcg_gen_andi_tl(reg
, reg
, 0x1);
268 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
270 tcg_gen_extu_i32_tl(reg
, src
);
271 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
272 tcg_gen_andi_tl(reg
, reg
, 0x1);
275 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
277 tcg_gen_extu_i32_tl(reg
, src
);
278 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
279 tcg_gen_andi_tl(reg
, reg
, 0x1);
282 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
288 l1
= gen_new_label();
290 r_temp
= tcg_temp_new();
291 tcg_gen_xor_tl(r_temp
, src1
, src2
);
292 tcg_gen_not_tl(r_temp
, r_temp
);
293 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
294 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
295 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
296 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
297 r_const
= tcg_const_i32(TT_TOVF
);
298 gen_helper_raise_exception(r_const
);
299 tcg_temp_free_i32(r_const
);
301 tcg_temp_free(r_temp
);
304 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
309 l1
= gen_new_label();
310 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
311 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
312 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
313 r_const
= tcg_const_i32(TT_TOVF
);
314 gen_helper_raise_exception(r_const
);
315 tcg_temp_free_i32(r_const
);
319 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
321 tcg_gen_mov_tl(cpu_cc_src
, src1
);
322 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
323 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
324 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
327 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
329 tcg_gen_mov_tl(cpu_cc_src
, src1
);
330 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
331 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
332 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
335 static inline void gen_op_addxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
337 tcg_gen_mov_tl(cpu_cc_src
, src1
);
338 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
339 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
340 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
341 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
342 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
345 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
347 tcg_gen_mov_tl(cpu_cc_src
, src1
);
348 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
349 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
350 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
351 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
352 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
355 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
357 tcg_gen_mov_tl(cpu_cc_src
, src1
);
358 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
359 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
360 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
363 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
365 tcg_gen_mov_tl(cpu_cc_src
, src1
);
366 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
367 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
368 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
369 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
370 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
373 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
379 l1
= gen_new_label();
381 r_temp
= tcg_temp_new();
382 tcg_gen_xor_tl(r_temp
, src1
, src2
);
383 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
384 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
385 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
386 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
387 r_const
= tcg_const_i32(TT_TOVF
);
388 gen_helper_raise_exception(r_const
);
389 tcg_temp_free_i32(r_const
);
391 tcg_temp_free(r_temp
);
394 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
396 tcg_gen_mov_tl(cpu_cc_src
, src1
);
397 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
399 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
400 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
401 dc
->cc_op
= CC_OP_LOGIC
;
403 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
404 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
405 dc
->cc_op
= CC_OP_SUB
;
407 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
410 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
412 tcg_gen_mov_tl(cpu_cc_src
, src1
);
413 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
414 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
415 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
418 static inline void gen_op_subxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
420 tcg_gen_mov_tl(cpu_cc_src
, src1
);
421 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
422 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
423 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
424 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
425 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
428 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
430 tcg_gen_mov_tl(cpu_cc_src
, src1
);
431 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
432 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
433 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
434 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
435 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
438 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
440 tcg_gen_mov_tl(cpu_cc_src
, src1
);
441 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
442 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
443 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
446 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
448 tcg_gen_mov_tl(cpu_cc_src
, src1
);
449 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
450 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
451 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
452 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
453 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
456 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
461 l1
= gen_new_label();
462 r_temp
= tcg_temp_new();
468 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
469 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
470 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
471 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
472 tcg_gen_movi_tl(cpu_cc_src2
, 0);
476 // env->y = (b2 << 31) | (env->y >> 1);
477 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
478 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
479 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
480 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
481 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
482 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
485 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
486 gen_mov_reg_V(r_temp
, cpu_psr
);
487 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
488 tcg_temp_free(r_temp
);
490 // T0 = (b1 << 31) | (T0 >> 1);
492 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
493 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
494 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
496 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
498 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
501 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
503 TCGv_i64 r_temp
, r_temp2
;
505 r_temp
= tcg_temp_new_i64();
506 r_temp2
= tcg_temp_new_i64();
508 tcg_gen_extu_tl_i64(r_temp
, src2
);
509 tcg_gen_extu_tl_i64(r_temp2
, src1
);
510 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
512 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
513 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
514 tcg_temp_free_i64(r_temp
);
515 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
516 #ifdef TARGET_SPARC64
517 tcg_gen_mov_i64(dst
, r_temp2
);
519 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
521 tcg_temp_free_i64(r_temp2
);
524 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
526 TCGv_i64 r_temp
, r_temp2
;
528 r_temp
= tcg_temp_new_i64();
529 r_temp2
= tcg_temp_new_i64();
531 tcg_gen_ext_tl_i64(r_temp
, src2
);
532 tcg_gen_ext_tl_i64(r_temp2
, src1
);
533 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
535 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
536 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
537 tcg_temp_free_i64(r_temp
);
538 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
539 #ifdef TARGET_SPARC64
540 tcg_gen_mov_i64(dst
, r_temp2
);
542 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
544 tcg_temp_free_i64(r_temp2
);
547 #ifdef TARGET_SPARC64
548 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
553 l1
= gen_new_label();
554 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
555 r_const
= tcg_const_i32(TT_DIV_ZERO
);
556 gen_helper_raise_exception(r_const
);
557 tcg_temp_free_i32(r_const
);
561 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
565 l1
= gen_new_label();
566 l2
= gen_new_label();
567 tcg_gen_mov_tl(cpu_cc_src
, src1
);
568 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
569 gen_trap_ifdivzero_tl(cpu_cc_src2
);
570 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
571 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
572 tcg_gen_movi_i64(dst
, INT64_MIN
);
575 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
581 static inline void gen_op_eval_ba(TCGv dst
)
583 tcg_gen_movi_tl(dst
, 1);
587 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
589 gen_mov_reg_Z(dst
, src
);
593 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
595 gen_mov_reg_N(cpu_tmp0
, src
);
596 gen_mov_reg_V(dst
, src
);
597 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
598 gen_mov_reg_Z(cpu_tmp0
, src
);
599 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
603 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
605 gen_mov_reg_V(cpu_tmp0
, src
);
606 gen_mov_reg_N(dst
, src
);
607 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
611 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
613 gen_mov_reg_Z(cpu_tmp0
, src
);
614 gen_mov_reg_C(dst
, src
);
615 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
619 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
621 gen_mov_reg_C(dst
, src
);
625 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
627 gen_mov_reg_V(dst
, src
);
631 static inline void gen_op_eval_bn(TCGv dst
)
633 tcg_gen_movi_tl(dst
, 0);
637 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
639 gen_mov_reg_N(dst
, src
);
643 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
645 gen_mov_reg_Z(dst
, src
);
646 tcg_gen_xori_tl(dst
, dst
, 0x1);
650 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
652 gen_mov_reg_N(cpu_tmp0
, src
);
653 gen_mov_reg_V(dst
, src
);
654 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
655 gen_mov_reg_Z(cpu_tmp0
, src
);
656 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
657 tcg_gen_xori_tl(dst
, dst
, 0x1);
661 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
663 gen_mov_reg_V(cpu_tmp0
, src
);
664 gen_mov_reg_N(dst
, src
);
665 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
666 tcg_gen_xori_tl(dst
, dst
, 0x1);
670 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
672 gen_mov_reg_Z(cpu_tmp0
, src
);
673 gen_mov_reg_C(dst
, src
);
674 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
675 tcg_gen_xori_tl(dst
, dst
, 0x1);
679 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
681 gen_mov_reg_C(dst
, src
);
682 tcg_gen_xori_tl(dst
, dst
, 0x1);
686 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
688 gen_mov_reg_N(dst
, src
);
689 tcg_gen_xori_tl(dst
, dst
, 0x1);
693 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
695 gen_mov_reg_V(dst
, src
);
696 tcg_gen_xori_tl(dst
, dst
, 0x1);
700 FPSR bit field FCC1 | FCC0:
706 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
707 unsigned int fcc_offset
)
709 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
710 tcg_gen_andi_tl(reg
, reg
, 0x1);
713 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
714 unsigned int fcc_offset
)
716 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
717 tcg_gen_andi_tl(reg
, reg
, 0x1);
721 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
722 unsigned int fcc_offset
)
724 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
725 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
726 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
729 // 1 or 2: FCC0 ^ FCC1
730 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
731 unsigned int fcc_offset
)
733 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
734 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
735 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
739 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
740 unsigned int fcc_offset
)
742 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
746 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
747 unsigned int fcc_offset
)
749 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
750 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
751 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
752 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
756 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
757 unsigned int fcc_offset
)
759 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
763 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
764 unsigned int fcc_offset
)
766 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
767 tcg_gen_xori_tl(dst
, dst
, 0x1);
768 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
769 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
773 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
774 unsigned int fcc_offset
)
776 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
777 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
778 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
782 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
783 unsigned int fcc_offset
)
785 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
786 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
787 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
788 tcg_gen_xori_tl(dst
, dst
, 0x1);
791 // 0 or 3: !(FCC0 ^ FCC1)
792 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
793 unsigned int fcc_offset
)
795 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
796 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
797 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
798 tcg_gen_xori_tl(dst
, dst
, 0x1);
802 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
803 unsigned int fcc_offset
)
805 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
806 tcg_gen_xori_tl(dst
, dst
, 0x1);
809 // !1: !(FCC0 & !FCC1)
810 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
811 unsigned int fcc_offset
)
813 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
814 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
815 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
816 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
817 tcg_gen_xori_tl(dst
, dst
, 0x1);
821 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
822 unsigned int fcc_offset
)
824 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
825 tcg_gen_xori_tl(dst
, dst
, 0x1);
828 // !2: !(!FCC0 & FCC1)
829 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
830 unsigned int fcc_offset
)
832 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
833 tcg_gen_xori_tl(dst
, dst
, 0x1);
834 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
835 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
836 tcg_gen_xori_tl(dst
, dst
, 0x1);
839 // !3: !(FCC0 & FCC1)
840 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
841 unsigned int fcc_offset
)
843 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
844 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
845 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
846 tcg_gen_xori_tl(dst
, dst
, 0x1);
849 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
850 target_ulong pc2
, TCGv r_cond
)
854 l1
= gen_new_label();
856 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
858 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
861 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
864 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
865 target_ulong pc2
, TCGv r_cond
)
869 l1
= gen_new_label();
871 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
873 gen_goto_tb(dc
, 0, pc2
, pc1
);
876 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
879 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
884 l1
= gen_new_label();
885 l2
= gen_new_label();
887 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
889 tcg_gen_movi_tl(cpu_npc
, npc1
);
893 tcg_gen_movi_tl(cpu_npc
, npc2
);
897 /* call this function before using the condition register as it may
898 have been set for a jump */
899 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
901 if (dc
->npc
== JUMP_PC
) {
902 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
903 dc
->npc
= DYNAMIC_PC
;
907 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
909 if (dc
->npc
== JUMP_PC
) {
910 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
911 dc
->npc
= DYNAMIC_PC
;
912 } else if (dc
->npc
!= DYNAMIC_PC
) {
913 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
917 static inline void save_state(DisasContext
*dc
, TCGv cond
)
919 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
920 /* flush pending conditional evaluations before exposing cpu state */
921 if (dc
->cc_op
!= CC_OP_FLAGS
) {
922 dc
->cc_op
= CC_OP_FLAGS
;
923 gen_helper_compute_psr();
928 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
930 if (dc
->npc
== JUMP_PC
) {
931 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
932 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
934 } else if (dc
->npc
== DYNAMIC_PC
) {
935 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
942 static inline void gen_op_next_insn(void)
944 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
945 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
948 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
953 #ifdef TARGET_SPARC64
965 gen_helper_compute_psr();
966 dc
->cc_op
= CC_OP_FLAGS
;
971 gen_op_eval_bn(r_dst
);
974 gen_op_eval_be(r_dst
, r_src
);
977 gen_op_eval_ble(r_dst
, r_src
);
980 gen_op_eval_bl(r_dst
, r_src
);
983 gen_op_eval_bleu(r_dst
, r_src
);
986 gen_op_eval_bcs(r_dst
, r_src
);
989 gen_op_eval_bneg(r_dst
, r_src
);
992 gen_op_eval_bvs(r_dst
, r_src
);
995 gen_op_eval_ba(r_dst
);
998 gen_op_eval_bne(r_dst
, r_src
);
1001 gen_op_eval_bg(r_dst
, r_src
);
1004 gen_op_eval_bge(r_dst
, r_src
);
1007 gen_op_eval_bgu(r_dst
, r_src
);
1010 gen_op_eval_bcc(r_dst
, r_src
);
1013 gen_op_eval_bpos(r_dst
, r_src
);
1016 gen_op_eval_bvc(r_dst
, r_src
);
1021 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1023 unsigned int offset
;
1043 gen_op_eval_bn(r_dst
);
1046 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1049 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1052 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1055 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1058 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1061 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1064 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1067 gen_op_eval_ba(r_dst
);
1070 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1073 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1076 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1079 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1082 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1085 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1088 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1093 #ifdef TARGET_SPARC64
1095 static const int gen_tcg_cond_reg
[8] = {
1106 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1110 l1
= gen_new_label();
1111 tcg_gen_movi_tl(r_dst
, 0);
1112 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1113 tcg_gen_movi_tl(r_dst
, 1);
1118 /* XXX: potentially incorrect if dynamic npc */
1119 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1122 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1123 target_ulong target
= dc
->pc
+ offset
;
1126 /* unconditional not taken */
1128 dc
->pc
= dc
->npc
+ 4;
1129 dc
->npc
= dc
->pc
+ 4;
1132 dc
->npc
= dc
->pc
+ 4;
1134 } else if (cond
== 0x8) {
1135 /* unconditional taken */
1138 dc
->npc
= dc
->pc
+ 4;
1142 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1145 flush_cond(dc
, r_cond
);
1146 gen_cond(r_cond
, cc
, cond
, dc
);
1148 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1152 dc
->jump_pc
[0] = target
;
1153 dc
->jump_pc
[1] = dc
->npc
+ 4;
1159 /* XXX: potentially incorrect if dynamic npc */
1160 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1163 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1164 target_ulong target
= dc
->pc
+ offset
;
1167 /* unconditional not taken */
1169 dc
->pc
= dc
->npc
+ 4;
1170 dc
->npc
= dc
->pc
+ 4;
1173 dc
->npc
= dc
->pc
+ 4;
1175 } else if (cond
== 0x8) {
1176 /* unconditional taken */
1179 dc
->npc
= dc
->pc
+ 4;
1183 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1186 flush_cond(dc
, r_cond
);
1187 gen_fcond(r_cond
, cc
, cond
);
1189 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1193 dc
->jump_pc
[0] = target
;
1194 dc
->jump_pc
[1] = dc
->npc
+ 4;
1200 #ifdef TARGET_SPARC64
1201 /* XXX: potentially incorrect if dynamic npc */
1202 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1203 TCGv r_cond
, TCGv r_reg
)
1205 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1206 target_ulong target
= dc
->pc
+ offset
;
1208 flush_cond(dc
, r_cond
);
1209 gen_cond_reg(r_cond
, cond
, r_reg
);
1211 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1215 dc
->jump_pc
[0] = target
;
1216 dc
->jump_pc
[1] = dc
->npc
+ 4;
1221 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1225 gen_helper_fcmps(r_rs1
, r_rs2
);
1228 gen_helper_fcmps_fcc1(r_rs1
, r_rs2
);
1231 gen_helper_fcmps_fcc2(r_rs1
, r_rs2
);
1234 gen_helper_fcmps_fcc3(r_rs1
, r_rs2
);
1239 static inline void gen_op_fcmpd(int fccno
)
1246 gen_helper_fcmpd_fcc1();
1249 gen_helper_fcmpd_fcc2();
1252 gen_helper_fcmpd_fcc3();
1257 static inline void gen_op_fcmpq(int fccno
)
1264 gen_helper_fcmpq_fcc1();
1267 gen_helper_fcmpq_fcc2();
1270 gen_helper_fcmpq_fcc3();
1275 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1279 gen_helper_fcmpes(r_rs1
, r_rs2
);
1282 gen_helper_fcmpes_fcc1(r_rs1
, r_rs2
);
1285 gen_helper_fcmpes_fcc2(r_rs1
, r_rs2
);
1288 gen_helper_fcmpes_fcc3(r_rs1
, r_rs2
);
1293 static inline void gen_op_fcmped(int fccno
)
1297 gen_helper_fcmped();
1300 gen_helper_fcmped_fcc1();
1303 gen_helper_fcmped_fcc2();
1306 gen_helper_fcmped_fcc3();
1311 static inline void gen_op_fcmpeq(int fccno
)
1315 gen_helper_fcmpeq();
1318 gen_helper_fcmpeq_fcc1();
1321 gen_helper_fcmpeq_fcc2();
1324 gen_helper_fcmpeq_fcc3();
1331 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1333 gen_helper_fcmps(r_rs1
, r_rs2
);
1336 static inline void gen_op_fcmpd(int fccno
)
1341 static inline void gen_op_fcmpq(int fccno
)
1346 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1348 gen_helper_fcmpes(r_rs1
, r_rs2
);
1351 static inline void gen_op_fcmped(int fccno
)
1353 gen_helper_fcmped();
1356 static inline void gen_op_fcmpeq(int fccno
)
1358 gen_helper_fcmpeq();
1362 static inline void gen_op_fpexception_im(int fsr_flags
)
1366 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1367 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1368 r_const
= tcg_const_i32(TT_FP_EXCP
);
1369 gen_helper_raise_exception(r_const
);
1370 tcg_temp_free_i32(r_const
);
1373 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1375 #if !defined(CONFIG_USER_ONLY)
1376 if (!dc
->fpu_enabled
) {
1379 save_state(dc
, r_cond
);
1380 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1381 gen_helper_raise_exception(r_const
);
1382 tcg_temp_free_i32(r_const
);
1390 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1392 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
/* Reset accumulated softfloat exception flags before an FP helper runs. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1401 #ifdef TARGET_SPARC64
1402 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1408 r_asi
= tcg_temp_new_i32();
1409 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1411 asi
= GET_FIELD(insn
, 19, 26);
1412 r_asi
= tcg_const_i32(asi
);
1417 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1420 TCGv_i32 r_asi
, r_size
, r_sign
;
1422 r_asi
= gen_get_asi(insn
, addr
);
1423 r_size
= tcg_const_i32(size
);
1424 r_sign
= tcg_const_i32(sign
);
1425 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1426 tcg_temp_free_i32(r_sign
);
1427 tcg_temp_free_i32(r_size
);
1428 tcg_temp_free_i32(r_asi
);
1431 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1433 TCGv_i32 r_asi
, r_size
;
1435 r_asi
= gen_get_asi(insn
, addr
);
1436 r_size
= tcg_const_i32(size
);
1437 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1438 tcg_temp_free_i32(r_size
);
1439 tcg_temp_free_i32(r_asi
);
1442 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1444 TCGv_i32 r_asi
, r_size
, r_rd
;
1446 r_asi
= gen_get_asi(insn
, addr
);
1447 r_size
= tcg_const_i32(size
);
1448 r_rd
= tcg_const_i32(rd
);
1449 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1450 tcg_temp_free_i32(r_rd
);
1451 tcg_temp_free_i32(r_size
);
1452 tcg_temp_free_i32(r_asi
);
1455 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1457 TCGv_i32 r_asi
, r_size
, r_rd
;
1459 r_asi
= gen_get_asi(insn
, addr
);
1460 r_size
= tcg_const_i32(size
);
1461 r_rd
= tcg_const_i32(rd
);
1462 gen_helper_stf_asi(addr
, r_asi
, r_size
, r_rd
);
1463 tcg_temp_free_i32(r_rd
);
1464 tcg_temp_free_i32(r_size
);
1465 tcg_temp_free_i32(r_asi
);
1468 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1470 TCGv_i32 r_asi
, r_size
, r_sign
;
1472 r_asi
= gen_get_asi(insn
, addr
);
1473 r_size
= tcg_const_i32(4);
1474 r_sign
= tcg_const_i32(0);
1475 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1476 tcg_temp_free_i32(r_sign
);
1477 gen_helper_st_asi(addr
, dst
, r_asi
, r_size
);
1478 tcg_temp_free_i32(r_size
);
1479 tcg_temp_free_i32(r_asi
);
1480 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1483 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1485 TCGv_i32 r_asi
, r_rd
;
1487 r_asi
= gen_get_asi(insn
, addr
);
1488 r_rd
= tcg_const_i32(rd
);
1489 gen_helper_ldda_asi(addr
, r_asi
, r_rd
);
1490 tcg_temp_free_i32(r_rd
);
1491 tcg_temp_free_i32(r_asi
);
1494 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1496 TCGv_i32 r_asi
, r_size
;
1498 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1499 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1500 r_asi
= gen_get_asi(insn
, addr
);
1501 r_size
= tcg_const_i32(8);
1502 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1503 tcg_temp_free_i32(r_size
);
1504 tcg_temp_free_i32(r_asi
);
1507 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1513 r_val1
= tcg_temp_new();
1514 gen_movl_reg_TN(rd
, r_val1
);
1515 r_asi
= gen_get_asi(insn
, addr
);
1516 gen_helper_cas_asi(dst
, addr
, r_val1
, val2
, r_asi
);
1517 tcg_temp_free_i32(r_asi
);
1518 tcg_temp_free(r_val1
);
1521 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1526 gen_movl_reg_TN(rd
, cpu_tmp64
);
1527 r_asi
= gen_get_asi(insn
, addr
);
1528 gen_helper_casx_asi(dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1529 tcg_temp_free_i32(r_asi
);
1532 #elif !defined(CONFIG_USER_ONLY)
1534 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1537 TCGv_i32 r_asi
, r_size
, r_sign
;
1539 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1540 r_size
= tcg_const_i32(size
);
1541 r_sign
= tcg_const_i32(sign
);
1542 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1543 tcg_temp_free(r_sign
);
1544 tcg_temp_free(r_size
);
1545 tcg_temp_free(r_asi
);
1546 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1549 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1551 TCGv_i32 r_asi
, r_size
;
1553 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1554 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1555 r_size
= tcg_const_i32(size
);
1556 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1557 tcg_temp_free(r_size
);
1558 tcg_temp_free(r_asi
);
1561 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1563 TCGv_i32 r_asi
, r_size
, r_sign
;
1566 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1567 r_size
= tcg_const_i32(4);
1568 r_sign
= tcg_const_i32(0);
1569 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1570 tcg_temp_free(r_sign
);
1571 r_val
= tcg_temp_new_i64();
1572 tcg_gen_extu_tl_i64(r_val
, dst
);
1573 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1574 tcg_temp_free_i64(r_val
);
1575 tcg_temp_free(r_size
);
1576 tcg_temp_free(r_asi
);
1577 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1580 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1582 TCGv_i32 r_asi
, r_size
, r_sign
;
1584 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1585 r_size
= tcg_const_i32(8);
1586 r_sign
= tcg_const_i32(0);
1587 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1588 tcg_temp_free(r_sign
);
1589 tcg_temp_free(r_size
);
1590 tcg_temp_free(r_asi
);
1591 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1592 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1593 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1594 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1595 gen_movl_TN_reg(rd
, hi
);
1598 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1600 TCGv_i32 r_asi
, r_size
;
1602 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1603 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1604 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1605 r_size
= tcg_const_i32(8);
1606 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1607 tcg_temp_free(r_size
);
1608 tcg_temp_free(r_asi
);
1612 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1613 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1616 TCGv_i32 r_asi
, r_size
;
1618 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1620 r_val
= tcg_const_i64(0xffULL
);
1621 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1622 r_size
= tcg_const_i32(1);
1623 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1624 tcg_temp_free_i32(r_size
);
1625 tcg_temp_free_i32(r_asi
);
1626 tcg_temp_free_i64(r_val
);
1630 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1635 rs1
= GET_FIELD(insn
, 13, 17);
1637 tcg_gen_movi_tl(def
, 0);
1638 } else if (rs1
< 8) {
1639 r_rs1
= cpu_gregs
[rs1
];
1641 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1646 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1650 if (IS_IMM
) { /* immediate */
1651 target_long simm
= GET_FIELDs(insn
, 19, 31);
1652 tcg_gen_movi_tl(def
, simm
);
1653 } else { /* register */
1654 unsigned int rs2
= GET_FIELD(insn
, 27, 31);
1656 tcg_gen_movi_tl(def
, 0);
1657 } else if (rs2
< 8) {
1658 r_rs2
= cpu_gregs
[rs2
];
1660 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
1666 #ifdef TARGET_SPARC64
1667 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr
, TCGv_ptr cpu_env
)
1669 TCGv_i32 r_tl
= tcg_temp_new_i32();
1671 /* load env->tl into r_tl */
1672 tcg_gen_ld_i32(r_tl
, cpu_env
, offsetof(CPUSPARCState
, tl
));
1674 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1675 tcg_gen_andi_i32(r_tl
, r_tl
, MAXTL_MASK
);
1677 /* calculate offset to current trap state from env->ts, reuse r_tl */
1678 tcg_gen_muli_i32(r_tl
, r_tl
, sizeof (trap_state
));
1679 tcg_gen_addi_ptr(r_tsptr
, cpu_env
, offsetof(CPUState
, ts
));
1681 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1683 TCGv_ptr r_tl_tmp
= tcg_temp_new_ptr();
1684 tcg_gen_ext_i32_ptr(r_tl_tmp
, r_tl
);
1685 tcg_gen_add_ptr(r_tsptr
, r_tsptr
, r_tl_tmp
);
1686 tcg_temp_free_ptr(r_tl_tmp
);
1689 tcg_temp_free_i32(r_tl
);
1693 #define CHECK_IU_FEATURE(dc, FEATURE) \
1694 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1696 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1697 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1700 /* before an instruction, dc->pc must be static */
1701 static void disas_sparc_insn(DisasContext
* dc
)
1703 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1704 TCGv cpu_src1
, cpu_src2
, cpu_tmp1
, cpu_tmp2
;
1707 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
)))
1708 tcg_gen_debug_insn_start(dc
->pc
);
1709 insn
= ldl_code(dc
->pc
);
1710 opc
= GET_FIELD(insn
, 0, 1);
1712 rd
= GET_FIELD(insn
, 2, 6);
1714 cpu_tmp1
= cpu_src1
= tcg_temp_new();
1715 cpu_tmp2
= cpu_src2
= tcg_temp_new();
1718 case 0: /* branches/sethi */
1720 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1723 #ifdef TARGET_SPARC64
1724 case 0x1: /* V9 BPcc */
1728 target
= GET_FIELD_SP(insn
, 0, 18);
1729 target
= sign_extend(target
, 18);
1731 cc
= GET_FIELD_SP(insn
, 20, 21);
1733 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1735 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1740 case 0x3: /* V9 BPr */
1742 target
= GET_FIELD_SP(insn
, 0, 13) |
1743 (GET_FIELD_SP(insn
, 20, 21) << 14);
1744 target
= sign_extend(target
, 16);
1746 cpu_src1
= get_src1(insn
, cpu_src1
);
1747 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1750 case 0x5: /* V9 FBPcc */
1752 int cc
= GET_FIELD_SP(insn
, 20, 21);
1753 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1755 target
= GET_FIELD_SP(insn
, 0, 18);
1756 target
= sign_extend(target
, 19);
1758 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1762 case 0x7: /* CBN+x */
1767 case 0x2: /* BN+x */
1769 target
= GET_FIELD(insn
, 10, 31);
1770 target
= sign_extend(target
, 22);
1772 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1775 case 0x6: /* FBN+x */
1777 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1779 target
= GET_FIELD(insn
, 10, 31);
1780 target
= sign_extend(target
, 22);
1782 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1785 case 0x4: /* SETHI */
1787 uint32_t value
= GET_FIELD(insn
, 10, 31);
1790 r_const
= tcg_const_tl(value
<< 10);
1791 gen_movl_TN_reg(rd
, r_const
);
1792 tcg_temp_free(r_const
);
1795 case 0x0: /* UNIMPL */
1804 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1807 r_const
= tcg_const_tl(dc
->pc
);
1808 gen_movl_TN_reg(15, r_const
);
1809 tcg_temp_free(r_const
);
1811 gen_mov_pc_npc(dc
, cpu_cond
);
1815 case 2: /* FPU & Logical Operations */
1817 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1818 if (xop
== 0x3a) { /* generate trap */
1821 cpu_src1
= get_src1(insn
, cpu_src1
);
1823 rs2
= GET_FIELD(insn
, 25, 31);
1824 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
1826 rs2
= GET_FIELD(insn
, 27, 31);
1828 gen_movl_reg_TN(rs2
, cpu_src2
);
1829 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
1831 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
1833 cond
= GET_FIELD(insn
, 3, 6);
1835 save_state(dc
, cpu_cond
);
1836 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
1838 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
1840 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
1841 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
1842 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
1843 gen_helper_raise_exception(cpu_tmp32
);
1844 } else if (cond
!= 0) {
1845 TCGv r_cond
= tcg_temp_new();
1847 #ifdef TARGET_SPARC64
1849 int cc
= GET_FIELD_SP(insn
, 11, 12);
1851 save_state(dc
, cpu_cond
);
1853 gen_cond(r_cond
, 0, cond
, dc
);
1855 gen_cond(r_cond
, 1, cond
, dc
);
1859 save_state(dc
, cpu_cond
);
1860 gen_cond(r_cond
, 0, cond
, dc
);
1862 l1
= gen_new_label();
1863 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1865 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
1867 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
1869 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
1870 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
1871 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
1872 gen_helper_raise_exception(cpu_tmp32
);
1875 tcg_temp_free(r_cond
);
1881 } else if (xop
== 0x28) {
1882 rs1
= GET_FIELD(insn
, 13, 17);
1885 #ifndef TARGET_SPARC64
1886 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1887 manual, rdy on the microSPARC
1889 case 0x0f: /* stbar in the SPARCv8 manual,
1890 rdy on the microSPARC II */
1891 case 0x10 ... 0x1f: /* implementation-dependent in the
1892 SPARCv8 manual, rdy on the
1895 gen_movl_TN_reg(rd
, cpu_y
);
1897 #ifdef TARGET_SPARC64
1898 case 0x2: /* V9 rdccr */
1899 gen_helper_compute_psr();
1900 gen_helper_rdccr(cpu_dst
);
1901 gen_movl_TN_reg(rd
, cpu_dst
);
1903 case 0x3: /* V9 rdasi */
1904 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
1905 gen_movl_TN_reg(rd
, cpu_dst
);
1907 case 0x4: /* V9 rdtick */
1911 r_tickptr
= tcg_temp_new_ptr();
1912 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
1913 offsetof(CPUState
, tick
));
1914 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
1915 tcg_temp_free_ptr(r_tickptr
);
1916 gen_movl_TN_reg(rd
, cpu_dst
);
1919 case 0x5: /* V9 rdpc */
1923 r_const
= tcg_const_tl(dc
->pc
);
1924 gen_movl_TN_reg(rd
, r_const
);
1925 tcg_temp_free(r_const
);
1928 case 0x6: /* V9 rdfprs */
1929 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
1930 gen_movl_TN_reg(rd
, cpu_dst
);
1932 case 0xf: /* V9 membar */
1933 break; /* no effect */
1934 case 0x13: /* Graphics Status */
1935 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1937 gen_movl_TN_reg(rd
, cpu_gsr
);
1939 case 0x16: /* Softint */
1940 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
1941 gen_movl_TN_reg(rd
, cpu_dst
);
1943 case 0x17: /* Tick compare */
1944 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
1946 case 0x18: /* System tick */
1950 r_tickptr
= tcg_temp_new_ptr();
1951 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
1952 offsetof(CPUState
, stick
));
1953 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
1954 tcg_temp_free_ptr(r_tickptr
);
1955 gen_movl_TN_reg(rd
, cpu_dst
);
1958 case 0x19: /* System tick compare */
1959 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
1961 case 0x10: /* Performance Control */
1962 case 0x11: /* Performance Instrumentation Counter */
1963 case 0x12: /* Dispatch Control */
1964 case 0x14: /* Softint set, WO */
1965 case 0x15: /* Softint clear, WO */
1970 #if !defined(CONFIG_USER_ONLY)
1971 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
1972 #ifndef TARGET_SPARC64
1973 if (!supervisor(dc
))
1975 gen_helper_compute_psr();
1976 dc
->cc_op
= CC_OP_FLAGS
;
1977 gen_helper_rdpsr(cpu_dst
);
1979 CHECK_IU_FEATURE(dc
, HYPV
);
1980 if (!hypervisor(dc
))
1982 rs1
= GET_FIELD(insn
, 13, 17);
1985 // gen_op_rdhpstate();
1988 // gen_op_rdhtstate();
1991 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
1994 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
1997 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
1999 case 31: // hstick_cmpr
2000 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2006 gen_movl_TN_reg(rd
, cpu_dst
);
2008 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2009 if (!supervisor(dc
))
2011 #ifdef TARGET_SPARC64
2012 rs1
= GET_FIELD(insn
, 13, 17);
2018 r_tsptr
= tcg_temp_new_ptr();
2019 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2020 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2021 offsetof(trap_state
, tpc
));
2022 tcg_temp_free_ptr(r_tsptr
);
2029 r_tsptr
= tcg_temp_new_ptr();
2030 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2031 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2032 offsetof(trap_state
, tnpc
));
2033 tcg_temp_free_ptr(r_tsptr
);
2040 r_tsptr
= tcg_temp_new_ptr();
2041 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2042 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2043 offsetof(trap_state
, tstate
));
2044 tcg_temp_free_ptr(r_tsptr
);
2051 r_tsptr
= tcg_temp_new_ptr();
2052 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2053 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2054 offsetof(trap_state
, tt
));
2055 tcg_temp_free_ptr(r_tsptr
);
2056 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2063 r_tickptr
= tcg_temp_new_ptr();
2064 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2065 offsetof(CPUState
, tick
));
2066 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2067 gen_movl_TN_reg(rd
, cpu_tmp0
);
2068 tcg_temp_free_ptr(r_tickptr
);
2072 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2075 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2076 offsetof(CPUSPARCState
, pstate
));
2077 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2080 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2081 offsetof(CPUSPARCState
, tl
));
2082 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2085 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2086 offsetof(CPUSPARCState
, psrpil
));
2087 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2090 gen_helper_rdcwp(cpu_tmp0
);
2093 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2094 offsetof(CPUSPARCState
, cansave
));
2095 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2097 case 11: // canrestore
2098 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2099 offsetof(CPUSPARCState
, canrestore
));
2100 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2102 case 12: // cleanwin
2103 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2104 offsetof(CPUSPARCState
, cleanwin
));
2105 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2107 case 13: // otherwin
2108 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2109 offsetof(CPUSPARCState
, otherwin
));
2110 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2113 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2114 offsetof(CPUSPARCState
, wstate
));
2115 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2117 case 16: // UA2005 gl
2118 CHECK_IU_FEATURE(dc
, GL
);
2119 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2120 offsetof(CPUSPARCState
, gl
));
2121 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2123 case 26: // UA2005 strand status
2124 CHECK_IU_FEATURE(dc
, HYPV
);
2125 if (!hypervisor(dc
))
2127 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2130 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2137 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2139 gen_movl_TN_reg(rd
, cpu_tmp0
);
2141 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2142 #ifdef TARGET_SPARC64
2143 save_state(dc
, cpu_cond
);
2144 gen_helper_flushw();
2146 if (!supervisor(dc
))
2148 gen_movl_TN_reg(rd
, cpu_tbr
);
2152 } else if (xop
== 0x34) { /* FPU Operations */
2153 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2155 gen_op_clear_ieee_excp_and_FTT();
2156 rs1
= GET_FIELD(insn
, 13, 17);
2157 rs2
= GET_FIELD(insn
, 27, 31);
2158 xop
= GET_FIELD(insn
, 18, 26);
2159 save_state(dc
, cpu_cond
);
2161 case 0x1: /* fmovs */
2162 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2164 case 0x5: /* fnegs */
2165 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2167 case 0x9: /* fabss */
2168 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2170 case 0x29: /* fsqrts */
2171 CHECK_FPU_FEATURE(dc
, FSQRT
);
2172 gen_clear_float_exceptions();
2173 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2174 gen_helper_check_ieee_exceptions();
2175 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2177 case 0x2a: /* fsqrtd */
2178 CHECK_FPU_FEATURE(dc
, FSQRT
);
2179 gen_op_load_fpr_DT1(DFPREG(rs2
));
2180 gen_clear_float_exceptions();
2181 gen_helper_fsqrtd();
2182 gen_helper_check_ieee_exceptions();
2183 gen_op_store_DT0_fpr(DFPREG(rd
));
2185 case 0x2b: /* fsqrtq */
2186 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2187 gen_op_load_fpr_QT1(QFPREG(rs2
));
2188 gen_clear_float_exceptions();
2189 gen_helper_fsqrtq();
2190 gen_helper_check_ieee_exceptions();
2191 gen_op_store_QT0_fpr(QFPREG(rd
));
2193 case 0x41: /* fadds */
2194 gen_clear_float_exceptions();
2195 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2196 gen_helper_check_ieee_exceptions();
2197 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2199 case 0x42: /* faddd */
2200 gen_op_load_fpr_DT0(DFPREG(rs1
));
2201 gen_op_load_fpr_DT1(DFPREG(rs2
));
2202 gen_clear_float_exceptions();
2204 gen_helper_check_ieee_exceptions();
2205 gen_op_store_DT0_fpr(DFPREG(rd
));
2207 case 0x43: /* faddq */
2208 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2209 gen_op_load_fpr_QT0(QFPREG(rs1
));
2210 gen_op_load_fpr_QT1(QFPREG(rs2
));
2211 gen_clear_float_exceptions();
2213 gen_helper_check_ieee_exceptions();
2214 gen_op_store_QT0_fpr(QFPREG(rd
));
2216 case 0x45: /* fsubs */
2217 gen_clear_float_exceptions();
2218 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2219 gen_helper_check_ieee_exceptions();
2220 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2222 case 0x46: /* fsubd */
2223 gen_op_load_fpr_DT0(DFPREG(rs1
));
2224 gen_op_load_fpr_DT1(DFPREG(rs2
));
2225 gen_clear_float_exceptions();
2227 gen_helper_check_ieee_exceptions();
2228 gen_op_store_DT0_fpr(DFPREG(rd
));
2230 case 0x47: /* fsubq */
2231 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2232 gen_op_load_fpr_QT0(QFPREG(rs1
));
2233 gen_op_load_fpr_QT1(QFPREG(rs2
));
2234 gen_clear_float_exceptions();
2236 gen_helper_check_ieee_exceptions();
2237 gen_op_store_QT0_fpr(QFPREG(rd
));
2239 case 0x49: /* fmuls */
2240 CHECK_FPU_FEATURE(dc
, FMUL
);
2241 gen_clear_float_exceptions();
2242 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2243 gen_helper_check_ieee_exceptions();
2244 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2246 case 0x4a: /* fmuld */
2247 CHECK_FPU_FEATURE(dc
, FMUL
);
2248 gen_op_load_fpr_DT0(DFPREG(rs1
));
2249 gen_op_load_fpr_DT1(DFPREG(rs2
));
2250 gen_clear_float_exceptions();
2252 gen_helper_check_ieee_exceptions();
2253 gen_op_store_DT0_fpr(DFPREG(rd
));
2255 case 0x4b: /* fmulq */
2256 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2257 CHECK_FPU_FEATURE(dc
, FMUL
);
2258 gen_op_load_fpr_QT0(QFPREG(rs1
));
2259 gen_op_load_fpr_QT1(QFPREG(rs2
));
2260 gen_clear_float_exceptions();
2262 gen_helper_check_ieee_exceptions();
2263 gen_op_store_QT0_fpr(QFPREG(rd
));
2265 case 0x4d: /* fdivs */
2266 gen_clear_float_exceptions();
2267 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2268 gen_helper_check_ieee_exceptions();
2269 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2271 case 0x4e: /* fdivd */
2272 gen_op_load_fpr_DT0(DFPREG(rs1
));
2273 gen_op_load_fpr_DT1(DFPREG(rs2
));
2274 gen_clear_float_exceptions();
2276 gen_helper_check_ieee_exceptions();
2277 gen_op_store_DT0_fpr(DFPREG(rd
));
2279 case 0x4f: /* fdivq */
2280 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2281 gen_op_load_fpr_QT0(QFPREG(rs1
));
2282 gen_op_load_fpr_QT1(QFPREG(rs2
));
2283 gen_clear_float_exceptions();
2285 gen_helper_check_ieee_exceptions();
2286 gen_op_store_QT0_fpr(QFPREG(rd
));
2288 case 0x69: /* fsmuld */
2289 CHECK_FPU_FEATURE(dc
, FSMULD
);
2290 gen_clear_float_exceptions();
2291 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2292 gen_helper_check_ieee_exceptions();
2293 gen_op_store_DT0_fpr(DFPREG(rd
));
2295 case 0x6e: /* fdmulq */
2296 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2297 gen_op_load_fpr_DT0(DFPREG(rs1
));
2298 gen_op_load_fpr_DT1(DFPREG(rs2
));
2299 gen_clear_float_exceptions();
2300 gen_helper_fdmulq();
2301 gen_helper_check_ieee_exceptions();
2302 gen_op_store_QT0_fpr(QFPREG(rd
));
2304 case 0xc4: /* fitos */
2305 gen_clear_float_exceptions();
2306 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2307 gen_helper_check_ieee_exceptions();
2308 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2310 case 0xc6: /* fdtos */
2311 gen_op_load_fpr_DT1(DFPREG(rs2
));
2312 gen_clear_float_exceptions();
2313 gen_helper_fdtos(cpu_tmp32
);
2314 gen_helper_check_ieee_exceptions();
2315 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2317 case 0xc7: /* fqtos */
2318 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2319 gen_op_load_fpr_QT1(QFPREG(rs2
));
2320 gen_clear_float_exceptions();
2321 gen_helper_fqtos(cpu_tmp32
);
2322 gen_helper_check_ieee_exceptions();
2323 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2325 case 0xc8: /* fitod */
2326 gen_helper_fitod(cpu_fpr
[rs2
]);
2327 gen_op_store_DT0_fpr(DFPREG(rd
));
2329 case 0xc9: /* fstod */
2330 gen_helper_fstod(cpu_fpr
[rs2
]);
2331 gen_op_store_DT0_fpr(DFPREG(rd
));
2333 case 0xcb: /* fqtod */
2334 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2335 gen_op_load_fpr_QT1(QFPREG(rs2
));
2336 gen_clear_float_exceptions();
2338 gen_helper_check_ieee_exceptions();
2339 gen_op_store_DT0_fpr(DFPREG(rd
));
2341 case 0xcc: /* fitoq */
2342 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2343 gen_helper_fitoq(cpu_fpr
[rs2
]);
2344 gen_op_store_QT0_fpr(QFPREG(rd
));
2346 case 0xcd: /* fstoq */
2347 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2348 gen_helper_fstoq(cpu_fpr
[rs2
]);
2349 gen_op_store_QT0_fpr(QFPREG(rd
));
2351 case 0xce: /* fdtoq */
2352 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2353 gen_op_load_fpr_DT1(DFPREG(rs2
));
2355 gen_op_store_QT0_fpr(QFPREG(rd
));
2357 case 0xd1: /* fstoi */
2358 gen_clear_float_exceptions();
2359 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2360 gen_helper_check_ieee_exceptions();
2361 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2363 case 0xd2: /* fdtoi */
2364 gen_op_load_fpr_DT1(DFPREG(rs2
));
2365 gen_clear_float_exceptions();
2366 gen_helper_fdtoi(cpu_tmp32
);
2367 gen_helper_check_ieee_exceptions();
2368 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2370 case 0xd3: /* fqtoi */
2371 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2372 gen_op_load_fpr_QT1(QFPREG(rs2
));
2373 gen_clear_float_exceptions();
2374 gen_helper_fqtoi(cpu_tmp32
);
2375 gen_helper_check_ieee_exceptions();
2376 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2378 #ifdef TARGET_SPARC64
2379 case 0x2: /* V9 fmovd */
2380 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2381 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2382 cpu_fpr
[DFPREG(rs2
) + 1]);
2384 case 0x3: /* V9 fmovq */
2385 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2386 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2387 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2388 cpu_fpr
[QFPREG(rs2
) + 1]);
2389 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2390 cpu_fpr
[QFPREG(rs2
) + 2]);
2391 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2392 cpu_fpr
[QFPREG(rs2
) + 3]);
2394 case 0x6: /* V9 fnegd */
2395 gen_op_load_fpr_DT1(DFPREG(rs2
));
2397 gen_op_store_DT0_fpr(DFPREG(rd
));
2399 case 0x7: /* V9 fnegq */
2400 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2401 gen_op_load_fpr_QT1(QFPREG(rs2
));
2403 gen_op_store_QT0_fpr(QFPREG(rd
));
2405 case 0xa: /* V9 fabsd */
2406 gen_op_load_fpr_DT1(DFPREG(rs2
));
2408 gen_op_store_DT0_fpr(DFPREG(rd
));
2410 case 0xb: /* V9 fabsq */
2411 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2412 gen_op_load_fpr_QT1(QFPREG(rs2
));
2414 gen_op_store_QT0_fpr(QFPREG(rd
));
2416 case 0x81: /* V9 fstox */
2417 gen_clear_float_exceptions();
2418 gen_helper_fstox(cpu_fpr
[rs2
]);
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_DT0_fpr(DFPREG(rd
));
2422 case 0x82: /* V9 fdtox */
2423 gen_op_load_fpr_DT1(DFPREG(rs2
));
2424 gen_clear_float_exceptions();
2426 gen_helper_check_ieee_exceptions();
2427 gen_op_store_DT0_fpr(DFPREG(rd
));
2429 case 0x83: /* V9 fqtox */
2430 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2431 gen_op_load_fpr_QT1(QFPREG(rs2
));
2432 gen_clear_float_exceptions();
2434 gen_helper_check_ieee_exceptions();
2435 gen_op_store_DT0_fpr(DFPREG(rd
));
2437 case 0x84: /* V9 fxtos */
2438 gen_op_load_fpr_DT1(DFPREG(rs2
));
2439 gen_clear_float_exceptions();
2440 gen_helper_fxtos(cpu_tmp32
);
2441 gen_helper_check_ieee_exceptions();
2442 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2444 case 0x88: /* V9 fxtod */
2445 gen_op_load_fpr_DT1(DFPREG(rs2
));
2446 gen_clear_float_exceptions();
2448 gen_helper_check_ieee_exceptions();
2449 gen_op_store_DT0_fpr(DFPREG(rd
));
2451 case 0x8c: /* V9 fxtoq */
2452 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2453 gen_op_load_fpr_DT1(DFPREG(rs2
));
2454 gen_clear_float_exceptions();
2456 gen_helper_check_ieee_exceptions();
2457 gen_op_store_QT0_fpr(QFPREG(rd
));
2463 } else if (xop
== 0x35) { /* FPU Operations */
2464 #ifdef TARGET_SPARC64
2467 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2469 gen_op_clear_ieee_excp_and_FTT();
2470 rs1
= GET_FIELD(insn
, 13, 17);
2471 rs2
= GET_FIELD(insn
, 27, 31);
2472 xop
= GET_FIELD(insn
, 18, 26);
2473 save_state(dc
, cpu_cond
);
2474 #ifdef TARGET_SPARC64
2475 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2478 l1
= gen_new_label();
2479 cond
= GET_FIELD_SP(insn
, 14, 17);
2480 cpu_src1
= get_src1(insn
, cpu_src1
);
2481 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2483 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2486 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2489 l1
= gen_new_label();
2490 cond
= GET_FIELD_SP(insn
, 14, 17);
2491 cpu_src1
= get_src1(insn
, cpu_src1
);
2492 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2494 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2495 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2498 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2501 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2502 l1
= gen_new_label();
2503 cond
= GET_FIELD_SP(insn
, 14, 17);
2504 cpu_src1
= get_src1(insn
, cpu_src1
);
2505 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2507 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2508 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2509 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2510 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2516 #ifdef TARGET_SPARC64
2517 #define FMOVSCC(fcc) \
2522 l1 = gen_new_label(); \
2523 r_cond = tcg_temp_new(); \
2524 cond = GET_FIELD_SP(insn, 14, 17); \
2525 gen_fcond(r_cond, fcc, cond); \
2526 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2528 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2529 gen_set_label(l1); \
2530 tcg_temp_free(r_cond); \
2532 #define FMOVDCC(fcc) \
2537 l1 = gen_new_label(); \
2538 r_cond = tcg_temp_new(); \
2539 cond = GET_FIELD_SP(insn, 14, 17); \
2540 gen_fcond(r_cond, fcc, cond); \
2541 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2543 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2544 cpu_fpr[DFPREG(rs2)]); \
2545 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2546 cpu_fpr[DFPREG(rs2) + 1]); \
2547 gen_set_label(l1); \
2548 tcg_temp_free(r_cond); \
2550 #define FMOVQCC(fcc) \
2555 l1 = gen_new_label(); \
2556 r_cond = tcg_temp_new(); \
2557 cond = GET_FIELD_SP(insn, 14, 17); \
2558 gen_fcond(r_cond, fcc, cond); \
2559 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2561 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2562 cpu_fpr[QFPREG(rs2)]); \
2563 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2564 cpu_fpr[QFPREG(rs2) + 1]); \
2565 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2566 cpu_fpr[QFPREG(rs2) + 2]); \
2567 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2568 cpu_fpr[QFPREG(rs2) + 3]); \
2569 gen_set_label(l1); \
2570 tcg_temp_free(r_cond); \
2572 case 0x001: /* V9 fmovscc %fcc0 */
2575 case 0x002: /* V9 fmovdcc %fcc0 */
2578 case 0x003: /* V9 fmovqcc %fcc0 */
2579 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2582 case 0x041: /* V9 fmovscc %fcc1 */
2585 case 0x042: /* V9 fmovdcc %fcc1 */
2588 case 0x043: /* V9 fmovqcc %fcc1 */
2589 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2592 case 0x081: /* V9 fmovscc %fcc2 */
2595 case 0x082: /* V9 fmovdcc %fcc2 */
2598 case 0x083: /* V9 fmovqcc %fcc2 */
2599 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2602 case 0x0c1: /* V9 fmovscc %fcc3 */
2605 case 0x0c2: /* V9 fmovdcc %fcc3 */
2608 case 0x0c3: /* V9 fmovqcc %fcc3 */
2609 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2615 #define FMOVSCC(icc) \
2620 l1 = gen_new_label(); \
2621 r_cond = tcg_temp_new(); \
2622 cond = GET_FIELD_SP(insn, 14, 17); \
2623 gen_cond(r_cond, icc, cond, dc); \
2624 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2626 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2627 gen_set_label(l1); \
2628 tcg_temp_free(r_cond); \
2630 #define FMOVDCC(icc) \
2635 l1 = gen_new_label(); \
2636 r_cond = tcg_temp_new(); \
2637 cond = GET_FIELD_SP(insn, 14, 17); \
2638 gen_cond(r_cond, icc, cond, dc); \
2639 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2641 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2642 cpu_fpr[DFPREG(rs2)]); \
2643 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2644 cpu_fpr[DFPREG(rs2) + 1]); \
2645 gen_set_label(l1); \
2646 tcg_temp_free(r_cond); \
2648 #define FMOVQCC(icc) \
2653 l1 = gen_new_label(); \
2654 r_cond = tcg_temp_new(); \
2655 cond = GET_FIELD_SP(insn, 14, 17); \
2656 gen_cond(r_cond, icc, cond, dc); \
2657 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2659 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2660 cpu_fpr[QFPREG(rs2)]); \
2661 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2662 cpu_fpr[QFPREG(rs2) + 1]); \
2663 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2664 cpu_fpr[QFPREG(rs2) + 2]); \
2665 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2666 cpu_fpr[QFPREG(rs2) + 3]); \
2667 gen_set_label(l1); \
2668 tcg_temp_free(r_cond); \
2671 case 0x101: /* V9 fmovscc %icc */
2674 case 0x102: /* V9 fmovdcc %icc */
2676 case 0x103: /* V9 fmovqcc %icc */
2677 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2680 case 0x181: /* V9 fmovscc %xcc */
2683 case 0x182: /* V9 fmovdcc %xcc */
2686 case 0x183: /* V9 fmovqcc %xcc */
2687 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2694 case 0x51: /* fcmps, V9 %fcc */
2695 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2697 case 0x52: /* fcmpd, V9 %fcc */
2698 gen_op_load_fpr_DT0(DFPREG(rs1
));
2699 gen_op_load_fpr_DT1(DFPREG(rs2
));
2700 gen_op_fcmpd(rd
& 3);
2702 case 0x53: /* fcmpq, V9 %fcc */
2703 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2704 gen_op_load_fpr_QT0(QFPREG(rs1
));
2705 gen_op_load_fpr_QT1(QFPREG(rs2
));
2706 gen_op_fcmpq(rd
& 3);
2708 case 0x55: /* fcmpes, V9 %fcc */
2709 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2711 case 0x56: /* fcmped, V9 %fcc */
2712 gen_op_load_fpr_DT0(DFPREG(rs1
));
2713 gen_op_load_fpr_DT1(DFPREG(rs2
));
2714 gen_op_fcmped(rd
& 3);
2716 case 0x57: /* fcmpeq, V9 %fcc */
2717 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2718 gen_op_load_fpr_QT0(QFPREG(rs1
));
2719 gen_op_load_fpr_QT1(QFPREG(rs2
));
2720 gen_op_fcmpeq(rd
& 3);
2725 } else if (xop
== 0x2) {
2728 rs1
= GET_FIELD(insn
, 13, 17);
2730 // or %g0, x, y -> mov T0, x; mov y, T0
2731 if (IS_IMM
) { /* immediate */
2734 simm
= GET_FIELDs(insn
, 19, 31);
2735 r_const
= tcg_const_tl(simm
);
2736 gen_movl_TN_reg(rd
, r_const
);
2737 tcg_temp_free(r_const
);
2738 } else { /* register */
2739 rs2
= GET_FIELD(insn
, 27, 31);
2740 gen_movl_reg_TN(rs2
, cpu_dst
);
2741 gen_movl_TN_reg(rd
, cpu_dst
);
2744 cpu_src1
= get_src1(insn
, cpu_src1
);
2745 if (IS_IMM
) { /* immediate */
2746 simm
= GET_FIELDs(insn
, 19, 31);
2747 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2748 gen_movl_TN_reg(rd
, cpu_dst
);
2749 } else { /* register */
2750 // or x, %g0, y -> mov T1, x; mov y, T1
2751 rs2
= GET_FIELD(insn
, 27, 31);
2753 gen_movl_reg_TN(rs2
, cpu_src2
);
2754 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2755 gen_movl_TN_reg(rd
, cpu_dst
);
2757 gen_movl_TN_reg(rd
, cpu_src1
);
2760 #ifdef TARGET_SPARC64
2761 } else if (xop
== 0x25) { /* sll, V9 sllx */
2762 cpu_src1
= get_src1(insn
, cpu_src1
);
2763 if (IS_IMM
) { /* immediate */
2764 simm
= GET_FIELDs(insn
, 20, 31);
2765 if (insn
& (1 << 12)) {
2766 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2768 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
2770 } else { /* register */
2771 rs2
= GET_FIELD(insn
, 27, 31);
2772 gen_movl_reg_TN(rs2
, cpu_src2
);
2773 if (insn
& (1 << 12)) {
2774 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2776 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2778 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2780 gen_movl_TN_reg(rd
, cpu_dst
);
2781 } else if (xop
== 0x26) { /* srl, V9 srlx */
2782 cpu_src1
= get_src1(insn
, cpu_src1
);
2783 if (IS_IMM
) { /* immediate */
2784 simm
= GET_FIELDs(insn
, 20, 31);
2785 if (insn
& (1 << 12)) {
2786 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2788 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2789 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
2791 } else { /* register */
2792 rs2
= GET_FIELD(insn
, 27, 31);
2793 gen_movl_reg_TN(rs2
, cpu_src2
);
2794 if (insn
& (1 << 12)) {
2795 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2796 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2798 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2799 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2800 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2803 gen_movl_TN_reg(rd
, cpu_dst
);
2804 } else if (xop
== 0x27) { /* sra, V9 srax */
2805 cpu_src1
= get_src1(insn
, cpu_src1
);
2806 if (IS_IMM
) { /* immediate */
2807 simm
= GET_FIELDs(insn
, 20, 31);
2808 if (insn
& (1 << 12)) {
2809 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2811 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2812 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
2813 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
2815 } else { /* register */
2816 rs2
= GET_FIELD(insn
, 27, 31);
2817 gen_movl_reg_TN(rs2
, cpu_src2
);
2818 if (insn
& (1 << 12)) {
2819 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2820 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2822 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2823 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2824 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
2825 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2828 gen_movl_TN_reg(rd
, cpu_dst
);
2830 } else if (xop
< 0x36) {
2832 cpu_src1
= get_src1(insn
, cpu_src1
);
2833 cpu_src2
= get_src2(insn
, cpu_src2
);
2834 switch (xop
& ~0x10) {
2837 simm
= GET_FIELDs(insn
, 19, 31);
2839 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
2840 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2841 dc
->cc_op
= CC_OP_ADD
;
2843 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
2847 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2848 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2849 dc
->cc_op
= CC_OP_ADD
;
2851 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2857 simm
= GET_FIELDs(insn
, 19, 31);
2858 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
2860 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2863 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2864 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2865 dc
->cc_op
= CC_OP_LOGIC
;
2870 simm
= GET_FIELDs(insn
, 19, 31);
2871 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2873 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2876 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2877 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2878 dc
->cc_op
= CC_OP_LOGIC
;
2883 simm
= GET_FIELDs(insn
, 19, 31);
2884 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
2886 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2889 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2890 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2891 dc
->cc_op
= CC_OP_LOGIC
;
2896 simm
= GET_FIELDs(insn
, 19, 31);
2898 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
2900 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
2904 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2905 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
2906 dc
->cc_op
= CC_OP_SUB
;
2908 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2912 case 0x5: /* andn */
2914 simm
= GET_FIELDs(insn
, 19, 31);
2915 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
2917 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2920 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2921 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2922 dc
->cc_op
= CC_OP_LOGIC
;
2927 simm
= GET_FIELDs(insn
, 19, 31);
2928 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
2930 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2933 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2934 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2935 dc
->cc_op
= CC_OP_LOGIC
;
2938 case 0x7: /* xorn */
2940 simm
= GET_FIELDs(insn
, 19, 31);
2941 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
2943 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
2944 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2947 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2948 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2949 dc
->cc_op
= CC_OP_LOGIC
;
2952 case 0x8: /* addx, V9 addc */
2954 simm
= GET_FIELDs(insn
, 19, 31);
2956 gen_helper_compute_psr();
2957 gen_op_addxi_cc(cpu_dst
, cpu_src1
, simm
);
2958 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
2959 dc
->cc_op
= CC_OP_ADDX
;
2961 gen_helper_compute_psr();
2962 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
2963 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
2964 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2968 gen_helper_compute_psr();
2969 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2970 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
2971 dc
->cc_op
= CC_OP_ADDX
;
2973 gen_helper_compute_psr();
2974 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
2975 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
2976 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2980 #ifdef TARGET_SPARC64
2981 case 0x9: /* V9 mulx */
2983 simm
= GET_FIELDs(insn
, 19, 31);
2984 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
2986 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
2990 case 0xa: /* umul */
2991 CHECK_IU_FEATURE(dc
, MUL
);
2992 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
2994 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2995 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2996 dc
->cc_op
= CC_OP_LOGIC
;
2999 case 0xb: /* smul */
3000 CHECK_IU_FEATURE(dc
, MUL
);
3001 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3003 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3004 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3005 dc
->cc_op
= CC_OP_LOGIC
;
3008 case 0xc: /* subx, V9 subc */
3010 simm
= GET_FIELDs(insn
, 19, 31);
3012 gen_helper_compute_psr();
3013 gen_op_subxi_cc(cpu_dst
, cpu_src1
, simm
);
3014 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
3015 dc
->cc_op
= CC_OP_SUBX
;
3017 gen_helper_compute_psr();
3018 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3019 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
3020 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3024 gen_helper_compute_psr();
3025 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3026 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
3027 dc
->cc_op
= CC_OP_SUBX
;
3029 gen_helper_compute_psr();
3030 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3031 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3032 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3036 #ifdef TARGET_SPARC64
3037 case 0xd: /* V9 udivx */
3038 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3039 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3040 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3041 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3044 case 0xe: /* udiv */
3045 CHECK_IU_FEATURE(dc
, DIV
);
3046 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3048 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3049 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3050 dc
->cc_op
= CC_OP_DIV
;
3053 case 0xf: /* sdiv */
3054 CHECK_IU_FEATURE(dc
, DIV
);
3055 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3057 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3058 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3059 dc
->cc_op
= CC_OP_DIV
;
3065 gen_movl_TN_reg(rd
, cpu_dst
);
3067 cpu_src1
= get_src1(insn
, cpu_src1
);
3068 cpu_src2
= get_src2(insn
, cpu_src2
);
3070 case 0x20: /* taddcc */
3071 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3072 gen_movl_TN_reg(rd
, cpu_dst
);
3073 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3074 dc
->cc_op
= CC_OP_TADD
;
3076 case 0x21: /* tsubcc */
3077 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3078 gen_movl_TN_reg(rd
, cpu_dst
);
3079 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3080 dc
->cc_op
= CC_OP_TSUB
;
3082 case 0x22: /* taddcctv */
3083 save_state(dc
, cpu_cond
);
3084 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3085 gen_movl_TN_reg(rd
, cpu_dst
);
3086 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3087 dc
->cc_op
= CC_OP_TADDTV
;
3089 case 0x23: /* tsubcctv */
3090 save_state(dc
, cpu_cond
);
3091 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3092 gen_movl_TN_reg(rd
, cpu_dst
);
3093 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3094 dc
->cc_op
= CC_OP_TSUBTV
;
3096 case 0x24: /* mulscc */
3097 gen_helper_compute_psr();
3098 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3099 gen_movl_TN_reg(rd
, cpu_dst
);
3100 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3101 dc
->cc_op
= CC_OP_ADD
;
3103 #ifndef TARGET_SPARC64
3104 case 0x25: /* sll */
3105 if (IS_IMM
) { /* immediate */
3106 simm
= GET_FIELDs(insn
, 20, 31);
3107 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3108 } else { /* register */
3109 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3110 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3112 gen_movl_TN_reg(rd
, cpu_dst
);
3114 case 0x26: /* srl */
3115 if (IS_IMM
) { /* immediate */
3116 simm
= GET_FIELDs(insn
, 20, 31);
3117 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3118 } else { /* register */
3119 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3120 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3122 gen_movl_TN_reg(rd
, cpu_dst
);
3124 case 0x27: /* sra */
3125 if (IS_IMM
) { /* immediate */
3126 simm
= GET_FIELDs(insn
, 20, 31);
3127 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3128 } else { /* register */
3129 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3130 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3132 gen_movl_TN_reg(rd
, cpu_dst
);
3139 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3140 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3142 #ifndef TARGET_SPARC64
3143 case 0x01 ... 0x0f: /* undefined in the
3147 case 0x10 ... 0x1f: /* implementation-dependent
3153 case 0x2: /* V9 wrccr */
3154 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3155 gen_helper_wrccr(cpu_dst
);
3156 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3157 dc
->cc_op
= CC_OP_FLAGS
;
3159 case 0x3: /* V9 wrasi */
3160 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3161 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3162 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3164 case 0x6: /* V9 wrfprs */
3165 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3166 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3167 save_state(dc
, cpu_cond
);
3172 case 0xf: /* V9 sir, nop if user */
3173 #if !defined(CONFIG_USER_ONLY)
3174 if (supervisor(dc
)) {
3179 case 0x13: /* Graphics Status */
3180 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3182 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3184 case 0x14: /* Softint set */
3185 if (!supervisor(dc
))
3187 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3188 gen_helper_set_softint(cpu_tmp64
);
3190 case 0x15: /* Softint clear */
3191 if (!supervisor(dc
))
3193 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3194 gen_helper_clear_softint(cpu_tmp64
);
3196 case 0x16: /* Softint write */
3197 if (!supervisor(dc
))
3199 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3200 gen_helper_write_softint(cpu_tmp64
);
3202 case 0x17: /* Tick compare */
3203 #if !defined(CONFIG_USER_ONLY)
3204 if (!supervisor(dc
))
3210 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3212 r_tickptr
= tcg_temp_new_ptr();
3213 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3214 offsetof(CPUState
, tick
));
3215 gen_helper_tick_set_limit(r_tickptr
,
3217 tcg_temp_free_ptr(r_tickptr
);
3220 case 0x18: /* System tick */
3221 #if !defined(CONFIG_USER_ONLY)
3222 if (!supervisor(dc
))
3228 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3230 r_tickptr
= tcg_temp_new_ptr();
3231 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3232 offsetof(CPUState
, stick
));
3233 gen_helper_tick_set_count(r_tickptr
,
3235 tcg_temp_free_ptr(r_tickptr
);
3238 case 0x19: /* System tick compare */
3239 #if !defined(CONFIG_USER_ONLY)
3240 if (!supervisor(dc
))
3246 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3248 r_tickptr
= tcg_temp_new_ptr();
3249 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3250 offsetof(CPUState
, stick
));
3251 gen_helper_tick_set_limit(r_tickptr
,
3253 tcg_temp_free_ptr(r_tickptr
);
3257 case 0x10: /* Performance Control */
3258 case 0x11: /* Performance Instrumentation
3260 case 0x12: /* Dispatch Control */
3267 #if !defined(CONFIG_USER_ONLY)
3268 case 0x31: /* wrpsr, V9 saved, restored */
3270 if (!supervisor(dc
))
3272 #ifdef TARGET_SPARC64
3278 gen_helper_restored();
3280 case 2: /* UA2005 allclean */
3281 case 3: /* UA2005 otherw */
3282 case 4: /* UA2005 normalw */
3283 case 5: /* UA2005 invalw */
3289 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3290 gen_helper_wrpsr(cpu_dst
);
3291 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3292 dc
->cc_op
= CC_OP_FLAGS
;
3293 save_state(dc
, cpu_cond
);
3300 case 0x32: /* wrwim, V9 wrpr */
3302 if (!supervisor(dc
))
3304 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3305 #ifdef TARGET_SPARC64
3311 r_tsptr
= tcg_temp_new_ptr();
3312 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3313 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3314 offsetof(trap_state
, tpc
));
3315 tcg_temp_free_ptr(r_tsptr
);
3322 r_tsptr
= tcg_temp_new_ptr();
3323 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3324 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3325 offsetof(trap_state
, tnpc
));
3326 tcg_temp_free_ptr(r_tsptr
);
3333 r_tsptr
= tcg_temp_new_ptr();
3334 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3335 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3336 offsetof(trap_state
,
3338 tcg_temp_free_ptr(r_tsptr
);
3345 r_tsptr
= tcg_temp_new_ptr();
3346 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3347 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3348 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3349 offsetof(trap_state
, tt
));
3350 tcg_temp_free_ptr(r_tsptr
);
3357 r_tickptr
= tcg_temp_new_ptr();
3358 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3359 offsetof(CPUState
, tick
));
3360 gen_helper_tick_set_count(r_tickptr
,
3362 tcg_temp_free_ptr(r_tickptr
);
3366 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3369 save_state(dc
, cpu_cond
);
3370 gen_helper_wrpstate(cpu_tmp0
);
3376 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3377 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3378 offsetof(CPUSPARCState
, tl
));
3381 gen_helper_wrpil(cpu_tmp0
);
3384 gen_helper_wrcwp(cpu_tmp0
);
3387 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3388 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3389 offsetof(CPUSPARCState
,
3392 case 11: // canrestore
3393 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3394 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3395 offsetof(CPUSPARCState
,
3398 case 12: // cleanwin
3399 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3400 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3401 offsetof(CPUSPARCState
,
3404 case 13: // otherwin
3405 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3406 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3407 offsetof(CPUSPARCState
,
3411 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3412 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3413 offsetof(CPUSPARCState
,
3416 case 16: // UA2005 gl
3417 CHECK_IU_FEATURE(dc
, GL
);
3418 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3419 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3420 offsetof(CPUSPARCState
, gl
));
3422 case 26: // UA2005 strand status
3423 CHECK_IU_FEATURE(dc
, HYPV
);
3424 if (!hypervisor(dc
))
3426 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3432 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3433 if (dc
->def
->nwindows
!= 32)
3434 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3435 (1 << dc
->def
->nwindows
) - 1);
3436 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3440 case 0x33: /* wrtbr, UA2005 wrhpr */
3442 #ifndef TARGET_SPARC64
3443 if (!supervisor(dc
))
3445 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3447 CHECK_IU_FEATURE(dc
, HYPV
);
3448 if (!hypervisor(dc
))
3450 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3453 // XXX gen_op_wrhpstate();
3454 save_state(dc
, cpu_cond
);
3460 // XXX gen_op_wrhtstate();
3463 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3466 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3468 case 31: // hstick_cmpr
3472 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3473 r_tickptr
= tcg_temp_new_ptr();
3474 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3475 offsetof(CPUState
, hstick
));
3476 gen_helper_tick_set_limit(r_tickptr
,
3478 tcg_temp_free_ptr(r_tickptr
);
3481 case 6: // hver readonly
3489 #ifdef TARGET_SPARC64
3490 case 0x2c: /* V9 movcc */
3492 int cc
= GET_FIELD_SP(insn
, 11, 12);
3493 int cond
= GET_FIELD_SP(insn
, 14, 17);
3497 r_cond
= tcg_temp_new();
3498 if (insn
& (1 << 18)) {
3500 gen_cond(r_cond
, 0, cond
, dc
);
3502 gen_cond(r_cond
, 1, cond
, dc
);
3506 gen_fcond(r_cond
, cc
, cond
);
3509 l1
= gen_new_label();
3511 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3512 if (IS_IMM
) { /* immediate */
3515 simm
= GET_FIELD_SPs(insn
, 0, 10);
3516 r_const
= tcg_const_tl(simm
);
3517 gen_movl_TN_reg(rd
, r_const
);
3518 tcg_temp_free(r_const
);
3520 rs2
= GET_FIELD_SP(insn
, 0, 4);
3521 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3522 gen_movl_TN_reg(rd
, cpu_tmp0
);
3525 tcg_temp_free(r_cond
);
3528 case 0x2d: /* V9 sdivx */
3529 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3530 gen_movl_TN_reg(rd
, cpu_dst
);
3532 case 0x2e: /* V9 popc */
3534 cpu_src2
= get_src2(insn
, cpu_src2
);
3535 gen_helper_popc(cpu_dst
, cpu_src2
);
3536 gen_movl_TN_reg(rd
, cpu_dst
);
3538 case 0x2f: /* V9 movr */
3540 int cond
= GET_FIELD_SP(insn
, 10, 12);
3543 cpu_src1
= get_src1(insn
, cpu_src1
);
3545 l1
= gen_new_label();
3547 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3549 if (IS_IMM
) { /* immediate */
3552 simm
= GET_FIELD_SPs(insn
, 0, 9);
3553 r_const
= tcg_const_tl(simm
);
3554 gen_movl_TN_reg(rd
, r_const
);
3555 tcg_temp_free(r_const
);
3557 rs2
= GET_FIELD_SP(insn
, 0, 4);
3558 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3559 gen_movl_TN_reg(rd
, cpu_tmp0
);
3569 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3570 #ifdef TARGET_SPARC64
3571 int opf
= GET_FIELD_SP(insn
, 5, 13);
3572 rs1
= GET_FIELD(insn
, 13, 17);
3573 rs2
= GET_FIELD(insn
, 27, 31);
3574 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3578 case 0x000: /* VIS I edge8cc */
3579 case 0x001: /* VIS II edge8n */
3580 case 0x002: /* VIS I edge8lcc */
3581 case 0x003: /* VIS II edge8ln */
3582 case 0x004: /* VIS I edge16cc */
3583 case 0x005: /* VIS II edge16n */
3584 case 0x006: /* VIS I edge16lcc */
3585 case 0x007: /* VIS II edge16ln */
3586 case 0x008: /* VIS I edge32cc */
3587 case 0x009: /* VIS II edge32n */
3588 case 0x00a: /* VIS I edge32lcc */
3589 case 0x00b: /* VIS II edge32ln */
3592 case 0x010: /* VIS I array8 */
3593 CHECK_FPU_FEATURE(dc
, VIS1
);
3594 cpu_src1
= get_src1(insn
, cpu_src1
);
3595 gen_movl_reg_TN(rs2
, cpu_src2
);
3596 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3597 gen_movl_TN_reg(rd
, cpu_dst
);
3599 case 0x012: /* VIS I array16 */
3600 CHECK_FPU_FEATURE(dc
, VIS1
);
3601 cpu_src1
= get_src1(insn
, cpu_src1
);
3602 gen_movl_reg_TN(rs2
, cpu_src2
);
3603 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3604 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3605 gen_movl_TN_reg(rd
, cpu_dst
);
3607 case 0x014: /* VIS I array32 */
3608 CHECK_FPU_FEATURE(dc
, VIS1
);
3609 cpu_src1
= get_src1(insn
, cpu_src1
);
3610 gen_movl_reg_TN(rs2
, cpu_src2
);
3611 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3612 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3613 gen_movl_TN_reg(rd
, cpu_dst
);
3615 case 0x018: /* VIS I alignaddr */
3616 CHECK_FPU_FEATURE(dc
, VIS1
);
3617 cpu_src1
= get_src1(insn
, cpu_src1
);
3618 gen_movl_reg_TN(rs2
, cpu_src2
);
3619 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3620 gen_movl_TN_reg(rd
, cpu_dst
);
3622 case 0x019: /* VIS II bmask */
3623 case 0x01a: /* VIS I alignaddrl */
3626 case 0x020: /* VIS I fcmple16 */
3627 CHECK_FPU_FEATURE(dc
, VIS1
);
3628 gen_op_load_fpr_DT0(DFPREG(rs1
));
3629 gen_op_load_fpr_DT1(DFPREG(rs2
));
3630 gen_helper_fcmple16();
3631 gen_op_store_DT0_fpr(DFPREG(rd
));
3633 case 0x022: /* VIS I fcmpne16 */
3634 CHECK_FPU_FEATURE(dc
, VIS1
);
3635 gen_op_load_fpr_DT0(DFPREG(rs1
));
3636 gen_op_load_fpr_DT1(DFPREG(rs2
));
3637 gen_helper_fcmpne16();
3638 gen_op_store_DT0_fpr(DFPREG(rd
));
3640 case 0x024: /* VIS I fcmple32 */
3641 CHECK_FPU_FEATURE(dc
, VIS1
);
3642 gen_op_load_fpr_DT0(DFPREG(rs1
));
3643 gen_op_load_fpr_DT1(DFPREG(rs2
));
3644 gen_helper_fcmple32();
3645 gen_op_store_DT0_fpr(DFPREG(rd
));
3647 case 0x026: /* VIS I fcmpne32 */
3648 CHECK_FPU_FEATURE(dc
, VIS1
);
3649 gen_op_load_fpr_DT0(DFPREG(rs1
));
3650 gen_op_load_fpr_DT1(DFPREG(rs2
));
3651 gen_helper_fcmpne32();
3652 gen_op_store_DT0_fpr(DFPREG(rd
));
3654 case 0x028: /* VIS I fcmpgt16 */
3655 CHECK_FPU_FEATURE(dc
, VIS1
);
3656 gen_op_load_fpr_DT0(DFPREG(rs1
));
3657 gen_op_load_fpr_DT1(DFPREG(rs2
));
3658 gen_helper_fcmpgt16();
3659 gen_op_store_DT0_fpr(DFPREG(rd
));
3661 case 0x02a: /* VIS I fcmpeq16 */
3662 CHECK_FPU_FEATURE(dc
, VIS1
);
3663 gen_op_load_fpr_DT0(DFPREG(rs1
));
3664 gen_op_load_fpr_DT1(DFPREG(rs2
));
3665 gen_helper_fcmpeq16();
3666 gen_op_store_DT0_fpr(DFPREG(rd
));
3668 case 0x02c: /* VIS I fcmpgt32 */
3669 CHECK_FPU_FEATURE(dc
, VIS1
);
3670 gen_op_load_fpr_DT0(DFPREG(rs1
));
3671 gen_op_load_fpr_DT1(DFPREG(rs2
));
3672 gen_helper_fcmpgt32();
3673 gen_op_store_DT0_fpr(DFPREG(rd
));
3675 case 0x02e: /* VIS I fcmpeq32 */
3676 CHECK_FPU_FEATURE(dc
, VIS1
);
3677 gen_op_load_fpr_DT0(DFPREG(rs1
));
3678 gen_op_load_fpr_DT1(DFPREG(rs2
));
3679 gen_helper_fcmpeq32();
3680 gen_op_store_DT0_fpr(DFPREG(rd
));
3682 case 0x031: /* VIS I fmul8x16 */
3683 CHECK_FPU_FEATURE(dc
, VIS1
);
3684 gen_op_load_fpr_DT0(DFPREG(rs1
));
3685 gen_op_load_fpr_DT1(DFPREG(rs2
));
3686 gen_helper_fmul8x16();
3687 gen_op_store_DT0_fpr(DFPREG(rd
));
3689 case 0x033: /* VIS I fmul8x16au */
3690 CHECK_FPU_FEATURE(dc
, VIS1
);
3691 gen_op_load_fpr_DT0(DFPREG(rs1
));
3692 gen_op_load_fpr_DT1(DFPREG(rs2
));
3693 gen_helper_fmul8x16au();
3694 gen_op_store_DT0_fpr(DFPREG(rd
));
3696 case 0x035: /* VIS I fmul8x16al */
3697 CHECK_FPU_FEATURE(dc
, VIS1
);
3698 gen_op_load_fpr_DT0(DFPREG(rs1
));
3699 gen_op_load_fpr_DT1(DFPREG(rs2
));
3700 gen_helper_fmul8x16al();
3701 gen_op_store_DT0_fpr(DFPREG(rd
));
3703 case 0x036: /* VIS I fmul8sux16 */
3704 CHECK_FPU_FEATURE(dc
, VIS1
);
3705 gen_op_load_fpr_DT0(DFPREG(rs1
));
3706 gen_op_load_fpr_DT1(DFPREG(rs2
));
3707 gen_helper_fmul8sux16();
3708 gen_op_store_DT0_fpr(DFPREG(rd
));
3710 case 0x037: /* VIS I fmul8ulx16 */
3711 CHECK_FPU_FEATURE(dc
, VIS1
);
3712 gen_op_load_fpr_DT0(DFPREG(rs1
));
3713 gen_op_load_fpr_DT1(DFPREG(rs2
));
3714 gen_helper_fmul8ulx16();
3715 gen_op_store_DT0_fpr(DFPREG(rd
));
3717 case 0x038: /* VIS I fmuld8sux16 */
3718 CHECK_FPU_FEATURE(dc
, VIS1
);
3719 gen_op_load_fpr_DT0(DFPREG(rs1
));
3720 gen_op_load_fpr_DT1(DFPREG(rs2
));
3721 gen_helper_fmuld8sux16();
3722 gen_op_store_DT0_fpr(DFPREG(rd
));
3724 case 0x039: /* VIS I fmuld8ulx16 */
3725 CHECK_FPU_FEATURE(dc
, VIS1
);
3726 gen_op_load_fpr_DT0(DFPREG(rs1
));
3727 gen_op_load_fpr_DT1(DFPREG(rs2
));
3728 gen_helper_fmuld8ulx16();
3729 gen_op_store_DT0_fpr(DFPREG(rd
));
3731 case 0x03a: /* VIS I fpack32 */
3732 case 0x03b: /* VIS I fpack16 */
3733 case 0x03d: /* VIS I fpackfix */
3734 case 0x03e: /* VIS I pdist */
3737 case 0x048: /* VIS I faligndata */
3738 CHECK_FPU_FEATURE(dc
, VIS1
);
3739 gen_op_load_fpr_DT0(DFPREG(rs1
));
3740 gen_op_load_fpr_DT1(DFPREG(rs2
));
3741 gen_helper_faligndata();
3742 gen_op_store_DT0_fpr(DFPREG(rd
));
3744 case 0x04b: /* VIS I fpmerge */
3745 CHECK_FPU_FEATURE(dc
, VIS1
);
3746 gen_op_load_fpr_DT0(DFPREG(rs1
));
3747 gen_op_load_fpr_DT1(DFPREG(rs2
));
3748 gen_helper_fpmerge();
3749 gen_op_store_DT0_fpr(DFPREG(rd
));
3751 case 0x04c: /* VIS II bshuffle */
3754 case 0x04d: /* VIS I fexpand */
3755 CHECK_FPU_FEATURE(dc
, VIS1
);
3756 gen_op_load_fpr_DT0(DFPREG(rs1
));
3757 gen_op_load_fpr_DT1(DFPREG(rs2
));
3758 gen_helper_fexpand();
3759 gen_op_store_DT0_fpr(DFPREG(rd
));
3761 case 0x050: /* VIS I fpadd16 */
3762 CHECK_FPU_FEATURE(dc
, VIS1
);
3763 gen_op_load_fpr_DT0(DFPREG(rs1
));
3764 gen_op_load_fpr_DT1(DFPREG(rs2
));
3765 gen_helper_fpadd16();
3766 gen_op_store_DT0_fpr(DFPREG(rd
));
3768 case 0x051: /* VIS I fpadd16s */
3769 CHECK_FPU_FEATURE(dc
, VIS1
);
3770 gen_helper_fpadd16s(cpu_fpr
[rd
],
3771 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3773 case 0x052: /* VIS I fpadd32 */
3774 CHECK_FPU_FEATURE(dc
, VIS1
);
3775 gen_op_load_fpr_DT0(DFPREG(rs1
));
3776 gen_op_load_fpr_DT1(DFPREG(rs2
));
3777 gen_helper_fpadd32();
3778 gen_op_store_DT0_fpr(DFPREG(rd
));
3780 case 0x053: /* VIS I fpadd32s */
3781 CHECK_FPU_FEATURE(dc
, VIS1
);
3782 gen_helper_fpadd32s(cpu_fpr
[rd
],
3783 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3785 case 0x054: /* VIS I fpsub16 */
3786 CHECK_FPU_FEATURE(dc
, VIS1
);
3787 gen_op_load_fpr_DT0(DFPREG(rs1
));
3788 gen_op_load_fpr_DT1(DFPREG(rs2
));
3789 gen_helper_fpsub16();
3790 gen_op_store_DT0_fpr(DFPREG(rd
));
3792 case 0x055: /* VIS I fpsub16s */
3793 CHECK_FPU_FEATURE(dc
, VIS1
);
3794 gen_helper_fpsub16s(cpu_fpr
[rd
],
3795 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3797 case 0x056: /* VIS I fpsub32 */
3798 CHECK_FPU_FEATURE(dc
, VIS1
);
3799 gen_op_load_fpr_DT0(DFPREG(rs1
));
3800 gen_op_load_fpr_DT1(DFPREG(rs2
));
3801 gen_helper_fpsub32();
3802 gen_op_store_DT0_fpr(DFPREG(rd
));
3804 case 0x057: /* VIS I fpsub32s */
3805 CHECK_FPU_FEATURE(dc
, VIS1
);
3806 gen_helper_fpsub32s(cpu_fpr
[rd
],
3807 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3809 case 0x060: /* VIS I fzero */
3810 CHECK_FPU_FEATURE(dc
, VIS1
);
3811 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3812 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3814 case 0x061: /* VIS I fzeros */
3815 CHECK_FPU_FEATURE(dc
, VIS1
);
3816 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3818 case 0x062: /* VIS I fnor */
3819 CHECK_FPU_FEATURE(dc
, VIS1
);
3820 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3821 cpu_fpr
[DFPREG(rs2
)]);
3822 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3823 cpu_fpr
[DFPREG(rs2
) + 1]);
3825 case 0x063: /* VIS I fnors */
3826 CHECK_FPU_FEATURE(dc
, VIS1
);
3827 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3829 case 0x064: /* VIS I fandnot2 */
3830 CHECK_FPU_FEATURE(dc
, VIS1
);
3831 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3832 cpu_fpr
[DFPREG(rs2
)]);
3833 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3834 cpu_fpr
[DFPREG(rs1
) + 1],
3835 cpu_fpr
[DFPREG(rs2
) + 1]);
3837 case 0x065: /* VIS I fandnot2s */
3838 CHECK_FPU_FEATURE(dc
, VIS1
);
3839 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3841 case 0x066: /* VIS I fnot2 */
3842 CHECK_FPU_FEATURE(dc
, VIS1
);
3843 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
3844 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3845 cpu_fpr
[DFPREG(rs2
) + 1]);
3847 case 0x067: /* VIS I fnot2s */
3848 CHECK_FPU_FEATURE(dc
, VIS1
);
3849 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3851 case 0x068: /* VIS I fandnot1 */
3852 CHECK_FPU_FEATURE(dc
, VIS1
);
3853 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3854 cpu_fpr
[DFPREG(rs1
)]);
3855 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3856 cpu_fpr
[DFPREG(rs2
) + 1],
3857 cpu_fpr
[DFPREG(rs1
) + 1]);
3859 case 0x069: /* VIS I fandnot1s */
3860 CHECK_FPU_FEATURE(dc
, VIS1
);
3861 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3863 case 0x06a: /* VIS I fnot1 */
3864 CHECK_FPU_FEATURE(dc
, VIS1
);
3865 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3866 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3867 cpu_fpr
[DFPREG(rs1
) + 1]);
3869 case 0x06b: /* VIS I fnot1s */
3870 CHECK_FPU_FEATURE(dc
, VIS1
);
3871 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3873 case 0x06c: /* VIS I fxor */
3874 CHECK_FPU_FEATURE(dc
, VIS1
);
3875 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3876 cpu_fpr
[DFPREG(rs2
)]);
3877 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3878 cpu_fpr
[DFPREG(rs1
) + 1],
3879 cpu_fpr
[DFPREG(rs2
) + 1]);
3881 case 0x06d: /* VIS I fxors */
3882 CHECK_FPU_FEATURE(dc
, VIS1
);
3883 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3885 case 0x06e: /* VIS I fnand */
3886 CHECK_FPU_FEATURE(dc
, VIS1
);
3887 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3888 cpu_fpr
[DFPREG(rs2
)]);
3889 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3890 cpu_fpr
[DFPREG(rs2
) + 1]);
3892 case 0x06f: /* VIS I fnands */
3893 CHECK_FPU_FEATURE(dc
, VIS1
);
3894 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3896 case 0x070: /* VIS I fand */
3897 CHECK_FPU_FEATURE(dc
, VIS1
);
3898 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3899 cpu_fpr
[DFPREG(rs2
)]);
3900 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3901 cpu_fpr
[DFPREG(rs1
) + 1],
3902 cpu_fpr
[DFPREG(rs2
) + 1]);
3904 case 0x071: /* VIS I fands */
3905 CHECK_FPU_FEATURE(dc
, VIS1
);
3906 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3908 case 0x072: /* VIS I fxnor */
3909 CHECK_FPU_FEATURE(dc
, VIS1
);
3910 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3911 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3912 cpu_fpr
[DFPREG(rs1
)]);
3913 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
3914 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3915 cpu_fpr
[DFPREG(rs1
) + 1]);
3917 case 0x073: /* VIS I fxnors */
3918 CHECK_FPU_FEATURE(dc
, VIS1
);
3919 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3920 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3922 case 0x074: /* VIS I fsrc1 */
3923 CHECK_FPU_FEATURE(dc
, VIS1
);
3924 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3925 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
3926 cpu_fpr
[DFPREG(rs1
) + 1]);
3928 case 0x075: /* VIS I fsrc1s */
3929 CHECK_FPU_FEATURE(dc
, VIS1
);
3930 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3932 case 0x076: /* VIS I fornot2 */
3933 CHECK_FPU_FEATURE(dc
, VIS1
);
3934 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3935 cpu_fpr
[DFPREG(rs2
)]);
3936 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3937 cpu_fpr
[DFPREG(rs1
) + 1],
3938 cpu_fpr
[DFPREG(rs2
) + 1]);
3940 case 0x077: /* VIS I fornot2s */
3941 CHECK_FPU_FEATURE(dc
, VIS1
);
3942 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3944 case 0x078: /* VIS I fsrc2 */
3945 CHECK_FPU_FEATURE(dc
, VIS1
);
3946 gen_op_load_fpr_DT0(DFPREG(rs2
));
3947 gen_op_store_DT0_fpr(DFPREG(rd
));
3949 case 0x079: /* VIS I fsrc2s */
3950 CHECK_FPU_FEATURE(dc
, VIS1
);
3951 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3953 case 0x07a: /* VIS I fornot1 */
3954 CHECK_FPU_FEATURE(dc
, VIS1
);
3955 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3956 cpu_fpr
[DFPREG(rs1
)]);
3957 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3958 cpu_fpr
[DFPREG(rs2
) + 1],
3959 cpu_fpr
[DFPREG(rs1
) + 1]);
3961 case 0x07b: /* VIS I fornot1s */
3962 CHECK_FPU_FEATURE(dc
, VIS1
);
3963 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3965 case 0x07c: /* VIS I for */
3966 CHECK_FPU_FEATURE(dc
, VIS1
);
3967 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3968 cpu_fpr
[DFPREG(rs2
)]);
3969 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
3970 cpu_fpr
[DFPREG(rs1
) + 1],
3971 cpu_fpr
[DFPREG(rs2
) + 1]);
3973 case 0x07d: /* VIS I fors */
3974 CHECK_FPU_FEATURE(dc
, VIS1
);
3975 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3977 case 0x07e: /* VIS I fone */
3978 CHECK_FPU_FEATURE(dc
, VIS1
);
3979 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
3980 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
3982 case 0x07f: /* VIS I fones */
3983 CHECK_FPU_FEATURE(dc
, VIS1
);
3984 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
3986 case 0x080: /* VIS I shutdown */
3987 case 0x081: /* VIS II siam */
3996 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
3997 #ifdef TARGET_SPARC64
4002 #ifdef TARGET_SPARC64
4003 } else if (xop
== 0x39) { /* V9 return */
4006 save_state(dc
, cpu_cond
);
4007 cpu_src1
= get_src1(insn
, cpu_src1
);
4008 if (IS_IMM
) { /* immediate */
4009 simm
= GET_FIELDs(insn
, 19, 31);
4010 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4011 } else { /* register */
4012 rs2
= GET_FIELD(insn
, 27, 31);
4014 gen_movl_reg_TN(rs2
, cpu_src2
);
4015 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4017 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4019 gen_helper_restore();
4020 gen_mov_pc_npc(dc
, cpu_cond
);
4021 r_const
= tcg_const_i32(3);
4022 gen_helper_check_align(cpu_dst
, r_const
);
4023 tcg_temp_free_i32(r_const
);
4024 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4025 dc
->npc
= DYNAMIC_PC
;
4029 cpu_src1
= get_src1(insn
, cpu_src1
);
4030 if (IS_IMM
) { /* immediate */
4031 simm
= GET_FIELDs(insn
, 19, 31);
4032 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4033 } else { /* register */
4034 rs2
= GET_FIELD(insn
, 27, 31);
4036 gen_movl_reg_TN(rs2
, cpu_src2
);
4037 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4039 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4042 case 0x38: /* jmpl */
4047 r_pc
= tcg_const_tl(dc
->pc
);
4048 gen_movl_TN_reg(rd
, r_pc
);
4049 tcg_temp_free(r_pc
);
4050 gen_mov_pc_npc(dc
, cpu_cond
);
4051 r_const
= tcg_const_i32(3);
4052 gen_helper_check_align(cpu_dst
, r_const
);
4053 tcg_temp_free_i32(r_const
);
4054 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4055 dc
->npc
= DYNAMIC_PC
;
4058 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4059 case 0x39: /* rett, V9 return */
4063 if (!supervisor(dc
))
4065 gen_mov_pc_npc(dc
, cpu_cond
);
4066 r_const
= tcg_const_i32(3);
4067 gen_helper_check_align(cpu_dst
, r_const
);
4068 tcg_temp_free_i32(r_const
);
4069 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4070 dc
->npc
= DYNAMIC_PC
;
4075 case 0x3b: /* flush */
4076 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4078 gen_helper_flush(cpu_dst
);
4080 case 0x3c: /* save */
4081 save_state(dc
, cpu_cond
);
4083 gen_movl_TN_reg(rd
, cpu_dst
);
4085 case 0x3d: /* restore */
4086 save_state(dc
, cpu_cond
);
4087 gen_helper_restore();
4088 gen_movl_TN_reg(rd
, cpu_dst
);
4090 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4091 case 0x3e: /* V9 done/retry */
4095 if (!supervisor(dc
))
4097 dc
->npc
= DYNAMIC_PC
;
4098 dc
->pc
= DYNAMIC_PC
;
4102 if (!supervisor(dc
))
4104 dc
->npc
= DYNAMIC_PC
;
4105 dc
->pc
= DYNAMIC_PC
;
4121 case 3: /* load/store instructions */
4123 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4125 /* flush pending conditional evaluations before exposing
4127 if (dc
->cc_op
!= CC_OP_FLAGS
) {
4128 dc
->cc_op
= CC_OP_FLAGS
;
4129 gen_helper_compute_psr();
4131 cpu_src1
= get_src1(insn
, cpu_src1
);
4132 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4133 rs2
= GET_FIELD(insn
, 27, 31);
4134 gen_movl_reg_TN(rs2
, cpu_src2
);
4135 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4136 } else if (IS_IMM
) { /* immediate */
4137 simm
= GET_FIELDs(insn
, 19, 31);
4138 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4139 } else { /* register */
4140 rs2
= GET_FIELD(insn
, 27, 31);
4142 gen_movl_reg_TN(rs2
, cpu_src2
);
4143 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4145 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4147 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4148 (xop
> 0x17 && xop
<= 0x1d ) ||
4149 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4151 case 0x0: /* ld, V9 lduw, load unsigned word */
4152 gen_address_mask(dc
, cpu_addr
);
4153 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4155 case 0x1: /* ldub, load unsigned byte */
4156 gen_address_mask(dc
, cpu_addr
);
4157 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4159 case 0x2: /* lduh, load unsigned halfword */
4160 gen_address_mask(dc
, cpu_addr
);
4161 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4163 case 0x3: /* ldd, load double word */
4169 save_state(dc
, cpu_cond
);
4170 r_const
= tcg_const_i32(7);
4171 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4172 tcg_temp_free_i32(r_const
);
4173 gen_address_mask(dc
, cpu_addr
);
4174 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4175 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4176 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4177 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4178 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4179 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4180 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4183 case 0x9: /* ldsb, load signed byte */
4184 gen_address_mask(dc
, cpu_addr
);
4185 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4187 case 0xa: /* ldsh, load signed halfword */
4188 gen_address_mask(dc
, cpu_addr
);
4189 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4191 case 0xd: /* ldstub -- XXX: should be atomically */
4195 gen_address_mask(dc
, cpu_addr
);
4196 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4197 r_const
= tcg_const_tl(0xff);
4198 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4199 tcg_temp_free(r_const
);
4202 case 0x0f: /* swap, swap register with memory. Also
4204 CHECK_IU_FEATURE(dc
, SWAP
);
4205 gen_movl_reg_TN(rd
, cpu_val
);
4206 gen_address_mask(dc
, cpu_addr
);
4207 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4208 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4209 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4211 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4212 case 0x10: /* lda, V9 lduwa, load word alternate */
4213 #ifndef TARGET_SPARC64
4216 if (!supervisor(dc
))
4219 save_state(dc
, cpu_cond
);
4220 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4222 case 0x11: /* lduba, load unsigned byte alternate */
4223 #ifndef TARGET_SPARC64
4226 if (!supervisor(dc
))
4229 save_state(dc
, cpu_cond
);
4230 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4232 case 0x12: /* lduha, load unsigned halfword alternate */
4233 #ifndef TARGET_SPARC64
4236 if (!supervisor(dc
))
4239 save_state(dc
, cpu_cond
);
4240 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4242 case 0x13: /* ldda, load double word alternate */
4243 #ifndef TARGET_SPARC64
4246 if (!supervisor(dc
))
4251 save_state(dc
, cpu_cond
);
4252 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4254 case 0x19: /* ldsba, load signed byte alternate */
4255 #ifndef TARGET_SPARC64
4258 if (!supervisor(dc
))
4261 save_state(dc
, cpu_cond
);
4262 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4264 case 0x1a: /* ldsha, load signed halfword alternate */
4265 #ifndef TARGET_SPARC64
4268 if (!supervisor(dc
))
4271 save_state(dc
, cpu_cond
);
4272 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4274 case 0x1d: /* ldstuba -- XXX: should be atomically */
4275 #ifndef TARGET_SPARC64
4278 if (!supervisor(dc
))
4281 save_state(dc
, cpu_cond
);
4282 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4284 case 0x1f: /* swapa, swap reg with alt. memory. Also
4286 CHECK_IU_FEATURE(dc
, SWAP
);
4287 #ifndef TARGET_SPARC64
4290 if (!supervisor(dc
))
4293 save_state(dc
, cpu_cond
);
4294 gen_movl_reg_TN(rd
, cpu_val
);
4295 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4298 #ifndef TARGET_SPARC64
4299 case 0x30: /* ldc */
4300 case 0x31: /* ldcsr */
4301 case 0x33: /* lddc */
4305 #ifdef TARGET_SPARC64
4306 case 0x08: /* V9 ldsw */
4307 gen_address_mask(dc
, cpu_addr
);
4308 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4310 case 0x0b: /* V9 ldx */
4311 gen_address_mask(dc
, cpu_addr
);
4312 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4314 case 0x18: /* V9 ldswa */
4315 save_state(dc
, cpu_cond
);
4316 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4318 case 0x1b: /* V9 ldxa */
4319 save_state(dc
, cpu_cond
);
4320 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4322 case 0x2d: /* V9 prefetch, no effect */
4324 case 0x30: /* V9 ldfa */
4325 save_state(dc
, cpu_cond
);
4326 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4328 case 0x33: /* V9 lddfa */
4329 save_state(dc
, cpu_cond
);
4330 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4332 case 0x3d: /* V9 prefetcha, no effect */
4334 case 0x32: /* V9 ldqfa */
4335 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4336 save_state(dc
, cpu_cond
);
4337 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4343 gen_movl_TN_reg(rd
, cpu_val
);
4344 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4347 } else if (xop
>= 0x20 && xop
< 0x24) {
4348 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4350 save_state(dc
, cpu_cond
);
4352 case 0x20: /* ldf, load fpreg */
4353 gen_address_mask(dc
, cpu_addr
);
4354 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4355 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4357 case 0x21: /* ldfsr, V9 ldxfsr */
4358 #ifdef TARGET_SPARC64
4359 gen_address_mask(dc
, cpu_addr
);
4361 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4362 gen_helper_ldxfsr(cpu_tmp64
);
4366 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4367 gen_helper_ldfsr(cpu_tmp32
);
4371 case 0x22: /* ldqf, load quad fpreg */
4375 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4376 r_const
= tcg_const_i32(dc
->mem_idx
);
4377 gen_helper_ldqf(cpu_addr
, r_const
);
4378 tcg_temp_free_i32(r_const
);
4379 gen_op_store_QT0_fpr(QFPREG(rd
));
4382 case 0x23: /* lddf, load double fpreg */
4386 r_const
= tcg_const_i32(dc
->mem_idx
);
4387 gen_helper_lddf(cpu_addr
, r_const
);
4388 tcg_temp_free_i32(r_const
);
4389 gen_op_store_DT0_fpr(DFPREG(rd
));
4395 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4396 xop
== 0xe || xop
== 0x1e) {
4397 gen_movl_reg_TN(rd
, cpu_val
);
4399 case 0x4: /* st, store word */
4400 gen_address_mask(dc
, cpu_addr
);
4401 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4403 case 0x5: /* stb, store byte */
4404 gen_address_mask(dc
, cpu_addr
);
4405 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4407 case 0x6: /* sth, store halfword */
4408 gen_address_mask(dc
, cpu_addr
);
4409 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4411 case 0x7: /* std, store double word */
4417 save_state(dc
, cpu_cond
);
4418 gen_address_mask(dc
, cpu_addr
);
4419 r_const
= tcg_const_i32(7);
4420 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4421 tcg_temp_free_i32(r_const
);
4422 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4423 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4424 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4427 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4428 case 0x14: /* sta, V9 stwa, store word alternate */
4429 #ifndef TARGET_SPARC64
4432 if (!supervisor(dc
))
4435 save_state(dc
, cpu_cond
);
4436 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4438 case 0x15: /* stba, store byte alternate */
4439 #ifndef TARGET_SPARC64
4442 if (!supervisor(dc
))
4445 save_state(dc
, cpu_cond
);
4446 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4448 case 0x16: /* stha, store halfword alternate */
4449 #ifndef TARGET_SPARC64
4452 if (!supervisor(dc
))
4455 save_state(dc
, cpu_cond
);
4456 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4458 case 0x17: /* stda, store double word alternate */
4459 #ifndef TARGET_SPARC64
4462 if (!supervisor(dc
))
4468 save_state(dc
, cpu_cond
);
4469 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4473 #ifdef TARGET_SPARC64
4474 case 0x0e: /* V9 stx */
4475 gen_address_mask(dc
, cpu_addr
);
4476 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4478 case 0x1e: /* V9 stxa */
4479 save_state(dc
, cpu_cond
);
4480 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4486 } else if (xop
> 0x23 && xop
< 0x28) {
4487 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4489 save_state(dc
, cpu_cond
);
4491 case 0x24: /* stf, store fpreg */
4492 gen_address_mask(dc
, cpu_addr
);
4493 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4494 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4496 case 0x25: /* stfsr, V9 stxfsr */
4497 #ifdef TARGET_SPARC64
4498 gen_address_mask(dc
, cpu_addr
);
4499 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4501 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4503 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4505 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4506 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4510 #ifdef TARGET_SPARC64
4511 /* V9 stqf, store quad fpreg */
4515 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4516 gen_op_load_fpr_QT0(QFPREG(rd
));
4517 r_const
= tcg_const_i32(dc
->mem_idx
);
4518 gen_helper_stqf(cpu_addr
, r_const
);
4519 tcg_temp_free_i32(r_const
);
4522 #else /* !TARGET_SPARC64 */
4523 /* stdfq, store floating point queue */
4524 #if defined(CONFIG_USER_ONLY)
4527 if (!supervisor(dc
))
4529 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4534 case 0x27: /* stdf, store double fpreg */
4538 gen_op_load_fpr_DT0(DFPREG(rd
));
4539 r_const
= tcg_const_i32(dc
->mem_idx
);
4540 gen_helper_stdf(cpu_addr
, r_const
);
4541 tcg_temp_free_i32(r_const
);
4547 } else if (xop
> 0x33 && xop
< 0x3f) {
4548 save_state(dc
, cpu_cond
);
4550 #ifdef TARGET_SPARC64
4551 case 0x34: /* V9 stfa */
4552 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4554 case 0x36: /* V9 stqfa */
4558 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4559 r_const
= tcg_const_i32(7);
4560 gen_helper_check_align(cpu_addr
, r_const
);
4561 tcg_temp_free_i32(r_const
);
4562 gen_op_load_fpr_QT0(QFPREG(rd
));
4563 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4566 case 0x37: /* V9 stdfa */
4567 gen_op_load_fpr_DT0(DFPREG(rd
));
4568 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4570 case 0x3c: /* V9 casa */
4571 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4572 gen_movl_TN_reg(rd
, cpu_val
);
4574 case 0x3e: /* V9 casxa */
4575 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4576 gen_movl_TN_reg(rd
, cpu_val
);
4579 case 0x34: /* stc */
4580 case 0x35: /* stcsr */
4581 case 0x36: /* stdcq */
4582 case 0x37: /* stdc */
4593 /* default case for non jump instructions */
4594 if (dc
->npc
== DYNAMIC_PC
) {
4595 dc
->pc
= DYNAMIC_PC
;
4597 } else if (dc
->npc
== JUMP_PC
) {
4598 /* we can do a static jump */
4599 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4603 dc
->npc
= dc
->npc
+ 4;
4611 save_state(dc
, cpu_cond
);
4612 r_const
= tcg_const_i32(TT_ILL_INSN
);
4613 gen_helper_raise_exception(r_const
);
4614 tcg_temp_free_i32(r_const
);
4622 save_state(dc
, cpu_cond
);
4623 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4624 gen_helper_raise_exception(r_const
);
4625 tcg_temp_free_i32(r_const
);
4629 #if !defined(CONFIG_USER_ONLY)
4634 save_state(dc
, cpu_cond
);
4635 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4636 gen_helper_raise_exception(r_const
);
4637 tcg_temp_free_i32(r_const
);
4643 save_state(dc
, cpu_cond
);
4644 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4647 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4649 save_state(dc
, cpu_cond
);
4650 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4654 #ifndef TARGET_SPARC64
4659 save_state(dc
, cpu_cond
);
4660 r_const
= tcg_const_i32(TT_NCP_INSN
);
4661 gen_helper_raise_exception(r_const
);
4662 tcg_temp_free(r_const
);
4668 tcg_temp_free(cpu_tmp1
);
4669 tcg_temp_free(cpu_tmp2
);
/* Core translation loop for one TranslationBlock: repeatedly decode a
   guest SPARC instruction (disas_sparc_insn) and emit TCG ops until the
   block must end (opcode buffer full, page boundary, singlestep, or a
   control-flow change).  'spc' distinguishes the two public entry
   points (0 from gen_intermediate_code, 1 from gen_intermediate_code_pc
   — presumably enabling the "Search PC" bookkeeping below; confirm at
   the elided spc checks).  'env' supplies MMU index, FPU enablement and
   breakpoints. */
4672 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4673 int spc
, CPUSPARCState
*env
)
4675 target_ulong pc_start
, last_pc
;
4676 uint16_t *gen_opc_end
;
4677 DisasContext dc1
, *dc
= &dc1
;
/* Start from a zeroed DisasContext, then seed it from tb/env. */
4683 memset(dc
, 0, sizeof(DisasContext
));
/* tb->cs_base carries the guest next-PC for the first instruction. */
4688 dc
->npc
= (target_ulong
) tb
->cs_base
;
4689 dc
->cc_op
= CC_OP_DYNAMIC
;
4690 dc
->mem_idx
= cpu_mmu_index(env
);
/* FPU is usable only if the CPU model has one AND it is enabled at
   run time; otherwise force it off. */
4692 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4693 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4695 dc
->fpu_enabled
= 0;
4696 #ifdef TARGET_SPARC64
4697 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4699 dc
->singlestep
= (env
->singlestep_enabled
|| singlestep
);
4700 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
/* Allocate the per-TB temporaries used throughout disas_sparc_insn(). */
4702 cpu_tmp0
= tcg_temp_new();
4703 cpu_tmp32
= tcg_temp_new_i32();
4704 cpu_tmp64
= tcg_temp_new_i64();
4706 cpu_dst
= tcg_temp_local_new();
4709 cpu_val
= tcg_temp_local_new();
4710 cpu_addr
= tcg_temp_local_new();
/* Bound the number of guest instructions per TB (icount support). */
4713 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4715 max_insns
= CF_COUNT_MASK
;
/* Breakpoint at the current PC: spill translator state first if we
   are not at the very start of the block. */
4718 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
4719 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4720 if (bp
->pc
== dc
->pc
) {
4721 if (dc
->pc
!= pc_start
)
4722 save_state(dc
, cpu_cond
);
/* "Search PC" bookkeeping: record, per opcode-buffer index, the guest
   PC/NPC/icount so a host PC can later be mapped back to guest state
   (consumed by gen_pc_load via gen_opc_pc/gen_opc_npc). */
4731 qemu_log("Search PC...\n");
4732 j
= gen_opc_ptr
- gen_opc_buf
;
4736 gen_opc_instr_start
[lj
++] = 0;
4737 gen_opc_pc
[lj
] = dc
->pc
;
4738 gen_opc_npc
[lj
] = dc
->npc
;
4739 gen_opc_instr_start
[lj
] = 1;
4740 gen_opc_icount
[lj
] = num_insns
;
4743 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
/* Decode and translate exactly one guest instruction. */
4746 disas_sparc_insn(dc
);
4751 /* if the next PC is different, we abort now */
4752 if (dc
->pc
!= (last_pc
+ 4))
4754 /* if we reach a page boundary, we stop generation so that the
4755 PC of a TT_TFAULT exception is always in the right page */
4756 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4758 /* if single step mode, we generate only one instruction and
4759 generate an exception */
4760 if (dc
->singlestep
) {
/* Loop while there is opcode-buffer room, we stay well inside the
   page, and the instruction budget is not exhausted. */
4763 } while ((gen_opc_ptr
< gen_opc_end
) &&
4764 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4765 num_insns
< max_insns
);
/* Release the per-TB temporaries allocated above. */
4768 tcg_temp_free(cpu_addr
);
4769 tcg_temp_free(cpu_val
);
4770 tcg_temp_free(cpu_dst
);
4771 tcg_temp_free_i64(cpu_tmp64
);
4772 tcg_temp_free_i32(cpu_tmp32
);
4773 tcg_temp_free(cpu_tmp0
);
4774 if (tb
->cflags
& CF_LAST_IO
)
/* Epilogue: if both PC and NPC are statically known we can chain
   directly to the successor TB; otherwise store what is known and
   fall back to save_npc(). */
4777 if (dc
->pc
!= DYNAMIC_PC
&&
4778 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4779 /* static PC and NPC: we can use direct chaining */
4780 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4782 if (dc
->pc
!= DYNAMIC_PC
)
4783 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4784 save_npc(dc
, cpu_cond
);
4788 gen_icount_end(tb
, num_insns
);
4789 *gen_opc_ptr
= INDEX_op_end
;
4791 j
= gen_opc_ptr
- gen_opc_buf
;
4794 gen_opc_instr_start
[lj
++] = 0;
/* Save the conditional-branch targets for PC search; gen_pc_load()
   reads gen_opc_jump_pc[] to reconstruct env->npc. */
4798 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4799 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4801 tb
->size
= last_pc
+ 4 - pc_start
;
4802 tb
->icount
= num_insns
;
/* Optional logging of the guest code that was just translated. */
4805 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
4806 qemu_log("--------------\n");
4807 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
4808 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
/* Public entry point: translate the guest code of 'tb' into TCG ops.
   Thin wrapper over gen_intermediate_code_internal() with spc = 0
   (normal translation; contrast gen_intermediate_code_pc). */
4814 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4816 gen_intermediate_code_internal(tb
, 0, env
);
/* Public entry point: same as gen_intermediate_code() but with spc = 1,
   presumably enabling the opcode-index/PC recording used to map a host
   PC back to guest PC/NPC (see gen_pc_load) — confirm at the elided spc
   checks in gen_intermediate_code_internal. */
4819 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4821 gen_intermediate_code_internal(tb
, 1, env
);
/* One-time translator initialization: create the TCG globals that
   mirror CPUState fields (integer/FP registers, condition codes,
   PC/NPC, control registers) and register the helper functions.
   These globals back the cpu_* variables declared at file scope. */
4824 void gen_intermediate_code_init(CPUSPARCState
*env
)
/* Debug names for the TCG globals created below.  g0 gets no global
   of its own: the gregs loop below starts at i = 1. */
4828 static const char * const gregnames
[8] = {
4829 NULL
, // g0 not used
4838 static const char * const fregnames
[64] = {
4839 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4840 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4841 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4842 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4843 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4844 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4845 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4846 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4849 /* init various static tables */
/* 'env' lives in a fixed host register (TCG_AREG0); every other global
   is a memory-backed TCG value at a CPUState offset. */
4853 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
4854 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
4855 offsetof(CPUState
, regwptr
),
/* SPARC64-only globals: extended condition codes, ASI, FPRS, GSR,
   timer compare registers, hypervisor state and soft interrupts. */
4857 #ifdef TARGET_SPARC64
4858 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
4860 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
4862 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
4864 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
4866 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4867 offsetof(CPUState
, tick_cmpr
),
4869 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4870 offsetof(CPUState
, stick_cmpr
),
4872 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4873 offsetof(CPUState
, hstick_cmpr
),
4875 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
4877 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
4879 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
4881 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
4882 offsetof(CPUState
, ssr
), "ssr");
4883 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
4884 offsetof(CPUState
, version
), "ver");
4885 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
4886 offsetof(CPUState
, softint
),
4889 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
/* Globals shared by both 32-bit and 64-bit targets: condition value,
   CC sources/destination/op, PSR, FSR, PC, NPC and the Y register. */
4892 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
4894 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
4896 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
4897 offsetof(CPUState
, cc_src2
),
4899 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4901 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
4903 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
4905 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
4907 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
4909 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
4911 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
4912 #ifndef CONFIG_USER_ONLY
4913 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
/* One global per general register g1..g7 (g0 is hard-wired, skipped). */
4916 for (i
= 1; i
< 8; i
++)
4917 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
4918 offsetof(CPUState
, gregs
[i
]),
/* One i32 global per floating-point register. */
4920 for (i
= 0; i
< TARGET_FPREGS
; i
++)
4921 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
4922 offsetof(CPUState
, fpr
[i
]),
4925 /* register helpers */
4927 #define GEN_HELPER 2
4932 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
4933 unsigned long searched_pc
, int pc_pos
, void *puc
)
4936 env
->pc
= gen_opc_pc
[pc_pos
];
4937 npc
= gen_opc_npc
[pc_pos
];
4939 /* dynamic NPC: already stored */
4940 } else if (npc
== 2) {
4941 /* jump PC: use 'cond' and the jump targets of the translation */
4943 env
->npc
= gen_opc_jump_pc
[0];
4945 env
->npc
= gen_opc_jump_pc
[1];
4951 /* flush pending conditional evaluations before exposing cpu state */
4952 if (CC_OP
!= CC_OP_FLAGS
) {
4953 helper_compute_psr();