/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
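/* Worked example: GET_FIELD(insn, 2, 6) expands to
   (insn >> (31 - 6)) & ((1 << 5) - 1), i.e. (insn >> 25) & 0x1f,
   which is exactly how the five-bit rd field is fetched below.
   FROM and TO count from the most significant bit, hence the
   "non-native" order noted above. */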
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
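/* A double-precision value occupies two consecutive 32-bit entries of
   cpu_fpr[]; these helpers stage it through the dt0/dt1 (and, below,
   qt0/qt1) scratch slots in the CPU state so the FP helpers can
   operate on the value as a whole. */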
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
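/* Usage sketch: callers apply gen_address_mask(dc, cpu_addr) to an
   effective address before emitting a load or store; with the sparc64
   32-bit address mask in effect this truncates the address to 32 bits,
   and on 32-bit targets the call compiles away entirely. */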
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
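/* Note on TB chaining: tcg_gen_exit_tb((long)tb + tb_num) returns to
   the execution loop with the branch-slot index (0 or 1) encoded in
   the low bits, letting the loop patch this TB so the taken and
   not-taken successors are entered directly next time.  The
   cross-page case exits with 0, forcing a fresh TB lookup. */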
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
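/* Each helper above isolates one PSR flag as a 0/1 value: widen the
   32-bit PSR to target width, shift the wanted bit down, and mask.
   The 0/1 form is what the condition evaluators below combine with
   and/or/xor. */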
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
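/* The test above is the standard two's-complement rule: signed addition
   overflows iff the operands have equal signs and the result's sign
   differs, i.e. bit 31 of (~(src1 ^ src2) & (src1 ^ dst)) is set, in
   which case a tag-overflow trap is raised. */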
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
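/* MULScc performs one step of the V8 iterative multiply: the rs2
   operand is added only when the low bit of %y (the multiplier) is
   set, otherwise the addend is forced to zero by the branch above;
   %y shifts right, receiving the low bit of the partial product; and
   the partial product shifts right with N ^ V as its new top bit.
   Thirty-two such steps compute a full 32x32-bit multiply. */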
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
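/* The two brcondi checks above guard the single signed-division
   overflow case, INT64_MIN / -1, whose true quotient is not
   representable; the result is pinned to INT64_MIN rather than
   letting the host division misbehave. */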
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// unordered or not equal: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
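/* This is where the JUMP_PC trick is materialized: while dc->npc ==
   JUMP_PC the two candidate next-PC values live statically in
   dc->jump_pc[0] and dc->jump_pc[1], and only when cpu_npc is really
   needed does this function select between them with the saved
   condition, after which npc degrades to DYNAMIC_PC. */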
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
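/* V9 alternate-space accesses take their ASI either from the current
   %asi register (when the i bit selects the register+immediate form)
   or from the immediate ASI field of the instruction; gen_get_asi()
   hides that choice behind a single TCGv_i32 value. */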
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
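/* Operand fetch convention: %g0 is special-cased to a fresh zero
   constant, %g1..%g7 map to fixed TCG globals, and windowed registers
   are loaded through cpu_regwptr, which is kept pointing at the
   current register window; get_src1/get_src2 pick the right form for
   each operand. */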
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_src1 = tcg_temp_new(); // const
    cpu_src2 = tcg_temp_new(); // const
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 18);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
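            /* Each FMOV*CC macro evaluates the condition into a
               temporary, branches past the register copy when it is
               false, and frees the temporary afterwards. */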
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                        \
                {                                           \
                    TCGv r_cond;                            \
                    int l1;                                 \
                                                            \
                    l1 = gen_new_label();                   \
                    r_cond = tcg_temp_new();                \
                    cond = GET_FIELD_SP(insn, 14, 17);      \
                    gen_fcond(r_cond, fcc, cond);           \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                       0, l1);              \
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
                    gen_set_label(l1);                      \
                    tcg_temp_free(r_cond);                  \
                }
#define FMOVDCC(fcc)                                        \
                {                                           \
                    TCGv r_cond;                            \
                    int l1;                                 \
                                                            \
                    l1 = gen_new_label();                   \
                    r_cond = tcg_temp_new();                \
                    cond = GET_FIELD_SP(insn, 14, 17);      \
                    gen_fcond(r_cond, fcc, cond);           \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                       0, l1);              \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],    \
                                    cpu_fpr[DFPREG(rs2)]);  \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],    \
                                    cpu_fpr[DFPREG(rs2) + 1]); \
                    gen_set_label(l1);                      \
                    tcg_temp_free(r_cond);                  \
                }
#define FMOVQCC(fcc)                                        \
                {                                           \
                    TCGv r_cond;                            \
                    int l1;                                 \
                                                            \
                    l1 = gen_new_label();                   \
                    r_cond = tcg_temp_new();                \
                    cond = GET_FIELD_SP(insn, 14, 17);      \
                    gen_fcond(r_cond, fcc, cond);           \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                       0, l1);              \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],    \
                                    cpu_fpr[QFPREG(rs2)]);  \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],    \
                                    cpu_fpr[QFPREG(rs2) + 1]); \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],    \
                                    cpu_fpr[QFPREG(rs2) + 2]); \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],    \
                                    cpu_fpr[QFPREG(rs2) + 3]); \
                    gen_set_label(l1);                      \
                    tcg_temp_free(r_cond);                  \
                }
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVSCC(0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVDCC(0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVSCC(1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVDCC(1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVSCC(2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVDCC(2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(2);
                break;
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVSCC(3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVDCC(3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(3);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                        \
                {                                           \
                    TCGv r_cond;                            \
                    int l1;                                 \
                                                            \
                    l1 = gen_new_label();                   \
                    r_cond = tcg_temp_new();                \
                    cond = GET_FIELD_SP(insn, 14, 17);      \
                    gen_cond(r_cond, icc, cond, dc);        \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                       0, l1);              \
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
                    gen_set_label(l1);                      \
                    tcg_temp_free(r_cond);                  \
                }
#define FMOVDCC(icc)                                        \
                {                                           \
                    TCGv r_cond;                            \
                    int l1;                                 \
                                                            \
                    l1 = gen_new_label();                   \
                    r_cond = tcg_temp_new();                \
                    cond = GET_FIELD_SP(insn, 14, 17);      \
                    gen_cond(r_cond, icc, cond, dc);        \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                       0, l1);              \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],    \
                                    cpu_fpr[DFPREG(rs2)]);  \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],    \
                                    cpu_fpr[DFPREG(rs2) + 1]); \
                    gen_set_label(l1);                      \
                    tcg_temp_free(r_cond);                  \
                }
#define FMOVQCC(icc)                                        \
                {                                           \
                    TCGv r_cond;                            \
                    int l1;                                 \
                                                            \
                    l1 = gen_new_label();                   \
                    r_cond = tcg_temp_new();                \
                    cond = GET_FIELD_SP(insn, 14, 17);      \
                    gen_cond(r_cond, icc, cond, dc);        \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
                                       0, l1);              \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],    \
                                    cpu_fpr[QFPREG(rs2)]);  \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],    \
                                    cpu_fpr[QFPREG(rs2) + 1]); \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],    \
                                    cpu_fpr[QFPREG(rs2) + 2]); \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],    \
                                    cpu_fpr[QFPREG(rs2) + 3]); \
                    gen_set_label(l1);                      \
                    tcg_temp_free(r_cond);                  \
                }
            case 0x101: /* V9 fmovscc %icc */
                FMOVSCC(0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVDCC(0);
                break;
            case 0x103: /* V9 fmovqcc %icc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x181: /* V9 fmovscc %xcc */
                FMOVSCC(1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVDCC(1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
            default:
                goto illegal_insn;
            }
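        /* xop 0x2 is "or": with %g0 as a source it is the canonical
           mov/clr synthetic instruction, so rs1 == 0 and rs2 == 0 are
           special-cased into plain register moves below. */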
        } else if (xop == 0x2) {
            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {       /* immediate */
                    TCGv r_const;

                    simm = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl(simm);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {            /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {       /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {            /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
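        /* V9 shifts: bit 12 of the instruction selects a 64-bit shift
           (6-bit count) versus a 32-bit shift (5-bit count); the
           32-bit forms of srl/sra operate on the low word, zero- or
           sign-extending it first. */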
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                }
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
#endif
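        /* Arithmetic/logical group: the "cc" variants record the
           pending condition-code operation in both the cpu_cc_op TCG
           global and dc->cc_op, so the PSR flags are only computed
           lazily when they are actually read. */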
        } else if (xop < 0x36) {
            if (xop < 0x20) {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop & ~0x10) {
                case 0x0: /* add */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x1: /* and */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x2: /* or */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x3: /* xor */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x4: /* sub */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                        } else {
                            tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                    }
                    break;
                case 0x5: /* andn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x6: /* orn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x7: /* xorn */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                    } else {
                        tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0x8: /* addx, V9 addc */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_helper_compute_psr();
                            gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                            dc->cc_op = CC_OP_ADDX;
                        } else {
                            gen_helper_compute_psr();
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_helper_compute_psr();
                            gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                            dc->cc_op = CC_OP_ADDX;
                        } else {
                            gen_helper_compute_psr();
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                    } else {
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    }
                    break;
#endif
                case 0xa: /* umul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xb: /* smul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                        dc->cc_op = CC_OP_LOGIC;
                    }
                    break;
                case 0xc: /* subx, V9 subc */
                    if (IS_IMM) {
                        simm = GET_FIELDs(insn, 19, 31);
                        if (xop & 0x10) {
                            gen_helper_compute_psr();
                            gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                            dc->cc_op = CC_OP_SUBX;
                        } else {
                            gen_helper_compute_psr();
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    } else {
                        if (xop & 0x10) {
                            gen_helper_compute_psr();
                            gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                            dc->cc_op = CC_OP_SUBX;
                        } else {
                            gen_helper_compute_psr();
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                    tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                    gen_trap_ifdivzero_tl(cpu_cc_src2);
                    tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                        dc->cc_op = CC_OP_DIV;
                    }
                    break;
                case 0xf: /* sdiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10) {
                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                        dc->cc_op = CC_OP_DIV;
                    }
                    break;
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {
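                /* xop 0x20 and up: tagged arithmetic, mulscc, the
                   pre-V9 shifts and the wr%asr/wr%psr/wr%wim/wr%tbr
                   state writes. */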
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                    dc->cc_op = CC_OP_TADD;
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                    dc->cc_op = CC_OP_TSUB;
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                    dc->cc_op = CC_OP_TADDTV;
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                    dc->cc_op = CC_OP_TSUBTV;
                    break;
                case 0x24: /* mulscc */
                    gen_helper_compute_psr();
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                    dc->cc_op = CC_OP_ADD;
                    break;
#ifndef TARGET_SPARC64
                case 0x25:      /* sll */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26:      /* srl */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27:      /* sra */
                    if (IS_IMM) { /* immediate */
                        simm = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
                case 0x30:
                    {
                        switch(rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC
                                               II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8
                                               manual, nop on the
                                               microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc))
                                ; // XXX
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc, cpu_cond))
                                goto jmp_insn;
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* Softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_tmp64);
                            break;
                        case 0x15: /* Softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_tmp64);
                            break;
                        case 0x16: /* Softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_tmp64);
                            break;
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_dst);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;

                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation
                                      Counter */
                        case 0x12: /* Dispatch Control */
#endif
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0:
                            gen_helper_saved();
                            break;
                        case 1:
                            gen_helper_restored();
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            // XXX
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_helper_wrpsr(cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                        dc->cc_op = CC_OP_FLAGS;
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: // tpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 1: // tnpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tnpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 2: // tstate
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state,
                                                       tstate));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 3: // tt
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                               offsetof(trap_state, tt));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 4: // tick
                            {
                                TCGv_ptr r_tickptr;

                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 5: // tba
                            tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                            break;
                        case 6: // pstate
                            save_state(dc, cpu_cond);
                            gen_helper_wrpstate(cpu_tmp0);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 7: // tl
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, tl));
                            break;
                        case 8: // pil
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    psrpil));
                            break;
                        case 9: // cwp
                            gen_helper_wrcwp(cpu_tmp0);
                            break;
                        case 10: // cansave
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cansave));
                            break;
                        case 11: // canrestore
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    canrestore));
                            break;
                        case 12: // cleanwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cleanwin));
                            break;
                        case 13: // otherwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    otherwin));
                            break;
                        case 14: // wstate
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    wstate));
                            break;
                        case 16: // UA2005 gl
                            CHECK_IU_FEATURE(dc, GL);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, gl));
                            break;
                        case 26: // UA2005 strand status
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        if (dc->def->nwindows != 32)
                            tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                            (1 << dc->def->nwindows) - 1);
                        tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                    }
                    break;
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                        CHECK_IU_FEATURE(dc, HYPV);
                        if (!hypervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        switch (rd) {
                        case 0: // hpstate
                            // XXX gen_op_wrhpstate();
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 1: // htstate
                            // XXX gen_op_wrhtstate();
                            break;
                        case 3: // hintp
                            tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                            break;
                        case 5: // htba
                            tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                            break;
                        case 31: // hstick_cmpr
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, hstick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_hstick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 6: // hver readonly
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#endif
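                /* movcc: bit 18 selects an integer condition (%icc or
                   %xcc, chosen by the cc field) versus a floating-point
                   condition on %fcc0-%fcc3. */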
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new();
                        if (insn & (1 << 18)) {
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond, dc);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond, dc);
                            else
                                goto illegal_insn;
                        } else {
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 10);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        tcg_temp_free(r_cond);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    {
                        cpu_src2 = get_src2(insn, cpu_src2);
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x2f: /* V9 movr */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                           cpu_src1, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            simm = GET_FIELD_SPs(insn, 0, 9);
                            r_const = tcg_const_tl(simm);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
            }
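        /* VIS instructions operate on the 32-bit float registers;
           DFPREG/QFPREG map a register number onto the even-aligned
           pair or quad that backs the 64/128-bit operand. */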
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;

            switch (opf) {
            case 0x000: /* VIS I edge8cc */
            case 0x001: /* VIS II edge8n */
            case 0x002: /* VIS I edge8lcc */
            case 0x003: /* VIS II edge8ln */
            case 0x004: /* VIS I edge16cc */
            case 0x005: /* VIS II edge16n */
            case 0x006: /* VIS I edge16lcc */
            case 0x007: /* VIS II edge16ln */
            case 0x008: /* VIS I edge32cc */
            case 0x009: /* VIS II edge32n */
            case 0x00a: /* VIS I edge32lcc */
            case 0x00b: /* VIS II edge32ln */
                // XXX
                goto illegal_insn;
            case 0x010: /* VIS I array8 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x018: /* VIS I alignaddr */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x019: /* VIS II bmask */
            case 0x01a: /* VIS I alignaddrl */
                // XXX
                goto illegal_insn;
            case 0x020: /* VIS I fcmple16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x022: /* VIS I fcmpne16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x024: /* VIS I fcmple32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x026: /* VIS I fcmpne32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x028: /* VIS I fcmpgt16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02a: /* VIS I fcmpeq16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02c: /* VIS I fcmpgt32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02e: /* VIS I fcmpeq32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x031: /* VIS I fmul8x16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x033: /* VIS I fmul8x16au */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16au();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x035: /* VIS I fmul8x16al */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16al();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x036: /* VIS I fmul8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x037: /* VIS I fmul8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x038: /* VIS I fmuld8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x039: /* VIS I fmuld8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x03a: /* VIS I fpack32 */
            case 0x03b: /* VIS I fpack16 */
            case 0x03d: /* VIS I fpackfix */
            case 0x03e: /* VIS I pdist */
                // XXX
                goto illegal_insn;
            case 0x048: /* VIS I faligndata */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_faligndata();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04b: /* VIS I fpmerge */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpmerge();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04c: /* VIS II bshuffle */
                // XXX
                goto illegal_insn;
            case 0x04d: /* VIS I fexpand */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fexpand();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x050: /* VIS I fpadd16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x051: /* VIS I fpadd16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x052: /* VIS I fpadd32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x053: /* VIS I fpadd32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x054: /* VIS I fpsub16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x055: /* VIS I fpsub16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x056: /* VIS I fpsub32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x057: /* VIS I fpsub32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x060: /* VIS I fzero */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                break;
            case 0x061: /* VIS I fzeros */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], 0);
                break;
            case 0x062: /* VIS I fnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x063: /* VIS I fnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x064: /* VIS I fandnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x065: /* VIS I fandnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x066: /* VIS I fnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x067: /* VIS I fnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x068: /* VIS I fandnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                 cpu_fpr[DFPREG(rs1)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x069: /* VIS I fandnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x06a: /* VIS I fnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x06b: /* VIS I fnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x06c: /* VIS I fxor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x06d: /* VIS I fxors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x06e: /* VIS I fnand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x06f: /* VIS I fnands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x070: /* VIS I fand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x071: /* VIS I fands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x072: /* VIS I fxnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x073: /* VIS I fxnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                break;
            case 0x074: /* VIS I fsrc1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x075: /* VIS I fsrc1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x076: /* VIS I fornot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x077: /* VIS I fornot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x078: /* VIS I fsrc2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x079: /* VIS I fsrc2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x07a: /* VIS I fornot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x07b: /* VIS I fornot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x07c: /* VIS I for */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                               cpu_fpr[DFPREG(rs2)]);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                               cpu_fpr[DFPREG(rs1) + 1],
                               cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x07d: /* VIS I fors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x07e: /* VIS I fone */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                break;
            case 0x07f: /* VIS I fones */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], -1);
                break;
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
                // XXX
                goto illegal_insn;
            default:
                goto illegal_insn;
            }
#else
            goto ncp_insn;
#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            TCGv_i32 r_const;

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
            } else {                /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38:  /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39:  /* rett, V9 return */
                {
                    TCGv_i32 r_const;

                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett();
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                gen_helper_flush(cpu_dst);
                break;
            case 0x3c:      /* save */
                save_state(dc, cpu_cond);
                gen_helper_save();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d:      /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e:      /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_done();
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_retry();
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        break;
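    /* Load/store: the effective address is rs1 + simm13 or rs1 + rs2;
       casa/casxa instead take the address from rs1 alone and the
       comparison value from rs2. */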
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    gen_helper_ldfsr(cpu_tmp32);
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}
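/* Translate one basic block: instructions are translated one at a
   time until a branch is reached, a page boundary is crossed, the
   opcode buffer fills up, or the per-TB instruction limit is hit. */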
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
    cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
    cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                     "asi");
    cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                      "fprs");
    cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                 "gsr");
    cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, tick_cmpr),
                                       "tick_cmpr");
    cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, stick_cmpr),
                                        "stick_cmpr");
    cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, hstick_cmpr),
                                         "hstick_cmpr");
    cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                   "hintp");
    cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                  "htba");
    cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                  "hver");
    cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, ssr), "ssr");
    cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, version), "ver");
    cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                         offsetof(CPUState, softint),
                                         "softint");
#else
    cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                 "wim");
#endif
    cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                  "cond");
    cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                    "cc_src");
    cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, cc_src2),
                                     "cc_src2");
    cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                    "cc_dst");
    cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                       "cc_op");
    cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
    cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                 "fsr");
    cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                "pc");
    cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                 "npc");
    cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
    cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                 "tbr");
#endif
    for (i = 1; i < 8; i++)
        cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                          offsetof(CPUState, gregs[i]),
                                          gregnames[i]);
    for (i = 0; i < TARGET_FPREGS; i++)
        cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, fpr[i]),
                                            fregnames[i]);
    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
}
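/* helper.h is an "x-macro" header: with GEN_HELPER defined to 2, each
   DEF_HELPER_* entry expands to a call registering that helper with
   TCG, so the same list that declares the helpers elsewhere registers
   them here. */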
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }
}
[1];