4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env
, cpu_regwptr
;
45 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
46 static TCGv_i32 cpu_cc_op
;
47 static TCGv_i32 cpu_psr
;
48 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
50 #ifndef CONFIG_USER_ONLY
53 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
55 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
57 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
58 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
59 static TCGv_i32 cpu_softint
;
63 /* local register indexes (only used inside old micro ops) */
65 static TCGv_i32 cpu_tmp32
;
66 static TCGv_i64 cpu_tmp64
;
67 /* Floating point registers */
68 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
70 #include "gen-icount.h"
72 typedef struct DisasContext
{
73 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
74 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
75 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
79 int address_mask_32bit
;
80 uint32_t cc_op
; /* current CC operation */
81 struct TranslationBlock
*tb
;
85 // This function uses non-native bit order
86 #define GET_FIELD(X, FROM, TO) \
87 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
89 // This function uses the order in the manuals, i.e. bit 0 is 2^0
90 #define GET_FIELD_SP(X, FROM, TO) \
91 GET_FIELD(X, 31 - (TO), 31 - (FROM))
93 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
94 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
97 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
98 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
100 #define DFPREG(r) (r & 0x1e)
101 #define QFPREG(r) (r & 0x1c)
104 #define UA2005_HTRAP_MASK 0xff
105 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low 'len' bits of x to a full int.
 *
 * Callers (GET_FIELDs / GET_FIELD_SPs) pass the field width in bits.
 * The visible code shifted by 'len' instead of '32 - len', which does
 * not sign-extend from the field's top bit; it also relied on
 * left-shifting a possibly-negative value (undefined behaviour in C).
 * This version masks the field and uses the xor/subtract idiom, which
 * is fully defined for all inputs.
 */
static int sign_extend(int x, int len)
{
    unsigned int ux = (unsigned int)x;
    unsigned int sign_bit;

    if (len >= 32) {
        return x;               /* nothing to extend */
    }
    ux &= (1u << len) - 1;      /* keep only the low 'len' bits */
    sign_bit = 1u << (len - 1);
    return (int)((ux ^ sign_bit) - sign_bit);
}
113 #define IS_IMM (insn & (1<<13))
115 /* floating point registers moves */
116 static void gen_op_load_fpr_DT0(unsigned int src
)
118 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
119 offsetof(CPU_DoubleU
, l
.upper
));
120 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
121 offsetof(CPU_DoubleU
, l
.lower
));
124 static void gen_op_load_fpr_DT1(unsigned int src
)
126 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
127 offsetof(CPU_DoubleU
, l
.upper
));
128 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
129 offsetof(CPU_DoubleU
, l
.lower
));
132 static void gen_op_store_DT0_fpr(unsigned int dst
)
134 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
135 offsetof(CPU_DoubleU
, l
.upper
));
136 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
137 offsetof(CPU_DoubleU
, l
.lower
));
140 static void gen_op_load_fpr_QT0(unsigned int src
)
142 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
143 offsetof(CPU_QuadU
, l
.upmost
));
144 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
145 offsetof(CPU_QuadU
, l
.upper
));
146 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
147 offsetof(CPU_QuadU
, l
.lower
));
148 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
149 offsetof(CPU_QuadU
, l
.lowest
));
152 static void gen_op_load_fpr_QT1(unsigned int src
)
154 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
155 offsetof(CPU_QuadU
, l
.upmost
));
156 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
157 offsetof(CPU_QuadU
, l
.upper
));
158 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
159 offsetof(CPU_QuadU
, l
.lower
));
160 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
161 offsetof(CPU_QuadU
, l
.lowest
));
164 static void gen_op_store_QT0_fpr(unsigned int dst
)
166 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
167 offsetof(CPU_QuadU
, l
.upmost
));
168 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
169 offsetof(CPU_QuadU
, l
.upper
));
170 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
171 offsetof(CPU_QuadU
, l
.lower
));
172 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
173 offsetof(CPU_QuadU
, l
.lowest
));
177 #ifdef CONFIG_USER_ONLY
178 #define supervisor(dc) 0
179 #ifdef TARGET_SPARC64
180 #define hypervisor(dc) 0
183 #define supervisor(dc) (dc->mem_idx >= 1)
184 #ifdef TARGET_SPARC64
185 #define hypervisor(dc) (dc->mem_idx == 2)
190 #ifdef TARGET_SPARC64
192 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
194 #define AM_CHECK(dc) (1)
198 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
200 #ifdef TARGET_SPARC64
202 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
206 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
209 tcg_gen_movi_tl(tn
, 0);
211 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
213 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
217 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
222 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
224 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
228 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
229 target_ulong pc
, target_ulong npc
)
231 TranslationBlock
*tb
;
234 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
235 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
236 /* jump to same page: we can use a direct jump */
237 tcg_gen_goto_tb(tb_num
);
238 tcg_gen_movi_tl(cpu_pc
, pc
);
239 tcg_gen_movi_tl(cpu_npc
, npc
);
240 tcg_gen_exit_tb((long)tb
+ tb_num
);
242 /* jump to another page: currently not optimized */
243 tcg_gen_movi_tl(cpu_pc
, pc
);
244 tcg_gen_movi_tl(cpu_npc
, npc
);
250 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
252 tcg_gen_extu_i32_tl(reg
, src
);
253 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
254 tcg_gen_andi_tl(reg
, reg
, 0x1);
257 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
259 tcg_gen_extu_i32_tl(reg
, src
);
260 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
261 tcg_gen_andi_tl(reg
, reg
, 0x1);
264 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
266 tcg_gen_extu_i32_tl(reg
, src
);
267 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
268 tcg_gen_andi_tl(reg
, reg
, 0x1);
271 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
273 tcg_gen_extu_i32_tl(reg
, src
);
274 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
275 tcg_gen_andi_tl(reg
, reg
, 0x1);
278 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
284 l1
= gen_new_label();
286 r_temp
= tcg_temp_new();
287 tcg_gen_xor_tl(r_temp
, src1
, src2
);
288 tcg_gen_not_tl(r_temp
, r_temp
);
289 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
290 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
291 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
292 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
293 r_const
= tcg_const_i32(TT_TOVF
);
294 gen_helper_raise_exception(r_const
);
295 tcg_temp_free_i32(r_const
);
297 tcg_temp_free(r_temp
);
300 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
305 l1
= gen_new_label();
306 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
307 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
308 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
309 r_const
= tcg_const_i32(TT_TOVF
);
310 gen_helper_raise_exception(r_const
);
311 tcg_temp_free_i32(r_const
);
315 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
317 tcg_gen_mov_tl(cpu_cc_src
, src1
);
318 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
319 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
320 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
323 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
325 tcg_gen_mov_tl(cpu_cc_src
, src1
);
326 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
327 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
328 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
331 static inline void gen_op_addxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
333 tcg_gen_mov_tl(cpu_cc_src
, src1
);
334 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
335 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
336 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
337 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
338 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
341 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
343 tcg_gen_mov_tl(cpu_cc_src
, src1
);
344 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
345 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
346 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
347 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
348 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
351 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
353 tcg_gen_mov_tl(cpu_cc_src
, src1
);
354 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
355 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
356 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
359 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
361 tcg_gen_mov_tl(cpu_cc_src
, src1
);
362 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
363 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
364 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
365 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
366 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
369 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
375 l1
= gen_new_label();
377 r_temp
= tcg_temp_new();
378 tcg_gen_xor_tl(r_temp
, src1
, src2
);
379 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
380 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
381 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
382 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
383 r_const
= tcg_const_i32(TT_TOVF
);
384 gen_helper_raise_exception(r_const
);
385 tcg_temp_free_i32(r_const
);
387 tcg_temp_free(r_temp
);
390 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
392 tcg_gen_mov_tl(cpu_cc_src
, src1
);
393 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
395 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
396 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
397 dc
->cc_op
= CC_OP_LOGIC
;
399 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
400 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
401 dc
->cc_op
= CC_OP_SUB
;
403 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
406 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
408 tcg_gen_mov_tl(cpu_cc_src
, src1
);
409 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
410 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
411 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
414 static inline void gen_op_subxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
416 tcg_gen_mov_tl(cpu_cc_src
, src1
);
417 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
418 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
419 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
420 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
421 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
424 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
426 tcg_gen_mov_tl(cpu_cc_src
, src1
);
427 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
428 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
429 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
430 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
431 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
434 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
436 tcg_gen_mov_tl(cpu_cc_src
, src1
);
437 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
438 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
439 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
442 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
444 tcg_gen_mov_tl(cpu_cc_src
, src1
);
445 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
446 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
447 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
448 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
449 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
452 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
457 l1
= gen_new_label();
458 r_temp
= tcg_temp_new();
464 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
465 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
466 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
467 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
468 tcg_gen_movi_tl(cpu_cc_src2
, 0);
472 // env->y = (b2 << 31) | (env->y >> 1);
473 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
474 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
475 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
476 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
477 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
478 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
481 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
482 gen_mov_reg_V(r_temp
, cpu_psr
);
483 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
484 tcg_temp_free(r_temp
);
486 // T0 = (b1 << 31) | (T0 >> 1);
488 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
489 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
490 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
492 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
494 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
497 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
499 TCGv_i64 r_temp
, r_temp2
;
501 r_temp
= tcg_temp_new_i64();
502 r_temp2
= tcg_temp_new_i64();
504 tcg_gen_extu_tl_i64(r_temp
, src2
);
505 tcg_gen_extu_tl_i64(r_temp2
, src1
);
506 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
508 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
509 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
510 tcg_temp_free_i64(r_temp
);
511 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
512 #ifdef TARGET_SPARC64
513 tcg_gen_mov_i64(dst
, r_temp2
);
515 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
517 tcg_temp_free_i64(r_temp2
);
520 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
522 TCGv_i64 r_temp
, r_temp2
;
524 r_temp
= tcg_temp_new_i64();
525 r_temp2
= tcg_temp_new_i64();
527 tcg_gen_ext_tl_i64(r_temp
, src2
);
528 tcg_gen_ext_tl_i64(r_temp2
, src1
);
529 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
531 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
532 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
533 tcg_temp_free_i64(r_temp
);
534 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
535 #ifdef TARGET_SPARC64
536 tcg_gen_mov_i64(dst
, r_temp2
);
538 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
540 tcg_temp_free_i64(r_temp2
);
543 #ifdef TARGET_SPARC64
544 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
549 l1
= gen_new_label();
550 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
551 r_const
= tcg_const_i32(TT_DIV_ZERO
);
552 gen_helper_raise_exception(r_const
);
553 tcg_temp_free_i32(r_const
);
557 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
561 l1
= gen_new_label();
562 l2
= gen_new_label();
563 tcg_gen_mov_tl(cpu_cc_src
, src1
);
564 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
565 gen_trap_ifdivzero_tl(cpu_cc_src2
);
566 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
567 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
568 tcg_gen_movi_i64(dst
, INT64_MIN
);
571 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
577 static inline void gen_op_eval_ba(TCGv dst
)
579 tcg_gen_movi_tl(dst
, 1);
583 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
585 gen_mov_reg_Z(dst
, src
);
589 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
591 gen_mov_reg_N(cpu_tmp0
, src
);
592 gen_mov_reg_V(dst
, src
);
593 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
594 gen_mov_reg_Z(cpu_tmp0
, src
);
595 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
599 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
601 gen_mov_reg_V(cpu_tmp0
, src
);
602 gen_mov_reg_N(dst
, src
);
603 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
607 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
609 gen_mov_reg_Z(cpu_tmp0
, src
);
610 gen_mov_reg_C(dst
, src
);
611 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
615 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
617 gen_mov_reg_C(dst
, src
);
621 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
623 gen_mov_reg_V(dst
, src
);
627 static inline void gen_op_eval_bn(TCGv dst
)
629 tcg_gen_movi_tl(dst
, 0);
633 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
635 gen_mov_reg_N(dst
, src
);
639 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
641 gen_mov_reg_Z(dst
, src
);
642 tcg_gen_xori_tl(dst
, dst
, 0x1);
646 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
648 gen_mov_reg_N(cpu_tmp0
, src
);
649 gen_mov_reg_V(dst
, src
);
650 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
651 gen_mov_reg_Z(cpu_tmp0
, src
);
652 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
653 tcg_gen_xori_tl(dst
, dst
, 0x1);
657 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
659 gen_mov_reg_V(cpu_tmp0
, src
);
660 gen_mov_reg_N(dst
, src
);
661 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
662 tcg_gen_xori_tl(dst
, dst
, 0x1);
666 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
668 gen_mov_reg_Z(cpu_tmp0
, src
);
669 gen_mov_reg_C(dst
, src
);
670 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
671 tcg_gen_xori_tl(dst
, dst
, 0x1);
675 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
677 gen_mov_reg_C(dst
, src
);
678 tcg_gen_xori_tl(dst
, dst
, 0x1);
682 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
684 gen_mov_reg_N(dst
, src
);
685 tcg_gen_xori_tl(dst
, dst
, 0x1);
689 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
691 gen_mov_reg_V(dst
, src
);
692 tcg_gen_xori_tl(dst
, dst
, 0x1);
696 FPSR bit field FCC1 | FCC0:
702 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
703 unsigned int fcc_offset
)
705 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
706 tcg_gen_andi_tl(reg
, reg
, 0x1);
709 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
710 unsigned int fcc_offset
)
712 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
713 tcg_gen_andi_tl(reg
, reg
, 0x1);
717 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
718 unsigned int fcc_offset
)
720 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
721 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
722 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
725 // 1 or 2: FCC0 ^ FCC1
726 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
727 unsigned int fcc_offset
)
729 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
730 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
731 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
735 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
736 unsigned int fcc_offset
)
738 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
742 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
743 unsigned int fcc_offset
)
745 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
746 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
747 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
748 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
752 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
753 unsigned int fcc_offset
)
755 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
759 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
760 unsigned int fcc_offset
)
762 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
763 tcg_gen_xori_tl(dst
, dst
, 0x1);
764 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
765 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
769 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
770 unsigned int fcc_offset
)
772 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
773 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
774 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
778 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
779 unsigned int fcc_offset
)
781 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
782 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
783 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
784 tcg_gen_xori_tl(dst
, dst
, 0x1);
787 // 0 or 3: !(FCC0 ^ FCC1)
788 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
789 unsigned int fcc_offset
)
791 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
792 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
793 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
794 tcg_gen_xori_tl(dst
, dst
, 0x1);
798 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
799 unsigned int fcc_offset
)
801 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
802 tcg_gen_xori_tl(dst
, dst
, 0x1);
805 // !1: !(FCC0 & !FCC1)
806 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
807 unsigned int fcc_offset
)
809 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
810 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
811 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
812 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
813 tcg_gen_xori_tl(dst
, dst
, 0x1);
817 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
818 unsigned int fcc_offset
)
820 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
821 tcg_gen_xori_tl(dst
, dst
, 0x1);
824 // !2: !(!FCC0 & FCC1)
825 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
826 unsigned int fcc_offset
)
828 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
829 tcg_gen_xori_tl(dst
, dst
, 0x1);
830 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
831 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
832 tcg_gen_xori_tl(dst
, dst
, 0x1);
835 // !3: !(FCC0 & FCC1)
836 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
837 unsigned int fcc_offset
)
839 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
840 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
841 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
842 tcg_gen_xori_tl(dst
, dst
, 0x1);
845 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
846 target_ulong pc2
, TCGv r_cond
)
850 l1
= gen_new_label();
852 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
854 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
857 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
860 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
861 target_ulong pc2
, TCGv r_cond
)
865 l1
= gen_new_label();
867 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
869 gen_goto_tb(dc
, 0, pc2
, pc1
);
872 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
875 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
880 l1
= gen_new_label();
881 l2
= gen_new_label();
883 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
885 tcg_gen_movi_tl(cpu_npc
, npc1
);
889 tcg_gen_movi_tl(cpu_npc
, npc2
);
893 /* call this function before using the condition register as it may
894 have been set for a jump */
895 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
897 if (dc
->npc
== JUMP_PC
) {
898 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
899 dc
->npc
= DYNAMIC_PC
;
903 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
905 if (dc
->npc
== JUMP_PC
) {
906 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
907 dc
->npc
= DYNAMIC_PC
;
908 } else if (dc
->npc
!= DYNAMIC_PC
) {
909 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
913 static inline void save_state(DisasContext
*dc
, TCGv cond
)
915 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
919 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
921 if (dc
->npc
== JUMP_PC
) {
922 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
923 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
925 } else if (dc
->npc
== DYNAMIC_PC
) {
926 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
933 static inline void gen_op_next_insn(void)
935 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
936 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
939 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
944 #ifdef TARGET_SPARC64
956 gen_helper_compute_psr();
957 dc
->cc_op
= CC_OP_FLAGS
;
962 gen_op_eval_bn(r_dst
);
965 gen_op_eval_be(r_dst
, r_src
);
968 gen_op_eval_ble(r_dst
, r_src
);
971 gen_op_eval_bl(r_dst
, r_src
);
974 gen_op_eval_bleu(r_dst
, r_src
);
977 gen_op_eval_bcs(r_dst
, r_src
);
980 gen_op_eval_bneg(r_dst
, r_src
);
983 gen_op_eval_bvs(r_dst
, r_src
);
986 gen_op_eval_ba(r_dst
);
989 gen_op_eval_bne(r_dst
, r_src
);
992 gen_op_eval_bg(r_dst
, r_src
);
995 gen_op_eval_bge(r_dst
, r_src
);
998 gen_op_eval_bgu(r_dst
, r_src
);
1001 gen_op_eval_bcc(r_dst
, r_src
);
1004 gen_op_eval_bpos(r_dst
, r_src
);
1007 gen_op_eval_bvc(r_dst
, r_src
);
1012 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1014 unsigned int offset
;
1034 gen_op_eval_bn(r_dst
);
1037 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1040 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1043 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1046 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1049 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1052 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1055 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1058 gen_op_eval_ba(r_dst
);
1061 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1064 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1067 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1070 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1073 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1076 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1079 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1084 #ifdef TARGET_SPARC64
1086 static const int gen_tcg_cond_reg
[8] = {
1097 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1101 l1
= gen_new_label();
1102 tcg_gen_movi_tl(r_dst
, 0);
1103 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1104 tcg_gen_movi_tl(r_dst
, 1);
1109 /* XXX: potentially incorrect if dynamic npc */
1110 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1113 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1114 target_ulong target
= dc
->pc
+ offset
;
1117 /* unconditional not taken */
1119 dc
->pc
= dc
->npc
+ 4;
1120 dc
->npc
= dc
->pc
+ 4;
1123 dc
->npc
= dc
->pc
+ 4;
1125 } else if (cond
== 0x8) {
1126 /* unconditional taken */
1129 dc
->npc
= dc
->pc
+ 4;
1135 flush_cond(dc
, r_cond
);
1136 gen_cond(r_cond
, cc
, cond
, dc
);
1138 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1142 dc
->jump_pc
[0] = target
;
1143 dc
->jump_pc
[1] = dc
->npc
+ 4;
1149 /* XXX: potentially incorrect if dynamic npc */
1150 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1153 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1154 target_ulong target
= dc
->pc
+ offset
;
1157 /* unconditional not taken */
1159 dc
->pc
= dc
->npc
+ 4;
1160 dc
->npc
= dc
->pc
+ 4;
1163 dc
->npc
= dc
->pc
+ 4;
1165 } else if (cond
== 0x8) {
1166 /* unconditional taken */
1169 dc
->npc
= dc
->pc
+ 4;
1175 flush_cond(dc
, r_cond
);
1176 gen_fcond(r_cond
, cc
, cond
);
1178 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1182 dc
->jump_pc
[0] = target
;
1183 dc
->jump_pc
[1] = dc
->npc
+ 4;
1189 #ifdef TARGET_SPARC64
1190 /* XXX: potentially incorrect if dynamic npc */
1191 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1192 TCGv r_cond
, TCGv r_reg
)
1194 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1195 target_ulong target
= dc
->pc
+ offset
;
1197 flush_cond(dc
, r_cond
);
1198 gen_cond_reg(r_cond
, cond
, r_reg
);
1200 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1204 dc
->jump_pc
[0] = target
;
1205 dc
->jump_pc
[1] = dc
->npc
+ 4;
1210 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1214 gen_helper_fcmps(r_rs1
, r_rs2
);
1217 gen_helper_fcmps_fcc1(r_rs1
, r_rs2
);
1220 gen_helper_fcmps_fcc2(r_rs1
, r_rs2
);
1223 gen_helper_fcmps_fcc3(r_rs1
, r_rs2
);
1228 static inline void gen_op_fcmpd(int fccno
)
1235 gen_helper_fcmpd_fcc1();
1238 gen_helper_fcmpd_fcc2();
1241 gen_helper_fcmpd_fcc3();
1246 static inline void gen_op_fcmpq(int fccno
)
1253 gen_helper_fcmpq_fcc1();
1256 gen_helper_fcmpq_fcc2();
1259 gen_helper_fcmpq_fcc3();
1264 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1268 gen_helper_fcmpes(r_rs1
, r_rs2
);
1271 gen_helper_fcmpes_fcc1(r_rs1
, r_rs2
);
1274 gen_helper_fcmpes_fcc2(r_rs1
, r_rs2
);
1277 gen_helper_fcmpes_fcc3(r_rs1
, r_rs2
);
1282 static inline void gen_op_fcmped(int fccno
)
1286 gen_helper_fcmped();
1289 gen_helper_fcmped_fcc1();
1292 gen_helper_fcmped_fcc2();
1295 gen_helper_fcmped_fcc3();
1300 static inline void gen_op_fcmpeq(int fccno
)
1304 gen_helper_fcmpeq();
1307 gen_helper_fcmpeq_fcc1();
1310 gen_helper_fcmpeq_fcc2();
1313 gen_helper_fcmpeq_fcc3();
1320 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1322 gen_helper_fcmps(r_rs1
, r_rs2
);
1325 static inline void gen_op_fcmpd(int fccno
)
1330 static inline void gen_op_fcmpq(int fccno
)
1335 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1337 gen_helper_fcmpes(r_rs1
, r_rs2
);
1340 static inline void gen_op_fcmped(int fccno
)
1342 gen_helper_fcmped();
1345 static inline void gen_op_fcmpeq(int fccno
)
1347 gen_helper_fcmpeq();
1351 static inline void gen_op_fpexception_im(int fsr_flags
)
1355 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1356 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1357 r_const
= tcg_const_i32(TT_FP_EXCP
);
1358 gen_helper_raise_exception(r_const
);
1359 tcg_temp_free_i32(r_const
);
1362 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1364 #if !defined(CONFIG_USER_ONLY)
1365 if (!dc
->fpu_enabled
) {
1368 save_state(dc
, r_cond
);
1369 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1370 gen_helper_raise_exception(r_const
);
1371 tcg_temp_free_i32(r_const
);
1379 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1381 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
/* Emit a call to the helper that clears pending softfloat exceptions. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1390 #ifdef TARGET_SPARC64
1391 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1397 r_asi
= tcg_temp_new_i32();
1398 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1400 asi
= GET_FIELD(insn
, 19, 26);
1401 r_asi
= tcg_const_i32(asi
);
1406 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1409 TCGv_i32 r_asi
, r_size
, r_sign
;
1411 r_asi
= gen_get_asi(insn
, addr
);
1412 r_size
= tcg_const_i32(size
);
1413 r_sign
= tcg_const_i32(sign
);
1414 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1415 tcg_temp_free_i32(r_sign
);
1416 tcg_temp_free_i32(r_size
);
1417 tcg_temp_free_i32(r_asi
);
1420 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1422 TCGv_i32 r_asi
, r_size
;
1424 r_asi
= gen_get_asi(insn
, addr
);
1425 r_size
= tcg_const_i32(size
);
1426 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1427 tcg_temp_free_i32(r_size
);
1428 tcg_temp_free_i32(r_asi
);
1431 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1433 TCGv_i32 r_asi
, r_size
, r_rd
;
1435 r_asi
= gen_get_asi(insn
, addr
);
1436 r_size
= tcg_const_i32(size
);
1437 r_rd
= tcg_const_i32(rd
);
1438 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1439 tcg_temp_free_i32(r_rd
);
1440 tcg_temp_free_i32(r_size
);
1441 tcg_temp_free_i32(r_asi
);
/*
 * Emit an FP-register store to an alternate address space; mirror image of
 * gen_ldf_asi — the helper reads the FP register(s) named by rd.
 */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/*
 * Emit a SWAP through an alternate address space (SPARC64 form):
 * load the old 32-bit word into cpu_tmp64, store dst to the same address,
 * then move the old value into dst.
 */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);          /* word access */
    r_sign = tcg_const_i32(0);          /* unsigned load */
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    /* return the previous memory contents in dst */
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/*
 * Emit an LDDA (load doubleword, alternate space) for SPARC64: the helper
 * writes the destination register pair itself, so only addr, the ASI and
 * the rd number are passed down.
 * NOTE(review): hi is unused in this variant — the helper updates the
 * register pair directly; confirm against the sparc32 variant.
 */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
/*
 * Emit an STDA (store doubleword, alternate space) for SPARC64:
 * concatenate the odd register (low word) with hi (high word) into a
 * 64-bit value and store it in one 8-byte helper access.
 */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);                 /* low half from r[rd+1] */
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);    /* hi:lo -> 64 bits */
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/*
 * Emit a 32-bit compare-and-swap (CASA) in an alternate address space:
 * the comparison value comes from register rd, the swap value from val2;
 * the helper leaves the old memory value in dst.
 */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);        /* compare value from r[rd] */
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
/*
 * Emit a 64-bit compare-and-swap (CASXA) in an alternate address space;
 * same shape as gen_cas_asi but uses the shared cpu_tmp64 scratch for the
 * compare value instead of a fresh temporary.
 */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);     /* compare value from r[rd] */
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1521 #elif !defined(CONFIG_USER_ONLY)
1523 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1526 TCGv_i32 r_asi
, r_size
, r_sign
;
1528 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1529 r_size
= tcg_const_i32(size
);
1530 r_sign
= tcg_const_i32(sign
);
1531 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1532 tcg_temp_free(r_sign
);
1533 tcg_temp_free(r_size
);
1534 tcg_temp_free(r_asi
);
1535 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1538 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1540 TCGv_i32 r_asi
, r_size
;
1542 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1543 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1544 r_size
= tcg_const_i32(size
);
1545 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1546 tcg_temp_free(r_size
);
1547 tcg_temp_free(r_asi
);
1550 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1552 TCGv_i32 r_asi
, r_size
, r_sign
;
1555 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1556 r_size
= tcg_const_i32(4);
1557 r_sign
= tcg_const_i32(0);
1558 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1559 tcg_temp_free(r_sign
);
1560 r_val
= tcg_temp_new_i64();
1561 tcg_gen_extu_tl_i64(r_val
, dst
);
1562 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1563 tcg_temp_free_i64(r_val
);
1564 tcg_temp_free(r_size
);
1565 tcg_temp_free(r_asi
);
1566 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1569 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1571 TCGv_i32 r_asi
, r_size
, r_sign
;
1573 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1574 r_size
= tcg_const_i32(8);
1575 r_sign
= tcg_const_i32(0);
1576 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1577 tcg_temp_free(r_sign
);
1578 tcg_temp_free(r_size
);
1579 tcg_temp_free(r_asi
);
1580 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1581 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1582 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1583 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1584 gen_movl_TN_reg(rd
, hi
);
1587 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1589 TCGv_i32 r_asi
, r_size
;
1591 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1592 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1593 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1594 r_size
= tcg_const_i32(8);
1595 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1596 tcg_temp_free(r_size
);
1597 tcg_temp_free(r_asi
);
1601 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/*
 * Emit an LDSTUB (atomic load-store unsigned byte) in an alternate address
 * space: read the old byte into dst, then store 0xff back to the same
 * address, as the architecture defines.
 */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    /* unsigned 1-byte load of the previous value */
    gen_ld_asi(dst, addr, insn, 1, 0);

    /* then write back the "locked" marker byte 0xff */
    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
/*
 * Return a TCGv holding the first source operand (rs1, bits 13..17).
 * %g0 reads as constant zero; %g1..%g7 come straight from the global
 * register array; windowed registers are loaded through cpu_regwptr into
 * the caller-provided scratch `def`.
 */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}
/*
 * Return a TCGv holding the second source operand.  Immediate-form
 * instructions yield a sign-extended constant from bits 19..31; register
 * form resolves rs2 (bits 27..31) the same way get_src1 resolves rs1.
 */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
/* Abort decoding of the current insn when the CPU model lacks the given
   integer-unit feature.  NOTE(review): jump target reconstructed — confirm
   the label name against the decoder's error exits. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Abort decoding of the current insn when the CPU model lacks the given
   FPU feature.  NOTE(review): jump target reconstructed — confirm the
   label name against the decoder's error exits. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1664 /* before an instruction, dc->pc must be static */
1665 static void disas_sparc_insn(DisasContext
* dc
)
1667 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1670 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
)))
1671 tcg_gen_debug_insn_start(dc
->pc
);
1672 insn
= ldl_code(dc
->pc
);
1673 opc
= GET_FIELD(insn
, 0, 1);
1675 rd
= GET_FIELD(insn
, 2, 6);
1677 cpu_src1
= tcg_temp_new(); // const
1678 cpu_src2
= tcg_temp_new(); // const
1681 case 0: /* branches/sethi */
1683 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1686 #ifdef TARGET_SPARC64
1687 case 0x1: /* V9 BPcc */
1691 target
= GET_FIELD_SP(insn
, 0, 18);
1692 target
= sign_extend(target
, 18);
1694 cc
= GET_FIELD_SP(insn
, 20, 21);
1696 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1698 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1703 case 0x3: /* V9 BPr */
1705 target
= GET_FIELD_SP(insn
, 0, 13) |
1706 (GET_FIELD_SP(insn
, 20, 21) << 14);
1707 target
= sign_extend(target
, 16);
1709 cpu_src1
= get_src1(insn
, cpu_src1
);
1710 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1713 case 0x5: /* V9 FBPcc */
1715 int cc
= GET_FIELD_SP(insn
, 20, 21);
1716 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1718 target
= GET_FIELD_SP(insn
, 0, 18);
1719 target
= sign_extend(target
, 19);
1721 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1725 case 0x7: /* CBN+x */
1730 case 0x2: /* BN+x */
1732 target
= GET_FIELD(insn
, 10, 31);
1733 target
= sign_extend(target
, 22);
1735 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1738 case 0x6: /* FBN+x */
1740 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1742 target
= GET_FIELD(insn
, 10, 31);
1743 target
= sign_extend(target
, 22);
1745 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1748 case 0x4: /* SETHI */
1750 uint32_t value
= GET_FIELD(insn
, 10, 31);
1753 r_const
= tcg_const_tl(value
<< 10);
1754 gen_movl_TN_reg(rd
, r_const
);
1755 tcg_temp_free(r_const
);
1758 case 0x0: /* UNIMPL */
1767 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1770 r_const
= tcg_const_tl(dc
->pc
);
1771 gen_movl_TN_reg(15, r_const
);
1772 tcg_temp_free(r_const
);
1774 gen_mov_pc_npc(dc
, cpu_cond
);
1778 case 2: /* FPU & Logical Operations */
1780 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1781 if (xop
== 0x3a) { /* generate trap */
1784 cpu_src1
= get_src1(insn
, cpu_src1
);
1786 rs2
= GET_FIELD(insn
, 25, 31);
1787 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
1789 rs2
= GET_FIELD(insn
, 27, 31);
1791 gen_movl_reg_TN(rs2
, cpu_src2
);
1792 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
1794 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
1796 cond
= GET_FIELD(insn
, 3, 6);
1798 save_state(dc
, cpu_cond
);
1799 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
1801 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
1803 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
1804 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
1805 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
1806 gen_helper_raise_exception(cpu_tmp32
);
1807 } else if (cond
!= 0) {
1808 TCGv r_cond
= tcg_temp_new();
1810 #ifdef TARGET_SPARC64
1812 int cc
= GET_FIELD_SP(insn
, 11, 12);
1814 save_state(dc
, cpu_cond
);
1816 gen_cond(r_cond
, 0, cond
, dc
);
1818 gen_cond(r_cond
, 1, cond
, dc
);
1822 save_state(dc
, cpu_cond
);
1823 gen_cond(r_cond
, 0, cond
, dc
);
1825 l1
= gen_new_label();
1826 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1828 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
1830 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
1832 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
1833 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
1834 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
1835 gen_helper_raise_exception(cpu_tmp32
);
1838 tcg_temp_free(r_cond
);
1844 } else if (xop
== 0x28) {
1845 rs1
= GET_FIELD(insn
, 13, 17);
1848 #ifndef TARGET_SPARC64
1849 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1850 manual, rdy on the microSPARC
1852 case 0x0f: /* stbar in the SPARCv8 manual,
1853 rdy on the microSPARC II */
1854 case 0x10 ... 0x1f: /* implementation-dependent in the
1855 SPARCv8 manual, rdy on the
1858 gen_movl_TN_reg(rd
, cpu_y
);
1860 #ifdef TARGET_SPARC64
1861 case 0x2: /* V9 rdccr */
1862 gen_helper_compute_psr();
1863 gen_helper_rdccr(cpu_dst
);
1864 gen_movl_TN_reg(rd
, cpu_dst
);
1866 case 0x3: /* V9 rdasi */
1867 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
1868 gen_movl_TN_reg(rd
, cpu_dst
);
1870 case 0x4: /* V9 rdtick */
1874 r_tickptr
= tcg_temp_new_ptr();
1875 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
1876 offsetof(CPUState
, tick
));
1877 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
1878 tcg_temp_free_ptr(r_tickptr
);
1879 gen_movl_TN_reg(rd
, cpu_dst
);
1882 case 0x5: /* V9 rdpc */
1886 r_const
= tcg_const_tl(dc
->pc
);
1887 gen_movl_TN_reg(rd
, r_const
);
1888 tcg_temp_free(r_const
);
1891 case 0x6: /* V9 rdfprs */
1892 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
1893 gen_movl_TN_reg(rd
, cpu_dst
);
1895 case 0xf: /* V9 membar */
1896 break; /* no effect */
1897 case 0x13: /* Graphics Status */
1898 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1900 gen_movl_TN_reg(rd
, cpu_gsr
);
1902 case 0x16: /* Softint */
1903 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
1904 gen_movl_TN_reg(rd
, cpu_dst
);
1906 case 0x17: /* Tick compare */
1907 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
1909 case 0x18: /* System tick */
1913 r_tickptr
= tcg_temp_new_ptr();
1914 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
1915 offsetof(CPUState
, stick
));
1916 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
1917 tcg_temp_free_ptr(r_tickptr
);
1918 gen_movl_TN_reg(rd
, cpu_dst
);
1921 case 0x19: /* System tick compare */
1922 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
1924 case 0x10: /* Performance Control */
1925 case 0x11: /* Performance Instrumentation Counter */
1926 case 0x12: /* Dispatch Control */
1927 case 0x14: /* Softint set, WO */
1928 case 0x15: /* Softint clear, WO */
1933 #if !defined(CONFIG_USER_ONLY)
1934 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
1935 #ifndef TARGET_SPARC64
1936 if (!supervisor(dc
))
1938 gen_helper_compute_psr();
1939 dc
->cc_op
= CC_OP_FLAGS
;
1940 gen_helper_rdpsr(cpu_dst
);
1942 CHECK_IU_FEATURE(dc
, HYPV
);
1943 if (!hypervisor(dc
))
1945 rs1
= GET_FIELD(insn
, 13, 17);
1948 // gen_op_rdhpstate();
1951 // gen_op_rdhtstate();
1954 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
1957 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
1960 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
1962 case 31: // hstick_cmpr
1963 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
1969 gen_movl_TN_reg(rd
, cpu_dst
);
1971 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
1972 if (!supervisor(dc
))
1974 #ifdef TARGET_SPARC64
1975 rs1
= GET_FIELD(insn
, 13, 17);
1981 r_tsptr
= tcg_temp_new_ptr();
1982 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
1983 offsetof(CPUState
, tsptr
));
1984 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
1985 offsetof(trap_state
, tpc
));
1986 tcg_temp_free_ptr(r_tsptr
);
1993 r_tsptr
= tcg_temp_new_ptr();
1994 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
1995 offsetof(CPUState
, tsptr
));
1996 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
1997 offsetof(trap_state
, tnpc
));
1998 tcg_temp_free_ptr(r_tsptr
);
2005 r_tsptr
= tcg_temp_new_ptr();
2006 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2007 offsetof(CPUState
, tsptr
));
2008 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2009 offsetof(trap_state
, tstate
));
2010 tcg_temp_free_ptr(r_tsptr
);
2017 r_tsptr
= tcg_temp_new_ptr();
2018 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2019 offsetof(CPUState
, tsptr
));
2020 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2021 offsetof(trap_state
, tt
));
2022 tcg_temp_free_ptr(r_tsptr
);
2023 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2030 r_tickptr
= tcg_temp_new_ptr();
2031 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2032 offsetof(CPUState
, tick
));
2033 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2034 gen_movl_TN_reg(rd
, cpu_tmp0
);
2035 tcg_temp_free_ptr(r_tickptr
);
2039 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2042 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2043 offsetof(CPUSPARCState
, pstate
));
2044 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2047 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2048 offsetof(CPUSPARCState
, tl
));
2049 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2052 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2053 offsetof(CPUSPARCState
, psrpil
));
2054 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2057 gen_helper_rdcwp(cpu_tmp0
);
2060 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2061 offsetof(CPUSPARCState
, cansave
));
2062 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2064 case 11: // canrestore
2065 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2066 offsetof(CPUSPARCState
, canrestore
));
2067 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2069 case 12: // cleanwin
2070 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2071 offsetof(CPUSPARCState
, cleanwin
));
2072 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2074 case 13: // otherwin
2075 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2076 offsetof(CPUSPARCState
, otherwin
));
2077 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2080 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2081 offsetof(CPUSPARCState
, wstate
));
2082 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2084 case 16: // UA2005 gl
2085 CHECK_IU_FEATURE(dc
, GL
);
2086 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2087 offsetof(CPUSPARCState
, gl
));
2088 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2090 case 26: // UA2005 strand status
2091 CHECK_IU_FEATURE(dc
, HYPV
);
2092 if (!hypervisor(dc
))
2094 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2097 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2104 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2106 gen_movl_TN_reg(rd
, cpu_tmp0
);
2108 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2109 #ifdef TARGET_SPARC64
2110 save_state(dc
, cpu_cond
);
2111 gen_helper_flushw();
2113 if (!supervisor(dc
))
2115 gen_movl_TN_reg(rd
, cpu_tbr
);
2119 } else if (xop
== 0x34) { /* FPU Operations */
2120 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2122 gen_op_clear_ieee_excp_and_FTT();
2123 rs1
= GET_FIELD(insn
, 13, 17);
2124 rs2
= GET_FIELD(insn
, 27, 31);
2125 xop
= GET_FIELD(insn
, 18, 26);
2127 case 0x1: /* fmovs */
2128 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2130 case 0x5: /* fnegs */
2131 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2133 case 0x9: /* fabss */
2134 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2136 case 0x29: /* fsqrts */
2137 CHECK_FPU_FEATURE(dc
, FSQRT
);
2138 gen_clear_float_exceptions();
2139 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2140 gen_helper_check_ieee_exceptions();
2141 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2143 case 0x2a: /* fsqrtd */
2144 CHECK_FPU_FEATURE(dc
, FSQRT
);
2145 gen_op_load_fpr_DT1(DFPREG(rs2
));
2146 gen_clear_float_exceptions();
2147 gen_helper_fsqrtd();
2148 gen_helper_check_ieee_exceptions();
2149 gen_op_store_DT0_fpr(DFPREG(rd
));
2151 case 0x2b: /* fsqrtq */
2152 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2153 gen_op_load_fpr_QT1(QFPREG(rs2
));
2154 gen_clear_float_exceptions();
2155 gen_helper_fsqrtq();
2156 gen_helper_check_ieee_exceptions();
2157 gen_op_store_QT0_fpr(QFPREG(rd
));
2159 case 0x41: /* fadds */
2160 gen_clear_float_exceptions();
2161 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2162 gen_helper_check_ieee_exceptions();
2163 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2165 case 0x42: /* faddd */
2166 gen_op_load_fpr_DT0(DFPREG(rs1
));
2167 gen_op_load_fpr_DT1(DFPREG(rs2
));
2168 gen_clear_float_exceptions();
2170 gen_helper_check_ieee_exceptions();
2171 gen_op_store_DT0_fpr(DFPREG(rd
));
2173 case 0x43: /* faddq */
2174 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2175 gen_op_load_fpr_QT0(QFPREG(rs1
));
2176 gen_op_load_fpr_QT1(QFPREG(rs2
));
2177 gen_clear_float_exceptions();
2179 gen_helper_check_ieee_exceptions();
2180 gen_op_store_QT0_fpr(QFPREG(rd
));
2182 case 0x45: /* fsubs */
2183 gen_clear_float_exceptions();
2184 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2185 gen_helper_check_ieee_exceptions();
2186 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2188 case 0x46: /* fsubd */
2189 gen_op_load_fpr_DT0(DFPREG(rs1
));
2190 gen_op_load_fpr_DT1(DFPREG(rs2
));
2191 gen_clear_float_exceptions();
2193 gen_helper_check_ieee_exceptions();
2194 gen_op_store_DT0_fpr(DFPREG(rd
));
2196 case 0x47: /* fsubq */
2197 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2198 gen_op_load_fpr_QT0(QFPREG(rs1
));
2199 gen_op_load_fpr_QT1(QFPREG(rs2
));
2200 gen_clear_float_exceptions();
2202 gen_helper_check_ieee_exceptions();
2203 gen_op_store_QT0_fpr(QFPREG(rd
));
2205 case 0x49: /* fmuls */
2206 CHECK_FPU_FEATURE(dc
, FMUL
);
2207 gen_clear_float_exceptions();
2208 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2209 gen_helper_check_ieee_exceptions();
2210 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2212 case 0x4a: /* fmuld */
2213 CHECK_FPU_FEATURE(dc
, FMUL
);
2214 gen_op_load_fpr_DT0(DFPREG(rs1
));
2215 gen_op_load_fpr_DT1(DFPREG(rs2
));
2216 gen_clear_float_exceptions();
2218 gen_helper_check_ieee_exceptions();
2219 gen_op_store_DT0_fpr(DFPREG(rd
));
2221 case 0x4b: /* fmulq */
2222 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2223 CHECK_FPU_FEATURE(dc
, FMUL
);
2224 gen_op_load_fpr_QT0(QFPREG(rs1
));
2225 gen_op_load_fpr_QT1(QFPREG(rs2
));
2226 gen_clear_float_exceptions();
2228 gen_helper_check_ieee_exceptions();
2229 gen_op_store_QT0_fpr(QFPREG(rd
));
2231 case 0x4d: /* fdivs */
2232 gen_clear_float_exceptions();
2233 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2234 gen_helper_check_ieee_exceptions();
2235 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2237 case 0x4e: /* fdivd */
2238 gen_op_load_fpr_DT0(DFPREG(rs1
));
2239 gen_op_load_fpr_DT1(DFPREG(rs2
));
2240 gen_clear_float_exceptions();
2242 gen_helper_check_ieee_exceptions();
2243 gen_op_store_DT0_fpr(DFPREG(rd
));
2245 case 0x4f: /* fdivq */
2246 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2247 gen_op_load_fpr_QT0(QFPREG(rs1
));
2248 gen_op_load_fpr_QT1(QFPREG(rs2
));
2249 gen_clear_float_exceptions();
2251 gen_helper_check_ieee_exceptions();
2252 gen_op_store_QT0_fpr(QFPREG(rd
));
2254 case 0x69: /* fsmuld */
2255 CHECK_FPU_FEATURE(dc
, FSMULD
);
2256 gen_clear_float_exceptions();
2257 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2258 gen_helper_check_ieee_exceptions();
2259 gen_op_store_DT0_fpr(DFPREG(rd
));
2261 case 0x6e: /* fdmulq */
2262 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2263 gen_op_load_fpr_DT0(DFPREG(rs1
));
2264 gen_op_load_fpr_DT1(DFPREG(rs2
));
2265 gen_clear_float_exceptions();
2266 gen_helper_fdmulq();
2267 gen_helper_check_ieee_exceptions();
2268 gen_op_store_QT0_fpr(QFPREG(rd
));
2270 case 0xc4: /* fitos */
2271 gen_clear_float_exceptions();
2272 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2273 gen_helper_check_ieee_exceptions();
2274 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2276 case 0xc6: /* fdtos */
2277 gen_op_load_fpr_DT1(DFPREG(rs2
));
2278 gen_clear_float_exceptions();
2279 gen_helper_fdtos(cpu_tmp32
);
2280 gen_helper_check_ieee_exceptions();
2281 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2283 case 0xc7: /* fqtos */
2284 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2285 gen_op_load_fpr_QT1(QFPREG(rs2
));
2286 gen_clear_float_exceptions();
2287 gen_helper_fqtos(cpu_tmp32
);
2288 gen_helper_check_ieee_exceptions();
2289 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2291 case 0xc8: /* fitod */
2292 gen_helper_fitod(cpu_fpr
[rs2
]);
2293 gen_op_store_DT0_fpr(DFPREG(rd
));
2295 case 0xc9: /* fstod */
2296 gen_helper_fstod(cpu_fpr
[rs2
]);
2297 gen_op_store_DT0_fpr(DFPREG(rd
));
2299 case 0xcb: /* fqtod */
2300 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2301 gen_op_load_fpr_QT1(QFPREG(rs2
));
2302 gen_clear_float_exceptions();
2304 gen_helper_check_ieee_exceptions();
2305 gen_op_store_DT0_fpr(DFPREG(rd
));
2307 case 0xcc: /* fitoq */
2308 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2309 gen_helper_fitoq(cpu_fpr
[rs2
]);
2310 gen_op_store_QT0_fpr(QFPREG(rd
));
2312 case 0xcd: /* fstoq */
2313 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2314 gen_helper_fstoq(cpu_fpr
[rs2
]);
2315 gen_op_store_QT0_fpr(QFPREG(rd
));
2317 case 0xce: /* fdtoq */
2318 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2319 gen_op_load_fpr_DT1(DFPREG(rs2
));
2321 gen_op_store_QT0_fpr(QFPREG(rd
));
2323 case 0xd1: /* fstoi */
2324 gen_clear_float_exceptions();
2325 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2326 gen_helper_check_ieee_exceptions();
2327 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2329 case 0xd2: /* fdtoi */
2330 gen_op_load_fpr_DT1(DFPREG(rs2
));
2331 gen_clear_float_exceptions();
2332 gen_helper_fdtoi(cpu_tmp32
);
2333 gen_helper_check_ieee_exceptions();
2334 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2336 case 0xd3: /* fqtoi */
2337 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2338 gen_op_load_fpr_QT1(QFPREG(rs2
));
2339 gen_clear_float_exceptions();
2340 gen_helper_fqtoi(cpu_tmp32
);
2341 gen_helper_check_ieee_exceptions();
2342 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2344 #ifdef TARGET_SPARC64
2345 case 0x2: /* V9 fmovd */
2346 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2347 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2348 cpu_fpr
[DFPREG(rs2
) + 1]);
2350 case 0x3: /* V9 fmovq */
2351 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2352 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2353 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2354 cpu_fpr
[QFPREG(rs2
) + 1]);
2355 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2356 cpu_fpr
[QFPREG(rs2
) + 2]);
2357 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2358 cpu_fpr
[QFPREG(rs2
) + 3]);
2360 case 0x6: /* V9 fnegd */
2361 gen_op_load_fpr_DT1(DFPREG(rs2
));
2363 gen_op_store_DT0_fpr(DFPREG(rd
));
2365 case 0x7: /* V9 fnegq */
2366 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2367 gen_op_load_fpr_QT1(QFPREG(rs2
));
2369 gen_op_store_QT0_fpr(QFPREG(rd
));
2371 case 0xa: /* V9 fabsd */
2372 gen_op_load_fpr_DT1(DFPREG(rs2
));
2374 gen_op_store_DT0_fpr(DFPREG(rd
));
2376 case 0xb: /* V9 fabsq */
2377 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2378 gen_op_load_fpr_QT1(QFPREG(rs2
));
2380 gen_op_store_QT0_fpr(QFPREG(rd
));
2382 case 0x81: /* V9 fstox */
2383 gen_clear_float_exceptions();
2384 gen_helper_fstox(cpu_fpr
[rs2
]);
2385 gen_helper_check_ieee_exceptions();
2386 gen_op_store_DT0_fpr(DFPREG(rd
));
2388 case 0x82: /* V9 fdtox */
2389 gen_op_load_fpr_DT1(DFPREG(rs2
));
2390 gen_clear_float_exceptions();
2392 gen_helper_check_ieee_exceptions();
2393 gen_op_store_DT0_fpr(DFPREG(rd
));
2395 case 0x83: /* V9 fqtox */
2396 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2397 gen_op_load_fpr_QT1(QFPREG(rs2
));
2398 gen_clear_float_exceptions();
2400 gen_helper_check_ieee_exceptions();
2401 gen_op_store_DT0_fpr(DFPREG(rd
));
2403 case 0x84: /* V9 fxtos */
2404 gen_op_load_fpr_DT1(DFPREG(rs2
));
2405 gen_clear_float_exceptions();
2406 gen_helper_fxtos(cpu_tmp32
);
2407 gen_helper_check_ieee_exceptions();
2408 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2410 case 0x88: /* V9 fxtod */
2411 gen_op_load_fpr_DT1(DFPREG(rs2
));
2412 gen_clear_float_exceptions();
2414 gen_helper_check_ieee_exceptions();
2415 gen_op_store_DT0_fpr(DFPREG(rd
));
2417 case 0x8c: /* V9 fxtoq */
2418 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2419 gen_op_load_fpr_DT1(DFPREG(rs2
));
2420 gen_clear_float_exceptions();
2422 gen_helper_check_ieee_exceptions();
2423 gen_op_store_QT0_fpr(QFPREG(rd
));
2429 } else if (xop
== 0x35) { /* FPU Operations */
2430 #ifdef TARGET_SPARC64
2433 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2435 gen_op_clear_ieee_excp_and_FTT();
2436 rs1
= GET_FIELD(insn
, 13, 17);
2437 rs2
= GET_FIELD(insn
, 27, 31);
2438 xop
= GET_FIELD(insn
, 18, 26);
2439 #ifdef TARGET_SPARC64
2440 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2443 l1
= gen_new_label();
2444 cond
= GET_FIELD_SP(insn
, 14, 17);
2445 cpu_src1
= get_src1(insn
, cpu_src1
);
2446 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2448 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2451 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2454 l1
= gen_new_label();
2455 cond
= GET_FIELD_SP(insn
, 14, 17);
2456 cpu_src1
= get_src1(insn
, cpu_src1
);
2457 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2459 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2460 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2463 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2466 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2467 l1
= gen_new_label();
2468 cond
= GET_FIELD_SP(insn
, 14, 17);
2469 cpu_src1
= get_src1(insn
, cpu_src1
);
2470 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2472 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2473 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2474 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2475 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2481 #ifdef TARGET_SPARC64
2482 #define FMOVSCC(fcc) \
2487 l1 = gen_new_label(); \
2488 r_cond = tcg_temp_new(); \
2489 cond = GET_FIELD_SP(insn, 14, 17); \
2490 gen_fcond(r_cond, fcc, cond); \
2491 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2493 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2494 gen_set_label(l1); \
2495 tcg_temp_free(r_cond); \
2497 #define FMOVDCC(fcc) \
2502 l1 = gen_new_label(); \
2503 r_cond = tcg_temp_new(); \
2504 cond = GET_FIELD_SP(insn, 14, 17); \
2505 gen_fcond(r_cond, fcc, cond); \
2506 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2508 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2509 cpu_fpr[DFPREG(rs2)]); \
2510 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2511 cpu_fpr[DFPREG(rs2) + 1]); \
2512 gen_set_label(l1); \
2513 tcg_temp_free(r_cond); \
2515 #define FMOVQCC(fcc) \
2520 l1 = gen_new_label(); \
2521 r_cond = tcg_temp_new(); \
2522 cond = GET_FIELD_SP(insn, 14, 17); \
2523 gen_fcond(r_cond, fcc, cond); \
2524 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2526 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2527 cpu_fpr[QFPREG(rs2)]); \
2528 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2529 cpu_fpr[QFPREG(rs2) + 1]); \
2530 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2531 cpu_fpr[QFPREG(rs2) + 2]); \
2532 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2533 cpu_fpr[QFPREG(rs2) + 3]); \
2534 gen_set_label(l1); \
2535 tcg_temp_free(r_cond); \
2537 case 0x001: /* V9 fmovscc %fcc0 */
2540 case 0x002: /* V9 fmovdcc %fcc0 */
2543 case 0x003: /* V9 fmovqcc %fcc0 */
2544 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2547 case 0x041: /* V9 fmovscc %fcc1 */
2550 case 0x042: /* V9 fmovdcc %fcc1 */
2553 case 0x043: /* V9 fmovqcc %fcc1 */
2554 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2557 case 0x081: /* V9 fmovscc %fcc2 */
2560 case 0x082: /* V9 fmovdcc %fcc2 */
2563 case 0x083: /* V9 fmovqcc %fcc2 */
2564 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2567 case 0x0c1: /* V9 fmovscc %fcc3 */
2570 case 0x0c2: /* V9 fmovdcc %fcc3 */
2573 case 0x0c3: /* V9 fmovqcc %fcc3 */
2574 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2580 #define FMOVSCC(icc) \
2585 l1 = gen_new_label(); \
2586 r_cond = tcg_temp_new(); \
2587 cond = GET_FIELD_SP(insn, 14, 17); \
2588 gen_cond(r_cond, icc, cond, dc); \
2589 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2591 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2592 gen_set_label(l1); \
2593 tcg_temp_free(r_cond); \
2595 #define FMOVDCC(icc) \
2600 l1 = gen_new_label(); \
2601 r_cond = tcg_temp_new(); \
2602 cond = GET_FIELD_SP(insn, 14, 17); \
2603 gen_cond(r_cond, icc, cond, dc); \
2604 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2606 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2607 cpu_fpr[DFPREG(rs2)]); \
2608 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2609 cpu_fpr[DFPREG(rs2) + 1]); \
2610 gen_set_label(l1); \
2611 tcg_temp_free(r_cond); \
2613 #define FMOVQCC(icc) \
2618 l1 = gen_new_label(); \
2619 r_cond = tcg_temp_new(); \
2620 cond = GET_FIELD_SP(insn, 14, 17); \
2621 gen_cond(r_cond, icc, cond, dc); \
2622 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2624 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2625 cpu_fpr[QFPREG(rs2)]); \
2626 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2627 cpu_fpr[QFPREG(rs2) + 1]); \
2628 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2629 cpu_fpr[QFPREG(rs2) + 2]); \
2630 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2631 cpu_fpr[QFPREG(rs2) + 3]); \
2632 gen_set_label(l1); \
2633 tcg_temp_free(r_cond); \
2636 case 0x101: /* V9 fmovscc %icc */
2639 case 0x102: /* V9 fmovdcc %icc */
2641 case 0x103: /* V9 fmovqcc %icc */
2642 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2645 case 0x181: /* V9 fmovscc %xcc */
2648 case 0x182: /* V9 fmovdcc %xcc */
2651 case 0x183: /* V9 fmovqcc %xcc */
2652 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2659 case 0x51: /* fcmps, V9 %fcc */
2660 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2662 case 0x52: /* fcmpd, V9 %fcc */
2663 gen_op_load_fpr_DT0(DFPREG(rs1
));
2664 gen_op_load_fpr_DT1(DFPREG(rs2
));
2665 gen_op_fcmpd(rd
& 3);
2667 case 0x53: /* fcmpq, V9 %fcc */
2668 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2669 gen_op_load_fpr_QT0(QFPREG(rs1
));
2670 gen_op_load_fpr_QT1(QFPREG(rs2
));
2671 gen_op_fcmpq(rd
& 3);
2673 case 0x55: /* fcmpes, V9 %fcc */
2674 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2676 case 0x56: /* fcmped, V9 %fcc */
2677 gen_op_load_fpr_DT0(DFPREG(rs1
));
2678 gen_op_load_fpr_DT1(DFPREG(rs2
));
2679 gen_op_fcmped(rd
& 3);
2681 case 0x57: /* fcmpeq, V9 %fcc */
2682 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2683 gen_op_load_fpr_QT0(QFPREG(rs1
));
2684 gen_op_load_fpr_QT1(QFPREG(rs2
));
2685 gen_op_fcmpeq(rd
& 3);
2690 } else if (xop
== 0x2) {
2693 rs1
= GET_FIELD(insn
, 13, 17);
2695 // or %g0, x, y -> mov T0, x; mov y, T0
2696 if (IS_IMM
) { /* immediate */
2699 simm
= GET_FIELDs(insn
, 19, 31);
2700 r_const
= tcg_const_tl(simm
);
2701 gen_movl_TN_reg(rd
, r_const
);
2702 tcg_temp_free(r_const
);
2703 } else { /* register */
2704 rs2
= GET_FIELD(insn
, 27, 31);
2705 gen_movl_reg_TN(rs2
, cpu_dst
);
2706 gen_movl_TN_reg(rd
, cpu_dst
);
2709 cpu_src1
= get_src1(insn
, cpu_src1
);
2710 if (IS_IMM
) { /* immediate */
2711 simm
= GET_FIELDs(insn
, 19, 31);
2712 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2713 gen_movl_TN_reg(rd
, cpu_dst
);
2714 } else { /* register */
2715 // or x, %g0, y -> mov T1, x; mov y, T1
2716 rs2
= GET_FIELD(insn
, 27, 31);
2718 gen_movl_reg_TN(rs2
, cpu_src2
);
2719 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2720 gen_movl_TN_reg(rd
, cpu_dst
);
2722 gen_movl_TN_reg(rd
, cpu_src1
);
2725 #ifdef TARGET_SPARC64
2726 } else if (xop
== 0x25) { /* sll, V9 sllx */
2727 cpu_src1
= get_src1(insn
, cpu_src1
);
2728 if (IS_IMM
) { /* immediate */
2729 simm
= GET_FIELDs(insn
, 20, 31);
2730 if (insn
& (1 << 12)) {
2731 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2733 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
2735 } else { /* register */
2736 rs2
= GET_FIELD(insn
, 27, 31);
2737 gen_movl_reg_TN(rs2
, cpu_src2
);
2738 if (insn
& (1 << 12)) {
2739 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2741 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2743 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2745 gen_movl_TN_reg(rd
, cpu_dst
);
2746 } else if (xop
== 0x26) { /* srl, V9 srlx */
2747 cpu_src1
= get_src1(insn
, cpu_src1
);
2748 if (IS_IMM
) { /* immediate */
2749 simm
= GET_FIELDs(insn
, 20, 31);
2750 if (insn
& (1 << 12)) {
2751 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2753 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2754 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
2756 } else { /* register */
2757 rs2
= GET_FIELD(insn
, 27, 31);
2758 gen_movl_reg_TN(rs2
, cpu_src2
);
2759 if (insn
& (1 << 12)) {
2760 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2761 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2763 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2764 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2765 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2768 gen_movl_TN_reg(rd
, cpu_dst
);
2769 } else if (xop
== 0x27) { /* sra, V9 srax */
2770 cpu_src1
= get_src1(insn
, cpu_src1
);
2771 if (IS_IMM
) { /* immediate */
2772 simm
= GET_FIELDs(insn
, 20, 31);
2773 if (insn
& (1 << 12)) {
2774 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2776 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2777 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
2778 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
2780 } else { /* register */
2781 rs2
= GET_FIELD(insn
, 27, 31);
2782 gen_movl_reg_TN(rs2
, cpu_src2
);
2783 if (insn
& (1 << 12)) {
2784 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2785 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2787 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2788 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2789 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
2790 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2793 gen_movl_TN_reg(rd
, cpu_dst
);
2795 } else if (xop
< 0x36) {
2797 cpu_src1
= get_src1(insn
, cpu_src1
);
2798 cpu_src2
= get_src2(insn
, cpu_src2
);
2799 switch (xop
& ~0x10) {
2802 simm
= GET_FIELDs(insn
, 19, 31);
2804 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
2805 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2806 dc
->cc_op
= CC_OP_ADD
;
2808 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
2812 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2813 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
2814 dc
->cc_op
= CC_OP_ADD
;
2816 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2822 simm
= GET_FIELDs(insn
, 19, 31);
2823 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
2825 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2828 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2829 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2830 dc
->cc_op
= CC_OP_LOGIC
;
2835 simm
= GET_FIELDs(insn
, 19, 31);
2836 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2838 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2841 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2842 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2843 dc
->cc_op
= CC_OP_LOGIC
;
2848 simm
= GET_FIELDs(insn
, 19, 31);
2849 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
2851 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2854 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2855 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2856 dc
->cc_op
= CC_OP_LOGIC
;
2861 simm
= GET_FIELDs(insn
, 19, 31);
2863 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
2865 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
2869 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2870 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
2871 dc
->cc_op
= CC_OP_SUB
;
2873 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2877 case 0x5: /* andn */
2879 simm
= GET_FIELDs(insn
, 19, 31);
2880 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
2882 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2885 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2886 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2887 dc
->cc_op
= CC_OP_LOGIC
;
2892 simm
= GET_FIELDs(insn
, 19, 31);
2893 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
2895 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2898 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2899 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2900 dc
->cc_op
= CC_OP_LOGIC
;
2903 case 0x7: /* xorn */
2905 simm
= GET_FIELDs(insn
, 19, 31);
2906 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
2908 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
2909 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2912 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2913 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2914 dc
->cc_op
= CC_OP_LOGIC
;
2917 case 0x8: /* addx, V9 addc */
2919 simm
= GET_FIELDs(insn
, 19, 31);
2921 gen_helper_compute_psr();
2922 gen_op_addxi_cc(cpu_dst
, cpu_src1
, simm
);
2923 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
2924 dc
->cc_op
= CC_OP_ADDX
;
2926 gen_helper_compute_psr();
2927 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
2928 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
2929 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2933 gen_helper_compute_psr();
2934 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2935 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
2936 dc
->cc_op
= CC_OP_ADDX
;
2938 gen_helper_compute_psr();
2939 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
2940 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
2941 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2945 #ifdef TARGET_SPARC64
2946 case 0x9: /* V9 mulx */
2948 simm
= GET_FIELDs(insn
, 19, 31);
2949 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
2951 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
2955 case 0xa: /* umul */
2956 CHECK_IU_FEATURE(dc
, MUL
);
2957 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
2959 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2960 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2961 dc
->cc_op
= CC_OP_LOGIC
;
2964 case 0xb: /* smul */
2965 CHECK_IU_FEATURE(dc
, MUL
);
2966 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
2968 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
2969 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
2970 dc
->cc_op
= CC_OP_LOGIC
;
2973 case 0xc: /* subx, V9 subc */
2975 simm
= GET_FIELDs(insn
, 19, 31);
2977 gen_helper_compute_psr();
2978 gen_op_subxi_cc(cpu_dst
, cpu_src1
, simm
);
2979 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
2980 dc
->cc_op
= CC_OP_SUBX
;
2982 gen_helper_compute_psr();
2983 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
2984 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
2985 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
2989 gen_helper_compute_psr();
2990 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
2991 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
2992 dc
->cc_op
= CC_OP_SUBX
;
2994 gen_helper_compute_psr();
2995 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
2996 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
2997 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3001 #ifdef TARGET_SPARC64
3002 case 0xd: /* V9 udivx */
3003 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3004 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3005 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3006 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3009 case 0xe: /* udiv */
3010 CHECK_IU_FEATURE(dc
, DIV
);
3011 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3013 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3014 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3015 dc
->cc_op
= CC_OP_DIV
;
3018 case 0xf: /* sdiv */
3019 CHECK_IU_FEATURE(dc
, DIV
);
3020 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3022 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3023 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_DIV
);
3024 dc
->cc_op
= CC_OP_DIV
;
3030 gen_movl_TN_reg(rd
, cpu_dst
);
3032 cpu_src1
= get_src1(insn
, cpu_src1
);
3033 cpu_src2
= get_src2(insn
, cpu_src2
);
3035 case 0x20: /* taddcc */
3036 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3037 gen_movl_TN_reg(rd
, cpu_dst
);
3038 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3039 dc
->cc_op
= CC_OP_TADD
;
3041 case 0x21: /* tsubcc */
3042 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3043 gen_movl_TN_reg(rd
, cpu_dst
);
3044 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3045 dc
->cc_op
= CC_OP_TSUB
;
3047 case 0x22: /* taddcctv */
3048 save_state(dc
, cpu_cond
);
3049 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3050 gen_movl_TN_reg(rd
, cpu_dst
);
3051 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3052 dc
->cc_op
= CC_OP_TADDTV
;
3054 case 0x23: /* tsubcctv */
3055 save_state(dc
, cpu_cond
);
3056 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3057 gen_movl_TN_reg(rd
, cpu_dst
);
3058 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3059 dc
->cc_op
= CC_OP_TSUBTV
;
3061 case 0x24: /* mulscc */
3062 gen_helper_compute_psr();
3063 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3064 gen_movl_TN_reg(rd
, cpu_dst
);
3065 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3066 dc
->cc_op
= CC_OP_ADD
;
3068 #ifndef TARGET_SPARC64
3069 case 0x25: /* sll */
3070 if (IS_IMM
) { /* immediate */
3071 simm
= GET_FIELDs(insn
, 20, 31);
3072 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3073 } else { /* register */
3074 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3075 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3077 gen_movl_TN_reg(rd
, cpu_dst
);
3079 case 0x26: /* srl */
3080 if (IS_IMM
) { /* immediate */
3081 simm
= GET_FIELDs(insn
, 20, 31);
3082 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3083 } else { /* register */
3084 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3085 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3087 gen_movl_TN_reg(rd
, cpu_dst
);
3089 case 0x27: /* sra */
3090 if (IS_IMM
) { /* immediate */
3091 simm
= GET_FIELDs(insn
, 20, 31);
3092 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3093 } else { /* register */
3094 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3095 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3097 gen_movl_TN_reg(rd
, cpu_dst
);
3104 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3105 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3107 #ifndef TARGET_SPARC64
3108 case 0x01 ... 0x0f: /* undefined in the
3112 case 0x10 ... 0x1f: /* implementation-dependent
3118 case 0x2: /* V9 wrccr */
3119 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3120 gen_helper_wrccr(cpu_dst
);
3121 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3122 dc
->cc_op
= CC_OP_FLAGS
;
3124 case 0x3: /* V9 wrasi */
3125 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3126 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3128 case 0x6: /* V9 wrfprs */
3129 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3130 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3131 save_state(dc
, cpu_cond
);
3136 case 0xf: /* V9 sir, nop if user */
3137 #if !defined(CONFIG_USER_ONLY)
3142 case 0x13: /* Graphics Status */
3143 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3145 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3147 case 0x14: /* Softint set */
3148 if (!supervisor(dc
))
3150 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3151 gen_helper_set_softint(cpu_tmp64
);
3153 case 0x15: /* Softint clear */
3154 if (!supervisor(dc
))
3156 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3157 gen_helper_clear_softint(cpu_tmp64
);
3159 case 0x16: /* Softint write */
3160 if (!supervisor(dc
))
3162 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3163 gen_helper_write_softint(cpu_tmp64
);
3165 case 0x17: /* Tick compare */
3166 #if !defined(CONFIG_USER_ONLY)
3167 if (!supervisor(dc
))
3173 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3175 r_tickptr
= tcg_temp_new_ptr();
3176 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3177 offsetof(CPUState
, tick
));
3178 gen_helper_tick_set_limit(r_tickptr
,
3180 tcg_temp_free_ptr(r_tickptr
);
3183 case 0x18: /* System tick */
3184 #if !defined(CONFIG_USER_ONLY)
3185 if (!supervisor(dc
))
3191 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3193 r_tickptr
= tcg_temp_new_ptr();
3194 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3195 offsetof(CPUState
, stick
));
3196 gen_helper_tick_set_count(r_tickptr
,
3198 tcg_temp_free_ptr(r_tickptr
);
3201 case 0x19: /* System tick compare */
3202 #if !defined(CONFIG_USER_ONLY)
3203 if (!supervisor(dc
))
3209 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3211 r_tickptr
= tcg_temp_new_ptr();
3212 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3213 offsetof(CPUState
, stick
));
3214 gen_helper_tick_set_limit(r_tickptr
,
3216 tcg_temp_free_ptr(r_tickptr
);
3220 case 0x10: /* Performance Control */
3221 case 0x11: /* Performance Instrumentation
3223 case 0x12: /* Dispatch Control */
3230 #if !defined(CONFIG_USER_ONLY)
3231 case 0x31: /* wrpsr, V9 saved, restored */
3233 if (!supervisor(dc
))
3235 #ifdef TARGET_SPARC64
3241 gen_helper_restored();
3243 case 2: /* UA2005 allclean */
3244 case 3: /* UA2005 otherw */
3245 case 4: /* UA2005 normalw */
3246 case 5: /* UA2005 invalw */
3252 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3253 gen_helper_wrpsr(cpu_dst
);
3254 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3255 dc
->cc_op
= CC_OP_FLAGS
;
3256 save_state(dc
, cpu_cond
);
3263 case 0x32: /* wrwim, V9 wrpr */
3265 if (!supervisor(dc
))
3267 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3268 #ifdef TARGET_SPARC64
3274 r_tsptr
= tcg_temp_new_ptr();
3275 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3276 offsetof(CPUState
, tsptr
));
3277 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3278 offsetof(trap_state
, tpc
));
3279 tcg_temp_free_ptr(r_tsptr
);
3286 r_tsptr
= tcg_temp_new_ptr();
3287 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3288 offsetof(CPUState
, tsptr
));
3289 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3290 offsetof(trap_state
, tnpc
));
3291 tcg_temp_free_ptr(r_tsptr
);
3298 r_tsptr
= tcg_temp_new_ptr();
3299 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3300 offsetof(CPUState
, tsptr
));
3301 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3302 offsetof(trap_state
,
3304 tcg_temp_free_ptr(r_tsptr
);
3311 r_tsptr
= tcg_temp_new_ptr();
3312 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3313 offsetof(CPUState
, tsptr
));
3314 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3315 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3316 offsetof(trap_state
, tt
));
3317 tcg_temp_free_ptr(r_tsptr
);
3324 r_tickptr
= tcg_temp_new_ptr();
3325 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3326 offsetof(CPUState
, tick
));
3327 gen_helper_tick_set_count(r_tickptr
,
3329 tcg_temp_free_ptr(r_tickptr
);
3333 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3336 save_state(dc
, cpu_cond
);
3337 gen_helper_wrpstate(cpu_tmp0
);
3343 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3344 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3345 offsetof(CPUSPARCState
, tl
));
3348 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3349 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3350 offsetof(CPUSPARCState
,
3354 gen_helper_wrcwp(cpu_tmp0
);
3357 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3358 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3359 offsetof(CPUSPARCState
,
3362 case 11: // canrestore
3363 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3364 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3365 offsetof(CPUSPARCState
,
3368 case 12: // cleanwin
3369 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3370 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3371 offsetof(CPUSPARCState
,
3374 case 13: // otherwin
3375 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3376 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3377 offsetof(CPUSPARCState
,
3381 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3382 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3383 offsetof(CPUSPARCState
,
3386 case 16: // UA2005 gl
3387 CHECK_IU_FEATURE(dc
, GL
);
3388 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3389 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3390 offsetof(CPUSPARCState
, gl
));
3392 case 26: // UA2005 strand status
3393 CHECK_IU_FEATURE(dc
, HYPV
);
3394 if (!hypervisor(dc
))
3396 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3402 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3403 if (dc
->def
->nwindows
!= 32)
3404 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3405 (1 << dc
->def
->nwindows
) - 1);
3406 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3410 case 0x33: /* wrtbr, UA2005 wrhpr */
3412 #ifndef TARGET_SPARC64
3413 if (!supervisor(dc
))
3415 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3417 CHECK_IU_FEATURE(dc
, HYPV
);
3418 if (!hypervisor(dc
))
3420 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3423 // XXX gen_op_wrhpstate();
3424 save_state(dc
, cpu_cond
);
3430 // XXX gen_op_wrhtstate();
3433 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3436 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3438 case 31: // hstick_cmpr
3442 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3443 r_tickptr
= tcg_temp_new_ptr();
3444 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3445 offsetof(CPUState
, hstick
));
3446 gen_helper_tick_set_limit(r_tickptr
,
3448 tcg_temp_free_ptr(r_tickptr
);
3451 case 6: // hver readonly
3459 #ifdef TARGET_SPARC64
3460 case 0x2c: /* V9 movcc */
3462 int cc
= GET_FIELD_SP(insn
, 11, 12);
3463 int cond
= GET_FIELD_SP(insn
, 14, 17);
3467 r_cond
= tcg_temp_new();
3468 if (insn
& (1 << 18)) {
3470 gen_cond(r_cond
, 0, cond
, dc
);
3472 gen_cond(r_cond
, 1, cond
, dc
);
3476 gen_fcond(r_cond
, cc
, cond
);
3479 l1
= gen_new_label();
3481 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3482 if (IS_IMM
) { /* immediate */
3485 simm
= GET_FIELD_SPs(insn
, 0, 10);
3486 r_const
= tcg_const_tl(simm
);
3487 gen_movl_TN_reg(rd
, r_const
);
3488 tcg_temp_free(r_const
);
3490 rs2
= GET_FIELD_SP(insn
, 0, 4);
3491 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3492 gen_movl_TN_reg(rd
, cpu_tmp0
);
3495 tcg_temp_free(r_cond
);
3498 case 0x2d: /* V9 sdivx */
3499 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3500 gen_movl_TN_reg(rd
, cpu_dst
);
3502 case 0x2e: /* V9 popc */
3504 cpu_src2
= get_src2(insn
, cpu_src2
);
3505 gen_helper_popc(cpu_dst
, cpu_src2
);
3506 gen_movl_TN_reg(rd
, cpu_dst
);
3508 case 0x2f: /* V9 movr */
3510 int cond
= GET_FIELD_SP(insn
, 10, 12);
3513 cpu_src1
= get_src1(insn
, cpu_src1
);
3515 l1
= gen_new_label();
3517 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3519 if (IS_IMM
) { /* immediate */
3522 simm
= GET_FIELD_SPs(insn
, 0, 9);
3523 r_const
= tcg_const_tl(simm
);
3524 gen_movl_TN_reg(rd
, r_const
);
3525 tcg_temp_free(r_const
);
3527 rs2
= GET_FIELD_SP(insn
, 0, 4);
3528 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3529 gen_movl_TN_reg(rd
, cpu_tmp0
);
3539 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3540 #ifdef TARGET_SPARC64
3541 int opf
= GET_FIELD_SP(insn
, 5, 13);
3542 rs1
= GET_FIELD(insn
, 13, 17);
3543 rs2
= GET_FIELD(insn
, 27, 31);
3544 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3548 case 0x000: /* VIS I edge8cc */
3549 case 0x001: /* VIS II edge8n */
3550 case 0x002: /* VIS I edge8lcc */
3551 case 0x003: /* VIS II edge8ln */
3552 case 0x004: /* VIS I edge16cc */
3553 case 0x005: /* VIS II edge16n */
3554 case 0x006: /* VIS I edge16lcc */
3555 case 0x007: /* VIS II edge16ln */
3556 case 0x008: /* VIS I edge32cc */
3557 case 0x009: /* VIS II edge32n */
3558 case 0x00a: /* VIS I edge32lcc */
3559 case 0x00b: /* VIS II edge32ln */
3562 case 0x010: /* VIS I array8 */
3563 CHECK_FPU_FEATURE(dc
, VIS1
);
3564 cpu_src1
= get_src1(insn
, cpu_src1
);
3565 gen_movl_reg_TN(rs2
, cpu_src2
);
3566 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3567 gen_movl_TN_reg(rd
, cpu_dst
);
3569 case 0x012: /* VIS I array16 */
3570 CHECK_FPU_FEATURE(dc
, VIS1
);
3571 cpu_src1
= get_src1(insn
, cpu_src1
);
3572 gen_movl_reg_TN(rs2
, cpu_src2
);
3573 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3574 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3575 gen_movl_TN_reg(rd
, cpu_dst
);
3577 case 0x014: /* VIS I array32 */
3578 CHECK_FPU_FEATURE(dc
, VIS1
);
3579 cpu_src1
= get_src1(insn
, cpu_src1
);
3580 gen_movl_reg_TN(rs2
, cpu_src2
);
3581 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3582 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3583 gen_movl_TN_reg(rd
, cpu_dst
);
3585 case 0x018: /* VIS I alignaddr */
3586 CHECK_FPU_FEATURE(dc
, VIS1
);
3587 cpu_src1
= get_src1(insn
, cpu_src1
);
3588 gen_movl_reg_TN(rs2
, cpu_src2
);
3589 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3590 gen_movl_TN_reg(rd
, cpu_dst
);
3592 case 0x019: /* VIS II bmask */
3593 case 0x01a: /* VIS I alignaddrl */
3596 case 0x020: /* VIS I fcmple16 */
3597 CHECK_FPU_FEATURE(dc
, VIS1
);
3598 gen_op_load_fpr_DT0(DFPREG(rs1
));
3599 gen_op_load_fpr_DT1(DFPREG(rs2
));
3600 gen_helper_fcmple16();
3601 gen_op_store_DT0_fpr(DFPREG(rd
));
3603 case 0x022: /* VIS I fcmpne16 */
3604 CHECK_FPU_FEATURE(dc
, VIS1
);
3605 gen_op_load_fpr_DT0(DFPREG(rs1
));
3606 gen_op_load_fpr_DT1(DFPREG(rs2
));
3607 gen_helper_fcmpne16();
3608 gen_op_store_DT0_fpr(DFPREG(rd
));
3610 case 0x024: /* VIS I fcmple32 */
3611 CHECK_FPU_FEATURE(dc
, VIS1
);
3612 gen_op_load_fpr_DT0(DFPREG(rs1
));
3613 gen_op_load_fpr_DT1(DFPREG(rs2
));
3614 gen_helper_fcmple32();
3615 gen_op_store_DT0_fpr(DFPREG(rd
));
3617 case 0x026: /* VIS I fcmpne32 */
3618 CHECK_FPU_FEATURE(dc
, VIS1
);
3619 gen_op_load_fpr_DT0(DFPREG(rs1
));
3620 gen_op_load_fpr_DT1(DFPREG(rs2
));
3621 gen_helper_fcmpne32();
3622 gen_op_store_DT0_fpr(DFPREG(rd
));
3624 case 0x028: /* VIS I fcmpgt16 */
3625 CHECK_FPU_FEATURE(dc
, VIS1
);
3626 gen_op_load_fpr_DT0(DFPREG(rs1
));
3627 gen_op_load_fpr_DT1(DFPREG(rs2
));
3628 gen_helper_fcmpgt16();
3629 gen_op_store_DT0_fpr(DFPREG(rd
));
3631 case 0x02a: /* VIS I fcmpeq16 */
3632 CHECK_FPU_FEATURE(dc
, VIS1
);
3633 gen_op_load_fpr_DT0(DFPREG(rs1
));
3634 gen_op_load_fpr_DT1(DFPREG(rs2
));
3635 gen_helper_fcmpeq16();
3636 gen_op_store_DT0_fpr(DFPREG(rd
));
3638 case 0x02c: /* VIS I fcmpgt32 */
3639 CHECK_FPU_FEATURE(dc
, VIS1
);
3640 gen_op_load_fpr_DT0(DFPREG(rs1
));
3641 gen_op_load_fpr_DT1(DFPREG(rs2
));
3642 gen_helper_fcmpgt32();
3643 gen_op_store_DT0_fpr(DFPREG(rd
));
3645 case 0x02e: /* VIS I fcmpeq32 */
3646 CHECK_FPU_FEATURE(dc
, VIS1
);
3647 gen_op_load_fpr_DT0(DFPREG(rs1
));
3648 gen_op_load_fpr_DT1(DFPREG(rs2
));
3649 gen_helper_fcmpeq32();
3650 gen_op_store_DT0_fpr(DFPREG(rd
));
3652 case 0x031: /* VIS I fmul8x16 */
3653 CHECK_FPU_FEATURE(dc
, VIS1
);
3654 gen_op_load_fpr_DT0(DFPREG(rs1
));
3655 gen_op_load_fpr_DT1(DFPREG(rs2
));
3656 gen_helper_fmul8x16();
3657 gen_op_store_DT0_fpr(DFPREG(rd
));
3659 case 0x033: /* VIS I fmul8x16au */
3660 CHECK_FPU_FEATURE(dc
, VIS1
);
3661 gen_op_load_fpr_DT0(DFPREG(rs1
));
3662 gen_op_load_fpr_DT1(DFPREG(rs2
));
3663 gen_helper_fmul8x16au();
3664 gen_op_store_DT0_fpr(DFPREG(rd
));
3666 case 0x035: /* VIS I fmul8x16al */
3667 CHECK_FPU_FEATURE(dc
, VIS1
);
3668 gen_op_load_fpr_DT0(DFPREG(rs1
));
3669 gen_op_load_fpr_DT1(DFPREG(rs2
));
3670 gen_helper_fmul8x16al();
3671 gen_op_store_DT0_fpr(DFPREG(rd
));
3673 case 0x036: /* VIS I fmul8sux16 */
3674 CHECK_FPU_FEATURE(dc
, VIS1
);
3675 gen_op_load_fpr_DT0(DFPREG(rs1
));
3676 gen_op_load_fpr_DT1(DFPREG(rs2
));
3677 gen_helper_fmul8sux16();
3678 gen_op_store_DT0_fpr(DFPREG(rd
));
3680 case 0x037: /* VIS I fmul8ulx16 */
3681 CHECK_FPU_FEATURE(dc
, VIS1
);
3682 gen_op_load_fpr_DT0(DFPREG(rs1
));
3683 gen_op_load_fpr_DT1(DFPREG(rs2
));
3684 gen_helper_fmul8ulx16();
3685 gen_op_store_DT0_fpr(DFPREG(rd
));
3687 case 0x038: /* VIS I fmuld8sux16 */
3688 CHECK_FPU_FEATURE(dc
, VIS1
);
3689 gen_op_load_fpr_DT0(DFPREG(rs1
));
3690 gen_op_load_fpr_DT1(DFPREG(rs2
));
3691 gen_helper_fmuld8sux16();
3692 gen_op_store_DT0_fpr(DFPREG(rd
));
3694 case 0x039: /* VIS I fmuld8ulx16 */
3695 CHECK_FPU_FEATURE(dc
, VIS1
);
3696 gen_op_load_fpr_DT0(DFPREG(rs1
));
3697 gen_op_load_fpr_DT1(DFPREG(rs2
));
3698 gen_helper_fmuld8ulx16();
3699 gen_op_store_DT0_fpr(DFPREG(rd
));
3701 case 0x03a: /* VIS I fpack32 */
3702 case 0x03b: /* VIS I fpack16 */
3703 case 0x03d: /* VIS I fpackfix */
3704 case 0x03e: /* VIS I pdist */
3707 case 0x048: /* VIS I faligndata */
3708 CHECK_FPU_FEATURE(dc
, VIS1
);
3709 gen_op_load_fpr_DT0(DFPREG(rs1
));
3710 gen_op_load_fpr_DT1(DFPREG(rs2
));
3711 gen_helper_faligndata();
3712 gen_op_store_DT0_fpr(DFPREG(rd
));
3714 case 0x04b: /* VIS I fpmerge */
3715 CHECK_FPU_FEATURE(dc
, VIS1
);
3716 gen_op_load_fpr_DT0(DFPREG(rs1
));
3717 gen_op_load_fpr_DT1(DFPREG(rs2
));
3718 gen_helper_fpmerge();
3719 gen_op_store_DT0_fpr(DFPREG(rd
));
3721 case 0x04c: /* VIS II bshuffle */
3724 case 0x04d: /* VIS I fexpand */
3725 CHECK_FPU_FEATURE(dc
, VIS1
);
3726 gen_op_load_fpr_DT0(DFPREG(rs1
));
3727 gen_op_load_fpr_DT1(DFPREG(rs2
));
3728 gen_helper_fexpand();
3729 gen_op_store_DT0_fpr(DFPREG(rd
));
3731 case 0x050: /* VIS I fpadd16 */
3732 CHECK_FPU_FEATURE(dc
, VIS1
);
3733 gen_op_load_fpr_DT0(DFPREG(rs1
));
3734 gen_op_load_fpr_DT1(DFPREG(rs2
));
3735 gen_helper_fpadd16();
3736 gen_op_store_DT0_fpr(DFPREG(rd
));
3738 case 0x051: /* VIS I fpadd16s */
3739 CHECK_FPU_FEATURE(dc
, VIS1
);
3740 gen_helper_fpadd16s(cpu_fpr
[rd
],
3741 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3743 case 0x052: /* VIS I fpadd32 */
3744 CHECK_FPU_FEATURE(dc
, VIS1
);
3745 gen_op_load_fpr_DT0(DFPREG(rs1
));
3746 gen_op_load_fpr_DT1(DFPREG(rs2
));
3747 gen_helper_fpadd32();
3748 gen_op_store_DT0_fpr(DFPREG(rd
));
3750 case 0x053: /* VIS I fpadd32s */
3751 CHECK_FPU_FEATURE(dc
, VIS1
);
3752 gen_helper_fpadd32s(cpu_fpr
[rd
],
3753 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3755 case 0x054: /* VIS I fpsub16 */
3756 CHECK_FPU_FEATURE(dc
, VIS1
);
3757 gen_op_load_fpr_DT0(DFPREG(rs1
));
3758 gen_op_load_fpr_DT1(DFPREG(rs2
));
3759 gen_helper_fpsub16();
3760 gen_op_store_DT0_fpr(DFPREG(rd
));
3762 case 0x055: /* VIS I fpsub16s */
3763 CHECK_FPU_FEATURE(dc
, VIS1
);
3764 gen_helper_fpsub16s(cpu_fpr
[rd
],
3765 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3767 case 0x056: /* VIS I fpsub32 */
3768 CHECK_FPU_FEATURE(dc
, VIS1
);
3769 gen_op_load_fpr_DT0(DFPREG(rs1
));
3770 gen_op_load_fpr_DT1(DFPREG(rs2
));
3771 gen_helper_fpsub32();
3772 gen_op_store_DT0_fpr(DFPREG(rd
));
3774 case 0x057: /* VIS I fpsub32s */
3775 CHECK_FPU_FEATURE(dc
, VIS1
);
3776 gen_helper_fpsub32s(cpu_fpr
[rd
],
3777 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3779 case 0x060: /* VIS I fzero */
3780 CHECK_FPU_FEATURE(dc
, VIS1
);
3781 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3782 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3784 case 0x061: /* VIS I fzeros */
3785 CHECK_FPU_FEATURE(dc
, VIS1
);
3786 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3788 case 0x062: /* VIS I fnor */
3789 CHECK_FPU_FEATURE(dc
, VIS1
);
3790 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3791 cpu_fpr
[DFPREG(rs2
)]);
3792 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3793 cpu_fpr
[DFPREG(rs2
) + 1]);
3795 case 0x063: /* VIS I fnors */
3796 CHECK_FPU_FEATURE(dc
, VIS1
);
3797 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3799 case 0x064: /* VIS I fandnot2 */
3800 CHECK_FPU_FEATURE(dc
, VIS1
);
3801 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3802 cpu_fpr
[DFPREG(rs2
)]);
3803 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3804 cpu_fpr
[DFPREG(rs1
) + 1],
3805 cpu_fpr
[DFPREG(rs2
) + 1]);
3807 case 0x065: /* VIS I fandnot2s */
3808 CHECK_FPU_FEATURE(dc
, VIS1
);
3809 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3811 case 0x066: /* VIS I fnot2 */
3812 CHECK_FPU_FEATURE(dc
, VIS1
);
3813 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
3814 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3815 cpu_fpr
[DFPREG(rs2
) + 1]);
3817 case 0x067: /* VIS I fnot2s */
3818 CHECK_FPU_FEATURE(dc
, VIS1
);
3819 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3821 case 0x068: /* VIS I fandnot1 */
3822 CHECK_FPU_FEATURE(dc
, VIS1
);
3823 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3824 cpu_fpr
[DFPREG(rs1
)]);
3825 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3826 cpu_fpr
[DFPREG(rs2
) + 1],
3827 cpu_fpr
[DFPREG(rs1
) + 1]);
3829 case 0x069: /* VIS I fandnot1s */
3830 CHECK_FPU_FEATURE(dc
, VIS1
);
3831 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3833 case 0x06a: /* VIS I fnot1 */
3834 CHECK_FPU_FEATURE(dc
, VIS1
);
3835 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3836 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3837 cpu_fpr
[DFPREG(rs1
) + 1]);
3839 case 0x06b: /* VIS I fnot1s */
3840 CHECK_FPU_FEATURE(dc
, VIS1
);
3841 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3843 case 0x06c: /* VIS I fxor */
3844 CHECK_FPU_FEATURE(dc
, VIS1
);
3845 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3846 cpu_fpr
[DFPREG(rs2
)]);
3847 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3848 cpu_fpr
[DFPREG(rs1
) + 1],
3849 cpu_fpr
[DFPREG(rs2
) + 1]);
3851 case 0x06d: /* VIS I fxors */
3852 CHECK_FPU_FEATURE(dc
, VIS1
);
3853 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3855 case 0x06e: /* VIS I fnand */
3856 CHECK_FPU_FEATURE(dc
, VIS1
);
3857 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3858 cpu_fpr
[DFPREG(rs2
)]);
3859 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3860 cpu_fpr
[DFPREG(rs2
) + 1]);
3862 case 0x06f: /* VIS I fnands */
3863 CHECK_FPU_FEATURE(dc
, VIS1
);
3864 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3866 case 0x070: /* VIS I fand */
3867 CHECK_FPU_FEATURE(dc
, VIS1
);
3868 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3869 cpu_fpr
[DFPREG(rs2
)]);
3870 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3871 cpu_fpr
[DFPREG(rs1
) + 1],
3872 cpu_fpr
[DFPREG(rs2
) + 1]);
3874 case 0x071: /* VIS I fands */
3875 CHECK_FPU_FEATURE(dc
, VIS1
);
3876 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3878 case 0x072: /* VIS I fxnor */
3879 CHECK_FPU_FEATURE(dc
, VIS1
);
3880 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3881 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3882 cpu_fpr
[DFPREG(rs1
)]);
3883 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
3884 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3885 cpu_fpr
[DFPREG(rs1
) + 1]);
3887 case 0x073: /* VIS I fxnors */
3888 CHECK_FPU_FEATURE(dc
, VIS1
);
3889 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3890 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3892 case 0x074: /* VIS I fsrc1 */
3893 CHECK_FPU_FEATURE(dc
, VIS1
);
3894 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
3895 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
3896 cpu_fpr
[DFPREG(rs1
) + 1]);
3898 case 0x075: /* VIS I fsrc1s */
3899 CHECK_FPU_FEATURE(dc
, VIS1
);
3900 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
3902 case 0x076: /* VIS I fornot2 */
3903 CHECK_FPU_FEATURE(dc
, VIS1
);
3904 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3905 cpu_fpr
[DFPREG(rs2
)]);
3906 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3907 cpu_fpr
[DFPREG(rs1
) + 1],
3908 cpu_fpr
[DFPREG(rs2
) + 1]);
3910 case 0x077: /* VIS I fornot2s */
3911 CHECK_FPU_FEATURE(dc
, VIS1
);
3912 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3914 case 0x078: /* VIS I fsrc2 */
3915 CHECK_FPU_FEATURE(dc
, VIS1
);
3916 gen_op_load_fpr_DT0(DFPREG(rs2
));
3917 gen_op_store_DT0_fpr(DFPREG(rd
));
3919 case 0x079: /* VIS I fsrc2s */
3920 CHECK_FPU_FEATURE(dc
, VIS1
);
3921 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
3923 case 0x07a: /* VIS I fornot1 */
3924 CHECK_FPU_FEATURE(dc
, VIS1
);
3925 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3926 cpu_fpr
[DFPREG(rs1
)]);
3927 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3928 cpu_fpr
[DFPREG(rs2
) + 1],
3929 cpu_fpr
[DFPREG(rs1
) + 1]);
3931 case 0x07b: /* VIS I fornot1s */
3932 CHECK_FPU_FEATURE(dc
, VIS1
);
3933 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
3935 case 0x07c: /* VIS I for */
3936 CHECK_FPU_FEATURE(dc
, VIS1
);
3937 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3938 cpu_fpr
[DFPREG(rs2
)]);
3939 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
3940 cpu_fpr
[DFPREG(rs1
) + 1],
3941 cpu_fpr
[DFPREG(rs2
) + 1]);
3943 case 0x07d: /* VIS I fors */
3944 CHECK_FPU_FEATURE(dc
, VIS1
);
3945 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3947 case 0x07e: /* VIS I fone */
3948 CHECK_FPU_FEATURE(dc
, VIS1
);
3949 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
3950 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
3952 case 0x07f: /* VIS I fones */
3953 CHECK_FPU_FEATURE(dc
, VIS1
);
3954 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
3956 case 0x080: /* VIS I shutdown */
3957 case 0x081: /* VIS II siam */
3966 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
3967 #ifdef TARGET_SPARC64
3972 #ifdef TARGET_SPARC64
3973 } else if (xop
== 0x39) { /* V9 return */
3976 save_state(dc
, cpu_cond
);
3977 cpu_src1
= get_src1(insn
, cpu_src1
);
3978 if (IS_IMM
) { /* immediate */
3979 simm
= GET_FIELDs(insn
, 19, 31);
3980 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3981 } else { /* register */
3982 rs2
= GET_FIELD(insn
, 27, 31);
3984 gen_movl_reg_TN(rs2
, cpu_src2
);
3985 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3987 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
3989 gen_helper_restore();
3990 gen_mov_pc_npc(dc
, cpu_cond
);
3991 r_const
= tcg_const_i32(3);
3992 gen_helper_check_align(cpu_dst
, r_const
);
3993 tcg_temp_free_i32(r_const
);
3994 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
3995 dc
->npc
= DYNAMIC_PC
;
3999 cpu_src1
= get_src1(insn
, cpu_src1
);
4000 if (IS_IMM
) { /* immediate */
4001 simm
= GET_FIELDs(insn
, 19, 31);
4002 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4003 } else { /* register */
4004 rs2
= GET_FIELD(insn
, 27, 31);
4006 gen_movl_reg_TN(rs2
, cpu_src2
);
4007 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4009 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4012 case 0x38: /* jmpl */
4017 r_pc
= tcg_const_tl(dc
->pc
);
4018 gen_movl_TN_reg(rd
, r_pc
);
4019 tcg_temp_free(r_pc
);
4020 gen_mov_pc_npc(dc
, cpu_cond
);
4021 r_const
= tcg_const_i32(3);
4022 gen_helper_check_align(cpu_dst
, r_const
);
4023 tcg_temp_free_i32(r_const
);
4024 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4025 dc
->npc
= DYNAMIC_PC
;
4028 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4029 case 0x39: /* rett, V9 return */
4033 if (!supervisor(dc
))
4035 gen_mov_pc_npc(dc
, cpu_cond
);
4036 r_const
= tcg_const_i32(3);
4037 gen_helper_check_align(cpu_dst
, r_const
);
4038 tcg_temp_free_i32(r_const
);
4039 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4040 dc
->npc
= DYNAMIC_PC
;
4045 case 0x3b: /* flush */
4046 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4048 gen_helper_flush(cpu_dst
);
4050 case 0x3c: /* save */
4051 save_state(dc
, cpu_cond
);
4053 gen_movl_TN_reg(rd
, cpu_dst
);
4055 case 0x3d: /* restore */
4056 save_state(dc
, cpu_cond
);
4057 gen_helper_restore();
4058 gen_movl_TN_reg(rd
, cpu_dst
);
4060 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4061 case 0x3e: /* V9 done/retry */
4065 if (!supervisor(dc
))
4067 dc
->npc
= DYNAMIC_PC
;
4068 dc
->pc
= DYNAMIC_PC
;
4072 if (!supervisor(dc
))
4074 dc
->npc
= DYNAMIC_PC
;
4075 dc
->pc
= DYNAMIC_PC
;
4091 case 3: /* load/store instructions */
4093 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4095 cpu_src1
= get_src1(insn
, cpu_src1
);
4096 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4097 rs2
= GET_FIELD(insn
, 27, 31);
4098 gen_movl_reg_TN(rs2
, cpu_src2
);
4099 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4100 } else if (IS_IMM
) { /* immediate */
4101 simm
= GET_FIELDs(insn
, 19, 31);
4102 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4103 } else { /* register */
4104 rs2
= GET_FIELD(insn
, 27, 31);
4106 gen_movl_reg_TN(rs2
, cpu_src2
);
4107 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4109 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4111 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4112 (xop
> 0x17 && xop
<= 0x1d ) ||
4113 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4115 case 0x0: /* ld, V9 lduw, load unsigned word */
4116 gen_address_mask(dc
, cpu_addr
);
4117 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4119 case 0x1: /* ldub, load unsigned byte */
4120 gen_address_mask(dc
, cpu_addr
);
4121 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4123 case 0x2: /* lduh, load unsigned halfword */
4124 gen_address_mask(dc
, cpu_addr
);
4125 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4127 case 0x3: /* ldd, load double word */
4133 save_state(dc
, cpu_cond
);
4134 r_const
= tcg_const_i32(7);
4135 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4136 tcg_temp_free_i32(r_const
);
4137 gen_address_mask(dc
, cpu_addr
);
4138 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4139 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4140 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4141 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4142 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4143 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4144 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4147 case 0x9: /* ldsb, load signed byte */
4148 gen_address_mask(dc
, cpu_addr
);
4149 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4151 case 0xa: /* ldsh, load signed halfword */
4152 gen_address_mask(dc
, cpu_addr
);
4153 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4155 case 0xd: /* ldstub -- XXX: should be atomically */
4159 gen_address_mask(dc
, cpu_addr
);
4160 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4161 r_const
= tcg_const_tl(0xff);
4162 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4163 tcg_temp_free(r_const
);
4166 case 0x0f: /* swap, swap register with memory. Also
4168 CHECK_IU_FEATURE(dc
, SWAP
);
4169 gen_movl_reg_TN(rd
, cpu_val
);
4170 gen_address_mask(dc
, cpu_addr
);
4171 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4172 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4173 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4175 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4176 case 0x10: /* lda, V9 lduwa, load word alternate */
4177 #ifndef TARGET_SPARC64
4180 if (!supervisor(dc
))
4183 save_state(dc
, cpu_cond
);
4184 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4186 case 0x11: /* lduba, load unsigned byte alternate */
4187 #ifndef TARGET_SPARC64
4190 if (!supervisor(dc
))
4193 save_state(dc
, cpu_cond
);
4194 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4196 case 0x12: /* lduha, load unsigned halfword alternate */
4197 #ifndef TARGET_SPARC64
4200 if (!supervisor(dc
))
4203 save_state(dc
, cpu_cond
);
4204 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4206 case 0x13: /* ldda, load double word alternate */
4207 #ifndef TARGET_SPARC64
4210 if (!supervisor(dc
))
4215 save_state(dc
, cpu_cond
);
4216 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4218 case 0x19: /* ldsba, load signed byte alternate */
4219 #ifndef TARGET_SPARC64
4222 if (!supervisor(dc
))
4225 save_state(dc
, cpu_cond
);
4226 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4228 case 0x1a: /* ldsha, load signed halfword alternate */
4229 #ifndef TARGET_SPARC64
4232 if (!supervisor(dc
))
4235 save_state(dc
, cpu_cond
);
4236 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4238 case 0x1d: /* ldstuba -- XXX: should be atomically */
4239 #ifndef TARGET_SPARC64
4242 if (!supervisor(dc
))
4245 save_state(dc
, cpu_cond
);
4246 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4248 case 0x1f: /* swapa, swap reg with alt. memory. Also
4250 CHECK_IU_FEATURE(dc
, SWAP
);
4251 #ifndef TARGET_SPARC64
4254 if (!supervisor(dc
))
4257 save_state(dc
, cpu_cond
);
4258 gen_movl_reg_TN(rd
, cpu_val
);
4259 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4262 #ifndef TARGET_SPARC64
4263 case 0x30: /* ldc */
4264 case 0x31: /* ldcsr */
4265 case 0x33: /* lddc */
4269 #ifdef TARGET_SPARC64
4270 case 0x08: /* V9 ldsw */
4271 gen_address_mask(dc
, cpu_addr
);
4272 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4274 case 0x0b: /* V9 ldx */
4275 gen_address_mask(dc
, cpu_addr
);
4276 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4278 case 0x18: /* V9 ldswa */
4279 save_state(dc
, cpu_cond
);
4280 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4282 case 0x1b: /* V9 ldxa */
4283 save_state(dc
, cpu_cond
);
4284 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4286 case 0x2d: /* V9 prefetch, no effect */
4288 case 0x30: /* V9 ldfa */
4289 save_state(dc
, cpu_cond
);
4290 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4292 case 0x33: /* V9 lddfa */
4293 save_state(dc
, cpu_cond
);
4294 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4296 case 0x3d: /* V9 prefetcha, no effect */
4298 case 0x32: /* V9 ldqfa */
4299 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4300 save_state(dc
, cpu_cond
);
4301 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4307 gen_movl_TN_reg(rd
, cpu_val
);
4308 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4311 } else if (xop
>= 0x20 && xop
< 0x24) {
4312 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4314 save_state(dc
, cpu_cond
);
4316 case 0x20: /* ldf, load fpreg */
4317 gen_address_mask(dc
, cpu_addr
);
4318 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4319 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4321 case 0x21: /* ldfsr, V9 ldxfsr */
4322 #ifdef TARGET_SPARC64
4323 gen_address_mask(dc
, cpu_addr
);
4325 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4326 gen_helper_ldxfsr(cpu_tmp64
);
4330 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4331 gen_helper_ldfsr(cpu_tmp32
);
4335 case 0x22: /* ldqf, load quad fpreg */
4339 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4340 r_const
= tcg_const_i32(dc
->mem_idx
);
4341 gen_helper_ldqf(cpu_addr
, r_const
);
4342 tcg_temp_free_i32(r_const
);
4343 gen_op_store_QT0_fpr(QFPREG(rd
));
4346 case 0x23: /* lddf, load double fpreg */
4350 r_const
= tcg_const_i32(dc
->mem_idx
);
4351 gen_helper_lddf(cpu_addr
, r_const
);
4352 tcg_temp_free_i32(r_const
);
4353 gen_op_store_DT0_fpr(DFPREG(rd
));
4359 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4360 xop
== 0xe || xop
== 0x1e) {
4361 gen_movl_reg_TN(rd
, cpu_val
);
4363 case 0x4: /* st, store word */
4364 gen_address_mask(dc
, cpu_addr
);
4365 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4367 case 0x5: /* stb, store byte */
4368 gen_address_mask(dc
, cpu_addr
);
4369 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4371 case 0x6: /* sth, store halfword */
4372 gen_address_mask(dc
, cpu_addr
);
4373 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4375 case 0x7: /* std, store double word */
4381 save_state(dc
, cpu_cond
);
4382 gen_address_mask(dc
, cpu_addr
);
4383 r_const
= tcg_const_i32(7);
4384 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4385 tcg_temp_free_i32(r_const
);
4386 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4387 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4388 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4391 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4392 case 0x14: /* sta, V9 stwa, store word alternate */
4393 #ifndef TARGET_SPARC64
4396 if (!supervisor(dc
))
4399 save_state(dc
, cpu_cond
);
4400 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4402 case 0x15: /* stba, store byte alternate */
4403 #ifndef TARGET_SPARC64
4406 if (!supervisor(dc
))
4409 save_state(dc
, cpu_cond
);
4410 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4412 case 0x16: /* stha, store halfword alternate */
4413 #ifndef TARGET_SPARC64
4416 if (!supervisor(dc
))
4419 save_state(dc
, cpu_cond
);
4420 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4422 case 0x17: /* stda, store double word alternate */
4423 #ifndef TARGET_SPARC64
4426 if (!supervisor(dc
))
4432 save_state(dc
, cpu_cond
);
4433 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4437 #ifdef TARGET_SPARC64
4438 case 0x0e: /* V9 stx */
4439 gen_address_mask(dc
, cpu_addr
);
4440 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4442 case 0x1e: /* V9 stxa */
4443 save_state(dc
, cpu_cond
);
4444 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4450 } else if (xop
> 0x23 && xop
< 0x28) {
4451 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4453 save_state(dc
, cpu_cond
);
4455 case 0x24: /* stf, store fpreg */
4456 gen_address_mask(dc
, cpu_addr
);
4457 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4458 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4460 case 0x25: /* stfsr, V9 stxfsr */
4461 #ifdef TARGET_SPARC64
4462 gen_address_mask(dc
, cpu_addr
);
4463 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4465 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4467 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4469 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4470 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4474 #ifdef TARGET_SPARC64
4475 /* V9 stqf, store quad fpreg */
4479 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4480 gen_op_load_fpr_QT0(QFPREG(rd
));
4481 r_const
= tcg_const_i32(dc
->mem_idx
);
4482 gen_helper_stqf(cpu_addr
, r_const
);
4483 tcg_temp_free_i32(r_const
);
4486 #else /* !TARGET_SPARC64 */
4487 /* stdfq, store floating point queue */
4488 #if defined(CONFIG_USER_ONLY)
4491 if (!supervisor(dc
))
4493 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4498 case 0x27: /* stdf, store double fpreg */
4502 gen_op_load_fpr_DT0(DFPREG(rd
));
4503 r_const
= tcg_const_i32(dc
->mem_idx
);
4504 gen_helper_stdf(cpu_addr
, r_const
);
4505 tcg_temp_free_i32(r_const
);
4511 } else if (xop
> 0x33 && xop
< 0x3f) {
4512 save_state(dc
, cpu_cond
);
4514 #ifdef TARGET_SPARC64
4515 case 0x34: /* V9 stfa */
4516 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4518 case 0x36: /* V9 stqfa */
4522 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4523 r_const
= tcg_const_i32(7);
4524 gen_helper_check_align(cpu_addr
, r_const
);
4525 tcg_temp_free_i32(r_const
);
4526 gen_op_load_fpr_QT0(QFPREG(rd
));
4527 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4530 case 0x37: /* V9 stdfa */
4531 gen_op_load_fpr_DT0(DFPREG(rd
));
4532 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4534 case 0x3c: /* V9 casa */
4535 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4536 gen_movl_TN_reg(rd
, cpu_val
);
4538 case 0x3e: /* V9 casxa */
4539 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4540 gen_movl_TN_reg(rd
, cpu_val
);
4543 case 0x34: /* stc */
4544 case 0x35: /* stcsr */
4545 case 0x36: /* stdcq */
4546 case 0x37: /* stdc */
4557 /* default case for non jump instructions */
4558 if (dc
->npc
== DYNAMIC_PC
) {
4559 dc
->pc
= DYNAMIC_PC
;
4561 } else if (dc
->npc
== JUMP_PC
) {
4562 /* we can do a static jump */
4563 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4567 dc
->npc
= dc
->npc
+ 4;
4575 save_state(dc
, cpu_cond
);
4576 r_const
= tcg_const_i32(TT_ILL_INSN
);
4577 gen_helper_raise_exception(r_const
);
4578 tcg_temp_free_i32(r_const
);
4586 save_state(dc
, cpu_cond
);
4587 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4588 gen_helper_raise_exception(r_const
);
4589 tcg_temp_free_i32(r_const
);
4593 #if !defined(CONFIG_USER_ONLY)
4598 save_state(dc
, cpu_cond
);
4599 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4600 gen_helper_raise_exception(r_const
);
4601 tcg_temp_free_i32(r_const
);
4607 save_state(dc
, cpu_cond
);
4608 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4611 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4613 save_state(dc
, cpu_cond
);
4614 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4618 #ifndef TARGET_SPARC64
4623 save_state(dc
, cpu_cond
);
4624 r_const
= tcg_const_i32(TT_NCP_INSN
);
4625 gen_helper_raise_exception(r_const
);
4626 tcg_temp_free(r_const
);
4633 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4634 int spc
, CPUSPARCState
*env
)
4636 target_ulong pc_start
, last_pc
;
4637 uint16_t *gen_opc_end
;
4638 DisasContext dc1
, *dc
= &dc1
;
4644 memset(dc
, 0, sizeof(DisasContext
));
4649 dc
->npc
= (target_ulong
) tb
->cs_base
;
4650 dc
->cc_op
= CC_OP_DYNAMIC
;
4651 dc
->mem_idx
= cpu_mmu_index(env
);
4653 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4654 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4656 dc
->fpu_enabled
= 0;
4657 #ifdef TARGET_SPARC64
4658 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4660 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4662 cpu_tmp0
= tcg_temp_new();
4663 cpu_tmp32
= tcg_temp_new_i32();
4664 cpu_tmp64
= tcg_temp_new_i64();
4666 cpu_dst
= tcg_temp_local_new();
4669 cpu_val
= tcg_temp_local_new();
4670 cpu_addr
= tcg_temp_local_new();
4673 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4675 max_insns
= CF_COUNT_MASK
;
4678 if (unlikely(!TAILQ_EMPTY(&env
->breakpoints
))) {
4679 TAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4680 if (bp
->pc
== dc
->pc
) {
4681 if (dc
->pc
!= pc_start
)
4682 save_state(dc
, cpu_cond
);
4691 qemu_log("Search PC...\n");
4692 j
= gen_opc_ptr
- gen_opc_buf
;
4696 gen_opc_instr_start
[lj
++] = 0;
4697 gen_opc_pc
[lj
] = dc
->pc
;
4698 gen_opc_npc
[lj
] = dc
->npc
;
4699 gen_opc_instr_start
[lj
] = 1;
4700 gen_opc_icount
[lj
] = num_insns
;
4703 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4706 disas_sparc_insn(dc
);
4711 /* if the next PC is different, we abort now */
4712 if (dc
->pc
!= (last_pc
+ 4))
4714 /* if we reach a page boundary, we stop generation so that the
4715 PC of a TT_TFAULT exception is always in the right page */
4716 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4718 /* if single step mode, we generate only one instruction and
4719 generate an exception */
4720 if (env
->singlestep_enabled
|| singlestep
) {
4721 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4725 } while ((gen_opc_ptr
< gen_opc_end
) &&
4726 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4727 num_insns
< max_insns
);
4730 tcg_temp_free(cpu_addr
);
4731 tcg_temp_free(cpu_val
);
4732 tcg_temp_free(cpu_dst
);
4733 tcg_temp_free_i64(cpu_tmp64
);
4734 tcg_temp_free_i32(cpu_tmp32
);
4735 tcg_temp_free(cpu_tmp0
);
4736 if (tb
->cflags
& CF_LAST_IO
)
4739 if (dc
->pc
!= DYNAMIC_PC
&&
4740 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4741 /* static PC and NPC: we can use direct chaining */
4742 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4744 if (dc
->pc
!= DYNAMIC_PC
)
4745 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4746 save_npc(dc
, cpu_cond
);
4750 gen_icount_end(tb
, num_insns
);
4751 *gen_opc_ptr
= INDEX_op_end
;
4753 j
= gen_opc_ptr
- gen_opc_buf
;
4756 gen_opc_instr_start
[lj
++] = 0;
4760 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4761 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4763 tb
->size
= last_pc
+ 4 - pc_start
;
4764 tb
->icount
= num_insns
;
4767 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
4768 qemu_log("--------------\n");
4769 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
4770 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
4776 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4778 gen_intermediate_code_internal(tb
, 0, env
);
4781 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4783 gen_intermediate_code_internal(tb
, 1, env
);
4786 void gen_intermediate_code_init(CPUSPARCState
*env
)
4790 static const char * const gregnames
[8] = {
4791 NULL
, // g0 not used
4800 static const char * const fregnames
[64] = {
4801 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4802 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4803 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4804 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4805 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4806 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4807 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4808 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4811 /* init various static tables */
4815 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
4816 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
4817 offsetof(CPUState
, regwptr
),
4819 #ifdef TARGET_SPARC64
4820 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
4822 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
4824 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
4826 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
4828 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4829 offsetof(CPUState
, tick_cmpr
),
4831 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4832 offsetof(CPUState
, stick_cmpr
),
4834 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
4835 offsetof(CPUState
, hstick_cmpr
),
4837 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
4839 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
4841 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
4843 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
4844 offsetof(CPUState
, ssr
), "ssr");
4845 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
4846 offsetof(CPUState
, version
), "ver");
4847 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
4848 offsetof(CPUState
, softint
),
4851 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
4854 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
4856 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
4858 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
4859 offsetof(CPUState
, cc_src2
),
4861 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4863 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
4865 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
4867 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
4869 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
4871 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
4873 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
4874 #ifndef CONFIG_USER_ONLY
4875 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
4878 for (i
= 1; i
< 8; i
++)
4879 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
4880 offsetof(CPUState
, gregs
[i
]),
4882 for (i
= 0; i
< TARGET_FPREGS
; i
++)
4883 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
4884 offsetof(CPUState
, fpr
[i
]),
4887 /* register helpers */
4889 #define GEN_HELPER 2
4894 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
4895 unsigned long searched_pc
, int pc_pos
, void *puc
)
4898 env
->pc
= gen_opc_pc
[pc_pos
];
4899 npc
= gen_opc_npc
[pc_pos
];
4901 /* dynamic NPC: already stored */
4902 } else if (npc
== 2) {
4903 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
4904 /* jump PC: use T2 and the jump targets of the translation */
4906 env
->npc
= gen_opc_jump_pc
[0];
4908 env
->npc
= gen_opc_jump_pc
[1];