/* Imported from qemu-kvm/fedora.git: target-sparc/translate.c
 * (blob cd22f2bf8f0b1ebcfb36677bfe6537e8586962d9; tree last touched by commit
 * "Only shutdown video subsystem in sdl_cleanup")
 */
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define DEBUG_DISAS
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
81 sparc_def_t *def;
82 } DisasContext;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
106 static int sign_extend(int x, int len)
108 len = 32 - len;
109 return (x << len) >> len;
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
200 if (AM_CHECK(dc))
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
207 if (reg == 0)
208 tcg_gen_movi_tl(tn, 0);
209 else if (reg < 8)
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211 else {
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
218 if (reg == 0)
219 return;
220 else if (reg < 8)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
222 else {
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
232 tb = s->tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
239 tcg_gen_exit_tb((long)tb + tb_num);
240 } else {
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb(0);
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
277 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
279 TCGv r_temp;
280 TCGv_i32 r_const;
281 int l1;
283 l1 = gen_new_label();
285 r_temp = tcg_temp_new();
286 tcg_gen_xor_tl(r_temp, src1, src2);
287 tcg_gen_not_tl(r_temp, r_temp);
288 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
289 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
290 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
291 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
292 r_const = tcg_const_i32(TT_TOVF);
293 gen_helper_raise_exception(r_const);
294 tcg_temp_free_i32(r_const);
295 gen_set_label(l1);
296 tcg_temp_free(r_temp);
299 static inline void gen_tag_tv(TCGv src1, TCGv src2)
301 int l1;
302 TCGv_i32 r_const;
304 l1 = gen_new_label();
305 tcg_gen_or_tl(cpu_tmp0, src1, src2);
306 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
307 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
308 r_const = tcg_const_i32(TT_TOVF);
309 gen_helper_raise_exception(r_const);
310 tcg_temp_free_i32(r_const);
311 gen_set_label(l1);
314 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
316 tcg_gen_mov_tl(cpu_cc_src, src1);
317 tcg_gen_movi_tl(cpu_cc_src2, src2);
318 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
319 tcg_gen_mov_tl(dst, cpu_cc_dst);
322 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
324 tcg_gen_mov_tl(cpu_cc_src, src1);
325 tcg_gen_mov_tl(cpu_cc_src2, src2);
326 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
327 tcg_gen_mov_tl(dst, cpu_cc_dst);
330 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
332 tcg_gen_mov_tl(cpu_cc_src, src1);
333 tcg_gen_movi_tl(cpu_cc_src2, src2);
334 gen_mov_reg_C(cpu_tmp0, cpu_psr);
335 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
336 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
337 tcg_gen_mov_tl(dst, cpu_cc_dst);
340 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
342 tcg_gen_mov_tl(cpu_cc_src, src1);
343 tcg_gen_mov_tl(cpu_cc_src2, src2);
344 gen_mov_reg_C(cpu_tmp0, cpu_psr);
345 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
346 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
347 tcg_gen_mov_tl(dst, cpu_cc_dst);
350 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
352 tcg_gen_mov_tl(cpu_cc_src, src1);
353 tcg_gen_mov_tl(cpu_cc_src2, src2);
354 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
355 tcg_gen_mov_tl(dst, cpu_cc_dst);
358 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
360 tcg_gen_mov_tl(cpu_cc_src, src1);
361 tcg_gen_mov_tl(cpu_cc_src2, src2);
362 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
363 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
364 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
365 tcg_gen_mov_tl(dst, cpu_cc_dst);
368 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
370 TCGv r_temp;
371 TCGv_i32 r_const;
372 int l1;
374 l1 = gen_new_label();
376 r_temp = tcg_temp_new();
377 tcg_gen_xor_tl(r_temp, src1, src2);
378 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
379 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
380 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
381 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
382 r_const = tcg_const_i32(TT_TOVF);
383 gen_helper_raise_exception(r_const);
384 tcg_temp_free_i32(r_const);
385 gen_set_label(l1);
386 tcg_temp_free(r_temp);
389 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
391 tcg_gen_mov_tl(cpu_cc_src, src1);
392 tcg_gen_movi_tl(cpu_cc_src2, src2);
393 if (src2 == 0) {
394 tcg_gen_mov_tl(cpu_cc_dst, src1);
395 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
396 dc->cc_op = CC_OP_LOGIC;
397 } else {
398 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
399 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
400 dc->cc_op = CC_OP_SUB;
402 tcg_gen_mov_tl(dst, cpu_cc_dst);
405 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
407 tcg_gen_mov_tl(cpu_cc_src, src1);
408 tcg_gen_mov_tl(cpu_cc_src2, src2);
409 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
410 tcg_gen_mov_tl(dst, cpu_cc_dst);
413 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
415 tcg_gen_mov_tl(cpu_cc_src, src1);
416 tcg_gen_movi_tl(cpu_cc_src2, src2);
417 gen_mov_reg_C(cpu_tmp0, cpu_psr);
418 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
419 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
420 tcg_gen_mov_tl(dst, cpu_cc_dst);
423 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
425 tcg_gen_mov_tl(cpu_cc_src, src1);
426 tcg_gen_mov_tl(cpu_cc_src2, src2);
427 gen_mov_reg_C(cpu_tmp0, cpu_psr);
428 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
429 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
430 tcg_gen_mov_tl(dst, cpu_cc_dst);
433 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
435 tcg_gen_mov_tl(cpu_cc_src, src1);
436 tcg_gen_mov_tl(cpu_cc_src2, src2);
437 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
438 tcg_gen_mov_tl(dst, cpu_cc_dst);
441 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
443 tcg_gen_mov_tl(cpu_cc_src, src1);
444 tcg_gen_mov_tl(cpu_cc_src2, src2);
445 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
446 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
447 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 tcg_gen_mov_tl(dst, cpu_cc_dst);
451 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
453 TCGv r_temp;
454 int l1;
456 l1 = gen_new_label();
457 r_temp = tcg_temp_new();
459 /* old op:
460 if (!(env->y & 1))
461 T1 = 0;
463 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
464 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
465 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
466 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
467 tcg_gen_movi_tl(cpu_cc_src2, 0);
468 gen_set_label(l1);
470 // b2 = T0 & 1;
471 // env->y = (b2 << 31) | (env->y >> 1);
472 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
473 tcg_gen_shli_tl(r_temp, r_temp, 31);
474 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
475 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
476 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
477 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
479 // b1 = N ^ V;
480 gen_mov_reg_N(cpu_tmp0, cpu_psr);
481 gen_mov_reg_V(r_temp, cpu_psr);
482 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
483 tcg_temp_free(r_temp);
485 // T0 = (b1 << 31) | (T0 >> 1);
486 // src1 = T0;
487 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
488 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
489 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
491 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
493 tcg_gen_mov_tl(dst, cpu_cc_dst);
496 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
498 TCGv_i64 r_temp, r_temp2;
500 r_temp = tcg_temp_new_i64();
501 r_temp2 = tcg_temp_new_i64();
503 tcg_gen_extu_tl_i64(r_temp, src2);
504 tcg_gen_extu_tl_i64(r_temp2, src1);
505 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
507 tcg_gen_shri_i64(r_temp, r_temp2, 32);
508 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
509 tcg_temp_free_i64(r_temp);
510 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
511 #ifdef TARGET_SPARC64
512 tcg_gen_mov_i64(dst, r_temp2);
513 #else
514 tcg_gen_trunc_i64_tl(dst, r_temp2);
515 #endif
516 tcg_temp_free_i64(r_temp2);
519 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
521 TCGv_i64 r_temp, r_temp2;
523 r_temp = tcg_temp_new_i64();
524 r_temp2 = tcg_temp_new_i64();
526 tcg_gen_ext_tl_i64(r_temp, src2);
527 tcg_gen_ext_tl_i64(r_temp2, src1);
528 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
530 tcg_gen_shri_i64(r_temp, r_temp2, 32);
531 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
532 tcg_temp_free_i64(r_temp);
533 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
534 #ifdef TARGET_SPARC64
535 tcg_gen_mov_i64(dst, r_temp2);
536 #else
537 tcg_gen_trunc_i64_tl(dst, r_temp2);
538 #endif
539 tcg_temp_free_i64(r_temp2);
542 #ifdef TARGET_SPARC64
543 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
545 TCGv_i32 r_const;
546 int l1;
548 l1 = gen_new_label();
549 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
550 r_const = tcg_const_i32(TT_DIV_ZERO);
551 gen_helper_raise_exception(r_const);
552 tcg_temp_free_i32(r_const);
553 gen_set_label(l1);
556 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
558 int l1, l2;
560 l1 = gen_new_label();
561 l2 = gen_new_label();
562 tcg_gen_mov_tl(cpu_cc_src, src1);
563 tcg_gen_mov_tl(cpu_cc_src2, src2);
564 gen_trap_ifdivzero_tl(cpu_cc_src2);
565 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
566 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
567 tcg_gen_movi_i64(dst, INT64_MIN);
568 tcg_gen_br(l2);
569 gen_set_label(l1);
570 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
571 gen_set_label(l2);
573 #endif
575 // 1
576 static inline void gen_op_eval_ba(TCGv dst)
578 tcg_gen_movi_tl(dst, 1);
581 // Z
582 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
584 gen_mov_reg_Z(dst, src);
587 // Z | (N ^ V)
588 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
590 gen_mov_reg_N(cpu_tmp0, src);
591 gen_mov_reg_V(dst, src);
592 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
593 gen_mov_reg_Z(cpu_tmp0, src);
594 tcg_gen_or_tl(dst, dst, cpu_tmp0);
597 // N ^ V
598 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
600 gen_mov_reg_V(cpu_tmp0, src);
601 gen_mov_reg_N(dst, src);
602 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
605 // C | Z
606 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
608 gen_mov_reg_Z(cpu_tmp0, src);
609 gen_mov_reg_C(dst, src);
610 tcg_gen_or_tl(dst, dst, cpu_tmp0);
613 // C
614 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
616 gen_mov_reg_C(dst, src);
619 // V
620 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
622 gen_mov_reg_V(dst, src);
625 // 0
626 static inline void gen_op_eval_bn(TCGv dst)
628 tcg_gen_movi_tl(dst, 0);
631 // N
632 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
634 gen_mov_reg_N(dst, src);
637 // !Z
638 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
640 gen_mov_reg_Z(dst, src);
641 tcg_gen_xori_tl(dst, dst, 0x1);
644 // !(Z | (N ^ V))
645 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
647 gen_mov_reg_N(cpu_tmp0, src);
648 gen_mov_reg_V(dst, src);
649 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
650 gen_mov_reg_Z(cpu_tmp0, src);
651 tcg_gen_or_tl(dst, dst, cpu_tmp0);
652 tcg_gen_xori_tl(dst, dst, 0x1);
655 // !(N ^ V)
656 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
658 gen_mov_reg_V(cpu_tmp0, src);
659 gen_mov_reg_N(dst, src);
660 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
661 tcg_gen_xori_tl(dst, dst, 0x1);
664 // !(C | Z)
665 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
667 gen_mov_reg_Z(cpu_tmp0, src);
668 gen_mov_reg_C(dst, src);
669 tcg_gen_or_tl(dst, dst, cpu_tmp0);
670 tcg_gen_xori_tl(dst, dst, 0x1);
673 // !C
674 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
676 gen_mov_reg_C(dst, src);
677 tcg_gen_xori_tl(dst, dst, 0x1);
680 // !N
681 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
683 gen_mov_reg_N(dst, src);
684 tcg_gen_xori_tl(dst, dst, 0x1);
687 // !V
688 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
690 gen_mov_reg_V(dst, src);
691 tcg_gen_xori_tl(dst, dst, 0x1);
695 FPSR bit field FCC1 | FCC0:
699 3 unordered
701 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
702 unsigned int fcc_offset)
704 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
705 tcg_gen_andi_tl(reg, reg, 0x1);
708 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
709 unsigned int fcc_offset)
711 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
712 tcg_gen_andi_tl(reg, reg, 0x1);
715 // !0: FCC0 | FCC1
716 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
717 unsigned int fcc_offset)
719 gen_mov_reg_FCC0(dst, src, fcc_offset);
720 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
721 tcg_gen_or_tl(dst, dst, cpu_tmp0);
724 // 1 or 2: FCC0 ^ FCC1
725 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
726 unsigned int fcc_offset)
728 gen_mov_reg_FCC0(dst, src, fcc_offset);
729 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
730 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
733 // 1 or 3: FCC0
734 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
735 unsigned int fcc_offset)
737 gen_mov_reg_FCC0(dst, src, fcc_offset);
740 // 1: FCC0 & !FCC1
741 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
742 unsigned int fcc_offset)
744 gen_mov_reg_FCC0(dst, src, fcc_offset);
745 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
746 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
747 tcg_gen_and_tl(dst, dst, cpu_tmp0);
750 // 2 or 3: FCC1
751 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
752 unsigned int fcc_offset)
754 gen_mov_reg_FCC1(dst, src, fcc_offset);
757 // 2: !FCC0 & FCC1
758 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
759 unsigned int fcc_offset)
761 gen_mov_reg_FCC0(dst, src, fcc_offset);
762 tcg_gen_xori_tl(dst, dst, 0x1);
763 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
764 tcg_gen_and_tl(dst, dst, cpu_tmp0);
767 // 3: FCC0 & FCC1
768 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
769 unsigned int fcc_offset)
771 gen_mov_reg_FCC0(dst, src, fcc_offset);
772 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
773 tcg_gen_and_tl(dst, dst, cpu_tmp0);
776 // 0: !(FCC0 | FCC1)
777 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
778 unsigned int fcc_offset)
780 gen_mov_reg_FCC0(dst, src, fcc_offset);
781 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
782 tcg_gen_or_tl(dst, dst, cpu_tmp0);
783 tcg_gen_xori_tl(dst, dst, 0x1);
786 // 0 or 3: !(FCC0 ^ FCC1)
787 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
788 unsigned int fcc_offset)
790 gen_mov_reg_FCC0(dst, src, fcc_offset);
791 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
792 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
793 tcg_gen_xori_tl(dst, dst, 0x1);
796 // 0 or 2: !FCC0
797 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
798 unsigned int fcc_offset)
800 gen_mov_reg_FCC0(dst, src, fcc_offset);
801 tcg_gen_xori_tl(dst, dst, 0x1);
804 // !1: !(FCC0 & !FCC1)
805 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
806 unsigned int fcc_offset)
808 gen_mov_reg_FCC0(dst, src, fcc_offset);
809 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
810 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
811 tcg_gen_and_tl(dst, dst, cpu_tmp0);
812 tcg_gen_xori_tl(dst, dst, 0x1);
815 // 0 or 1: !FCC1
816 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
817 unsigned int fcc_offset)
819 gen_mov_reg_FCC1(dst, src, fcc_offset);
820 tcg_gen_xori_tl(dst, dst, 0x1);
823 // !2: !(!FCC0 & FCC1)
824 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
825 unsigned int fcc_offset)
827 gen_mov_reg_FCC0(dst, src, fcc_offset);
828 tcg_gen_xori_tl(dst, dst, 0x1);
829 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
830 tcg_gen_and_tl(dst, dst, cpu_tmp0);
831 tcg_gen_xori_tl(dst, dst, 0x1);
834 // !3: !(FCC0 & FCC1)
835 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
836 unsigned int fcc_offset)
838 gen_mov_reg_FCC0(dst, src, fcc_offset);
839 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
840 tcg_gen_and_tl(dst, dst, cpu_tmp0);
841 tcg_gen_xori_tl(dst, dst, 0x1);
844 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
845 target_ulong pc2, TCGv r_cond)
847 int l1;
849 l1 = gen_new_label();
851 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
853 gen_goto_tb(dc, 0, pc1, pc1 + 4);
855 gen_set_label(l1);
856 gen_goto_tb(dc, 1, pc2, pc2 + 4);
859 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
860 target_ulong pc2, TCGv r_cond)
862 int l1;
864 l1 = gen_new_label();
866 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
868 gen_goto_tb(dc, 0, pc2, pc1);
870 gen_set_label(l1);
871 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
874 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
875 TCGv r_cond)
877 int l1, l2;
879 l1 = gen_new_label();
880 l2 = gen_new_label();
882 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
884 tcg_gen_movi_tl(cpu_npc, npc1);
885 tcg_gen_br(l2);
887 gen_set_label(l1);
888 tcg_gen_movi_tl(cpu_npc, npc2);
889 gen_set_label(l2);
892 /* call this function before using the condition register as it may
893 have been set for a jump */
894 static inline void flush_cond(DisasContext *dc, TCGv cond)
896 if (dc->npc == JUMP_PC) {
897 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
898 dc->npc = DYNAMIC_PC;
902 static inline void save_npc(DisasContext *dc, TCGv cond)
904 if (dc->npc == JUMP_PC) {
905 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
906 dc->npc = DYNAMIC_PC;
907 } else if (dc->npc != DYNAMIC_PC) {
908 tcg_gen_movi_tl(cpu_npc, dc->npc);
912 static inline void save_state(DisasContext *dc, TCGv cond)
914 tcg_gen_movi_tl(cpu_pc, dc->pc);
915 save_npc(dc, cond);
918 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
920 if (dc->npc == JUMP_PC) {
921 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
922 tcg_gen_mov_tl(cpu_pc, cpu_npc);
923 dc->pc = DYNAMIC_PC;
924 } else if (dc->npc == DYNAMIC_PC) {
925 tcg_gen_mov_tl(cpu_pc, cpu_npc);
926 dc->pc = DYNAMIC_PC;
927 } else {
928 dc->pc = dc->npc;
932 static inline void gen_op_next_insn(void)
934 tcg_gen_mov_tl(cpu_pc, cpu_npc);
935 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
938 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
939 DisasContext *dc)
941 TCGv_i32 r_src;
943 #ifdef TARGET_SPARC64
944 if (cc)
945 r_src = cpu_xcc;
946 else
947 r_src = cpu_psr;
948 #else
949 r_src = cpu_psr;
950 #endif
951 switch (dc->cc_op) {
952 case CC_OP_FLAGS:
953 break;
954 default:
955 gen_helper_compute_psr();
956 dc->cc_op = CC_OP_FLAGS;
957 break;
959 switch (cond) {
960 case 0x0:
961 gen_op_eval_bn(r_dst);
962 break;
963 case 0x1:
964 gen_op_eval_be(r_dst, r_src);
965 break;
966 case 0x2:
967 gen_op_eval_ble(r_dst, r_src);
968 break;
969 case 0x3:
970 gen_op_eval_bl(r_dst, r_src);
971 break;
972 case 0x4:
973 gen_op_eval_bleu(r_dst, r_src);
974 break;
975 case 0x5:
976 gen_op_eval_bcs(r_dst, r_src);
977 break;
978 case 0x6:
979 gen_op_eval_bneg(r_dst, r_src);
980 break;
981 case 0x7:
982 gen_op_eval_bvs(r_dst, r_src);
983 break;
984 case 0x8:
985 gen_op_eval_ba(r_dst);
986 break;
987 case 0x9:
988 gen_op_eval_bne(r_dst, r_src);
989 break;
990 case 0xa:
991 gen_op_eval_bg(r_dst, r_src);
992 break;
993 case 0xb:
994 gen_op_eval_bge(r_dst, r_src);
995 break;
996 case 0xc:
997 gen_op_eval_bgu(r_dst, r_src);
998 break;
999 case 0xd:
1000 gen_op_eval_bcc(r_dst, r_src);
1001 break;
1002 case 0xe:
1003 gen_op_eval_bpos(r_dst, r_src);
1004 break;
1005 case 0xf:
1006 gen_op_eval_bvc(r_dst, r_src);
1007 break;
1011 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1013 unsigned int offset;
1015 switch (cc) {
1016 default:
1017 case 0x0:
1018 offset = 0;
1019 break;
1020 case 0x1:
1021 offset = 32 - 10;
1022 break;
1023 case 0x2:
1024 offset = 34 - 10;
1025 break;
1026 case 0x3:
1027 offset = 36 - 10;
1028 break;
1031 switch (cond) {
1032 case 0x0:
1033 gen_op_eval_bn(r_dst);
1034 break;
1035 case 0x1:
1036 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1037 break;
1038 case 0x2:
1039 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1040 break;
1041 case 0x3:
1042 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1043 break;
1044 case 0x4:
1045 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1046 break;
1047 case 0x5:
1048 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1049 break;
1050 case 0x6:
1051 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1052 break;
1053 case 0x7:
1054 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1055 break;
1056 case 0x8:
1057 gen_op_eval_ba(r_dst);
1058 break;
1059 case 0x9:
1060 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1061 break;
1062 case 0xa:
1063 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1064 break;
1065 case 0xb:
1066 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1067 break;
1068 case 0xc:
1069 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1070 break;
1071 case 0xd:
1072 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1073 break;
1074 case 0xe:
1075 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1076 break;
1077 case 0xf:
1078 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1079 break;
1083 #ifdef TARGET_SPARC64
1084 // Inverted logic
1085 static const int gen_tcg_cond_reg[8] = {
1087 TCG_COND_NE,
1088 TCG_COND_GT,
1089 TCG_COND_GE,
1091 TCG_COND_EQ,
1092 TCG_COND_LE,
1093 TCG_COND_LT,
1096 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1098 int l1;
1100 l1 = gen_new_label();
1101 tcg_gen_movi_tl(r_dst, 0);
1102 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1103 tcg_gen_movi_tl(r_dst, 1);
1104 gen_set_label(l1);
1106 #endif
1108 /* XXX: potentially incorrect if dynamic npc */
1109 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1110 TCGv r_cond)
1112 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1113 target_ulong target = dc->pc + offset;
1115 if (cond == 0x0) {
1116 /* unconditional not taken */
1117 if (a) {
1118 dc->pc = dc->npc + 4;
1119 dc->npc = dc->pc + 4;
1120 } else {
1121 dc->pc = dc->npc;
1122 dc->npc = dc->pc + 4;
1124 } else if (cond == 0x8) {
1125 /* unconditional taken */
1126 if (a) {
1127 dc->pc = target;
1128 dc->npc = dc->pc + 4;
1129 } else {
1130 dc->pc = dc->npc;
1131 dc->npc = target;
1133 } else {
1134 flush_cond(dc, r_cond);
1135 gen_cond(r_cond, cc, cond, dc);
1136 if (a) {
1137 gen_branch_a(dc, target, dc->npc, r_cond);
1138 dc->is_br = 1;
1139 } else {
1140 dc->pc = dc->npc;
1141 dc->jump_pc[0] = target;
1142 dc->jump_pc[1] = dc->npc + 4;
1143 dc->npc = JUMP_PC;
1148 /* XXX: potentially incorrect if dynamic npc */
1149 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1150 TCGv r_cond)
1152 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1153 target_ulong target = dc->pc + offset;
1155 if (cond == 0x0) {
1156 /* unconditional not taken */
1157 if (a) {
1158 dc->pc = dc->npc + 4;
1159 dc->npc = dc->pc + 4;
1160 } else {
1161 dc->pc = dc->npc;
1162 dc->npc = dc->pc + 4;
1164 } else if (cond == 0x8) {
1165 /* unconditional taken */
1166 if (a) {
1167 dc->pc = target;
1168 dc->npc = dc->pc + 4;
1169 } else {
1170 dc->pc = dc->npc;
1171 dc->npc = target;
1173 } else {
1174 flush_cond(dc, r_cond);
1175 gen_fcond(r_cond, cc, cond);
1176 if (a) {
1177 gen_branch_a(dc, target, dc->npc, r_cond);
1178 dc->is_br = 1;
1179 } else {
1180 dc->pc = dc->npc;
1181 dc->jump_pc[0] = target;
1182 dc->jump_pc[1] = dc->npc + 4;
1183 dc->npc = JUMP_PC;
1188 #ifdef TARGET_SPARC64
1189 /* XXX: potentially incorrect if dynamic npc */
1190 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1191 TCGv r_cond, TCGv r_reg)
1193 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1194 target_ulong target = dc->pc + offset;
1196 flush_cond(dc, r_cond);
1197 gen_cond_reg(r_cond, cond, r_reg);
1198 if (a) {
1199 gen_branch_a(dc, target, dc->npc, r_cond);
1200 dc->is_br = 1;
1201 } else {
1202 dc->pc = dc->npc;
1203 dc->jump_pc[0] = target;
1204 dc->jump_pc[1] = dc->npc + 4;
1205 dc->npc = JUMP_PC;
1209 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1211 switch (fccno) {
1212 case 0:
1213 gen_helper_fcmps(r_rs1, r_rs2);
1214 break;
1215 case 1:
1216 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1217 break;
1218 case 2:
1219 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1220 break;
1221 case 3:
1222 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1223 break;
1227 static inline void gen_op_fcmpd(int fccno)
1229 switch (fccno) {
1230 case 0:
1231 gen_helper_fcmpd();
1232 break;
1233 case 1:
1234 gen_helper_fcmpd_fcc1();
1235 break;
1236 case 2:
1237 gen_helper_fcmpd_fcc2();
1238 break;
1239 case 3:
1240 gen_helper_fcmpd_fcc3();
1241 break;
1245 static inline void gen_op_fcmpq(int fccno)
1247 switch (fccno) {
1248 case 0:
1249 gen_helper_fcmpq();
1250 break;
1251 case 1:
1252 gen_helper_fcmpq_fcc1();
1253 break;
1254 case 2:
1255 gen_helper_fcmpq_fcc2();
1256 break;
1257 case 3:
1258 gen_helper_fcmpq_fcc3();
1259 break;
1263 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1265 switch (fccno) {
1266 case 0:
1267 gen_helper_fcmpes(r_rs1, r_rs2);
1268 break;
1269 case 1:
1270 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1271 break;
1272 case 2:
1273 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1274 break;
1275 case 3:
1276 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1277 break;
1281 static inline void gen_op_fcmped(int fccno)
1283 switch (fccno) {
1284 case 0:
1285 gen_helper_fcmped();
1286 break;
1287 case 1:
1288 gen_helper_fcmped_fcc1();
1289 break;
1290 case 2:
1291 gen_helper_fcmped_fcc2();
1292 break;
1293 case 3:
1294 gen_helper_fcmped_fcc3();
1295 break;
1299 static inline void gen_op_fcmpeq(int fccno)
1301 switch (fccno) {
1302 case 0:
1303 gen_helper_fcmpeq();
1304 break;
1305 case 1:
1306 gen_helper_fcmpeq_fcc1();
1307 break;
1308 case 2:
1309 gen_helper_fcmpeq_fcc2();
1310 break;
1311 case 3:
1312 gen_helper_fcmpeq_fcc3();
1313 break;
1317 #else
1319 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1321 gen_helper_fcmps(r_rs1, r_rs2);
1324 static inline void gen_op_fcmpd(int fccno)
1326 gen_helper_fcmpd();
1329 static inline void gen_op_fcmpq(int fccno)
1331 gen_helper_fcmpq();
1334 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1336 gen_helper_fcmpes(r_rs1, r_rs2);
1339 static inline void gen_op_fcmped(int fccno)
1341 gen_helper_fcmped();
1344 static inline void gen_op_fcmpeq(int fccno)
1346 gen_helper_fcmpeq();
1348 #endif
1350 static inline void gen_op_fpexception_im(int fsr_flags)
1352 TCGv_i32 r_const;
1354 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1355 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1356 r_const = tcg_const_i32(TT_FP_EXCP);
1357 gen_helper_raise_exception(r_const);
1358 tcg_temp_free_i32(r_const);
1361 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1363 #if !defined(CONFIG_USER_ONLY)
1364 if (!dc->fpu_enabled) {
1365 TCGv_i32 r_const;
1367 save_state(dc, r_cond);
1368 r_const = tcg_const_i32(TT_NFPU_INSN);
1369 gen_helper_raise_exception(r_const);
1370 tcg_temp_free_i32(r_const);
1371 dc->is_br = 1;
1372 return 1;
1374 #endif
1375 return 0;
1378 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1380 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1383 static inline void gen_clear_float_exceptions(void)
1385 gen_helper_clear_float_exceptions();
1388 /* asi moves */
1389 #ifdef TARGET_SPARC64
1390 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1392 int asi;
1393 TCGv_i32 r_asi;
1395 if (IS_IMM) {
1396 r_asi = tcg_temp_new_i32();
1397 tcg_gen_mov_i32(r_asi, cpu_asi);
1398 } else {
1399 asi = GET_FIELD(insn, 19, 26);
1400 r_asi = tcg_const_i32(asi);
1402 return r_asi;
1405 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1406 int sign)
1408 TCGv_i32 r_asi, r_size, r_sign;
1410 r_asi = gen_get_asi(insn, addr);
1411 r_size = tcg_const_i32(size);
1412 r_sign = tcg_const_i32(sign);
1413 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1414 tcg_temp_free_i32(r_sign);
1415 tcg_temp_free_i32(r_size);
1416 tcg_temp_free_i32(r_asi);
1419 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1421 TCGv_i32 r_asi, r_size;
1423 r_asi = gen_get_asi(insn, addr);
1424 r_size = tcg_const_i32(size);
1425 gen_helper_st_asi(addr, src, r_asi, r_size);
1426 tcg_temp_free_i32(r_size);
1427 tcg_temp_free_i32(r_asi);
1430 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1432 TCGv_i32 r_asi, r_size, r_rd;
1434 r_asi = gen_get_asi(insn, addr);
1435 r_size = tcg_const_i32(size);
1436 r_rd = tcg_const_i32(rd);
1437 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1438 tcg_temp_free_i32(r_rd);
1439 tcg_temp_free_i32(r_size);
1440 tcg_temp_free_i32(r_asi);
1443 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1445 TCGv_i32 r_asi, r_size, r_rd;
1447 r_asi = gen_get_asi(insn, addr);
1448 r_size = tcg_const_i32(size);
1449 r_rd = tcg_const_i32(rd);
1450 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1451 tcg_temp_free_i32(r_rd);
1452 tcg_temp_free_i32(r_size);
1453 tcg_temp_free_i32(r_asi);
1456 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1458 TCGv_i32 r_asi, r_size, r_sign;
1460 r_asi = gen_get_asi(insn, addr);
1461 r_size = tcg_const_i32(4);
1462 r_sign = tcg_const_i32(0);
1463 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1464 tcg_temp_free_i32(r_sign);
1465 gen_helper_st_asi(addr, dst, r_asi, r_size);
1466 tcg_temp_free_i32(r_size);
1467 tcg_temp_free_i32(r_asi);
1468 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1471 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1473 TCGv_i32 r_asi, r_rd;
1475 r_asi = gen_get_asi(insn, addr);
1476 r_rd = tcg_const_i32(rd);
1477 gen_helper_ldda_asi(addr, r_asi, r_rd);
1478 tcg_temp_free_i32(r_rd);
1479 tcg_temp_free_i32(r_asi);
1482 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1484 TCGv_i32 r_asi, r_size;
1486 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1487 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1488 r_asi = gen_get_asi(insn, addr);
1489 r_size = tcg_const_i32(8);
1490 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1491 tcg_temp_free_i32(r_size);
1492 tcg_temp_free_i32(r_asi);
1495 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1496 int rd)
1498 TCGv r_val1;
1499 TCGv_i32 r_asi;
1501 r_val1 = tcg_temp_new();
1502 gen_movl_reg_TN(rd, r_val1);
1503 r_asi = gen_get_asi(insn, addr);
1504 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1505 tcg_temp_free_i32(r_asi);
1506 tcg_temp_free(r_val1);
1509 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1510 int rd)
1512 TCGv_i32 r_asi;
1514 gen_movl_reg_TN(rd, cpu_tmp64);
1515 r_asi = gen_get_asi(insn, addr);
1516 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1517 tcg_temp_free_i32(r_asi);
1520 #elif !defined(CONFIG_USER_ONLY)
1522 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1523 int sign)
1525 TCGv_i32 r_asi, r_size, r_sign;
1527 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1528 r_size = tcg_const_i32(size);
1529 r_sign = tcg_const_i32(sign);
1530 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1531 tcg_temp_free(r_sign);
1532 tcg_temp_free(r_size);
1533 tcg_temp_free(r_asi);
1534 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1537 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1539 TCGv_i32 r_asi, r_size;
1541 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1542 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1543 r_size = tcg_const_i32(size);
1544 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1545 tcg_temp_free(r_size);
1546 tcg_temp_free(r_asi);
1549 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1551 TCGv_i32 r_asi, r_size, r_sign;
1552 TCGv_i64 r_val;
1554 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1555 r_size = tcg_const_i32(4);
1556 r_sign = tcg_const_i32(0);
1557 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1558 tcg_temp_free(r_sign);
1559 r_val = tcg_temp_new_i64();
1560 tcg_gen_extu_tl_i64(r_val, dst);
1561 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1562 tcg_temp_free_i64(r_val);
1563 tcg_temp_free(r_size);
1564 tcg_temp_free(r_asi);
1565 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1568 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1570 TCGv_i32 r_asi, r_size, r_sign;
1572 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1573 r_size = tcg_const_i32(8);
1574 r_sign = tcg_const_i32(0);
1575 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1576 tcg_temp_free(r_sign);
1577 tcg_temp_free(r_size);
1578 tcg_temp_free(r_asi);
1579 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1580 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1581 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1582 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1583 gen_movl_TN_reg(rd, hi);
1586 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1588 TCGv_i32 r_asi, r_size;
1590 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1591 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1592 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1593 r_size = tcg_const_i32(8);
1594 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1595 tcg_temp_free(r_size);
1596 tcg_temp_free(r_asi);
1598 #endif
1600 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1601 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1603 TCGv_i64 r_val;
1604 TCGv_i32 r_asi, r_size;
1606 gen_ld_asi(dst, addr, insn, 1, 0);
1608 r_val = tcg_const_i64(0xffULL);
1609 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1610 r_size = tcg_const_i32(1);
1611 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1612 tcg_temp_free_i32(r_size);
1613 tcg_temp_free_i32(r_asi);
1614 tcg_temp_free_i64(r_val);
1616 #endif
1618 static inline TCGv get_src1(unsigned int insn, TCGv def)
1620 TCGv r_rs1 = def;
1621 unsigned int rs1;
1623 rs1 = GET_FIELD(insn, 13, 17);
1624 if (rs1 == 0)
1625 r_rs1 = tcg_const_tl(0); // XXX how to free?
1626 else if (rs1 < 8)
1627 r_rs1 = cpu_gregs[rs1];
1628 else
1629 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1630 return r_rs1;
1633 static inline TCGv get_src2(unsigned int insn, TCGv def)
1635 TCGv r_rs2 = def;
1637 if (IS_IMM) { /* immediate */
1638 target_long simm;
1640 simm = GET_FIELDs(insn, 19, 31);
1641 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1642 } else { /* register */
1643 unsigned int rs2;
1645 rs2 = GET_FIELD(insn, 27, 31);
1646 if (rs2 == 0)
1647 r_rs2 = tcg_const_tl(0); // XXX how to free?
1648 else if (rs2 < 8)
1649 r_rs2 = cpu_gregs[rs2];
1650 else
1651 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1653 return r_rs2;
1656 #define CHECK_IU_FEATURE(dc, FEATURE) \
1657 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1658 goto illegal_insn;
1659 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1660 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1661 goto nfpu_insn;
1663 /* before an instruction, dc->pc must be static */
1664 static void disas_sparc_insn(DisasContext * dc)
1666 unsigned int insn, opc, rs1, rs2, rd;
1667 target_long simm;
1669 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1670 tcg_gen_debug_insn_start(dc->pc);
1671 insn = ldl_code(dc->pc);
1672 opc = GET_FIELD(insn, 0, 1);
1674 rd = GET_FIELD(insn, 2, 6);
1676 cpu_src1 = tcg_temp_new(); // const
1677 cpu_src2 = tcg_temp_new(); // const
1679 switch (opc) {
1680 case 0: /* branches/sethi */
1682 unsigned int xop = GET_FIELD(insn, 7, 9);
1683 int32_t target;
1684 switch (xop) {
1685 #ifdef TARGET_SPARC64
1686 case 0x1: /* V9 BPcc */
1688 int cc;
1690 target = GET_FIELD_SP(insn, 0, 18);
1691 target = sign_extend(target, 18);
1692 target <<= 2;
1693 cc = GET_FIELD_SP(insn, 20, 21);
1694 if (cc == 0)
1695 do_branch(dc, target, insn, 0, cpu_cond);
1696 else if (cc == 2)
1697 do_branch(dc, target, insn, 1, cpu_cond);
1698 else
1699 goto illegal_insn;
1700 goto jmp_insn;
1702 case 0x3: /* V9 BPr */
1704 target = GET_FIELD_SP(insn, 0, 13) |
1705 (GET_FIELD_SP(insn, 20, 21) << 14);
1706 target = sign_extend(target, 16);
1707 target <<= 2;
1708 cpu_src1 = get_src1(insn, cpu_src1);
1709 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1710 goto jmp_insn;
1712 case 0x5: /* V9 FBPcc */
1714 int cc = GET_FIELD_SP(insn, 20, 21);
1715 if (gen_trap_ifnofpu(dc, cpu_cond))
1716 goto jmp_insn;
1717 target = GET_FIELD_SP(insn, 0, 18);
1718 target = sign_extend(target, 19);
1719 target <<= 2;
1720 do_fbranch(dc, target, insn, cc, cpu_cond);
1721 goto jmp_insn;
1723 #else
1724 case 0x7: /* CBN+x */
1726 goto ncp_insn;
1728 #endif
1729 case 0x2: /* BN+x */
1731 target = GET_FIELD(insn, 10, 31);
1732 target = sign_extend(target, 22);
1733 target <<= 2;
1734 do_branch(dc, target, insn, 0, cpu_cond);
1735 goto jmp_insn;
1737 case 0x6: /* FBN+x */
1739 if (gen_trap_ifnofpu(dc, cpu_cond))
1740 goto jmp_insn;
1741 target = GET_FIELD(insn, 10, 31);
1742 target = sign_extend(target, 22);
1743 target <<= 2;
1744 do_fbranch(dc, target, insn, 0, cpu_cond);
1745 goto jmp_insn;
1747 case 0x4: /* SETHI */
1748 if (rd) { // nop
1749 uint32_t value = GET_FIELD(insn, 10, 31);
1750 TCGv r_const;
1752 r_const = tcg_const_tl(value << 10);
1753 gen_movl_TN_reg(rd, r_const);
1754 tcg_temp_free(r_const);
1756 break;
1757 case 0x0: /* UNIMPL */
1758 default:
1759 goto illegal_insn;
1761 break;
1763 break;
1764 case 1: /*CALL*/
1766 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1767 TCGv r_const;
1769 r_const = tcg_const_tl(dc->pc);
1770 gen_movl_TN_reg(15, r_const);
1771 tcg_temp_free(r_const);
1772 target += dc->pc;
1773 gen_mov_pc_npc(dc, cpu_cond);
1774 dc->npc = target;
1776 goto jmp_insn;
1777 case 2: /* FPU & Logical Operations */
1779 unsigned int xop = GET_FIELD(insn, 7, 12);
1780 if (xop == 0x3a) { /* generate trap */
1781 int cond;
1783 cpu_src1 = get_src1(insn, cpu_src1);
1784 if (IS_IMM) {
1785 rs2 = GET_FIELD(insn, 25, 31);
1786 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1787 } else {
1788 rs2 = GET_FIELD(insn, 27, 31);
1789 if (rs2 != 0) {
1790 gen_movl_reg_TN(rs2, cpu_src2);
1791 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1792 } else
1793 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1795 cond = GET_FIELD(insn, 3, 6);
1796 if (cond == 0x8) {
1797 save_state(dc, cpu_cond);
1798 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1799 supervisor(dc))
1800 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1801 else
1802 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1803 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1804 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1805 gen_helper_raise_exception(cpu_tmp32);
1806 } else if (cond != 0) {
1807 TCGv r_cond = tcg_temp_new();
1808 int l1;
1809 #ifdef TARGET_SPARC64
1810 /* V9 icc/xcc */
1811 int cc = GET_FIELD_SP(insn, 11, 12);
1813 save_state(dc, cpu_cond);
1814 if (cc == 0)
1815 gen_cond(r_cond, 0, cond, dc);
1816 else if (cc == 2)
1817 gen_cond(r_cond, 1, cond, dc);
1818 else
1819 goto illegal_insn;
1820 #else
1821 save_state(dc, cpu_cond);
1822 gen_cond(r_cond, 0, cond, dc);
1823 #endif
1824 l1 = gen_new_label();
1825 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1827 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1828 supervisor(dc))
1829 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1830 else
1831 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1832 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1833 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1834 gen_helper_raise_exception(cpu_tmp32);
1836 gen_set_label(l1);
1837 tcg_temp_free(r_cond);
1839 gen_op_next_insn();
1840 tcg_gen_exit_tb(0);
1841 dc->is_br = 1;
1842 goto jmp_insn;
1843 } else if (xop == 0x28) {
1844 rs1 = GET_FIELD(insn, 13, 17);
1845 switch(rs1) {
1846 case 0: /* rdy */
1847 #ifndef TARGET_SPARC64
1848 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1849 manual, rdy on the microSPARC
1850 II */
1851 case 0x0f: /* stbar in the SPARCv8 manual,
1852 rdy on the microSPARC II */
1853 case 0x10 ... 0x1f: /* implementation-dependent in the
1854 SPARCv8 manual, rdy on the
1855 microSPARC II */
1856 #endif
1857 gen_movl_TN_reg(rd, cpu_y);
1858 break;
1859 #ifdef TARGET_SPARC64
1860 case 0x2: /* V9 rdccr */
1861 gen_helper_compute_psr();
1862 gen_helper_rdccr(cpu_dst);
1863 gen_movl_TN_reg(rd, cpu_dst);
1864 break;
1865 case 0x3: /* V9 rdasi */
1866 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1867 gen_movl_TN_reg(rd, cpu_dst);
1868 break;
1869 case 0x4: /* V9 rdtick */
1871 TCGv_ptr r_tickptr;
1873 r_tickptr = tcg_temp_new_ptr();
1874 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1875 offsetof(CPUState, tick));
1876 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1877 tcg_temp_free_ptr(r_tickptr);
1878 gen_movl_TN_reg(rd, cpu_dst);
1880 break;
1881 case 0x5: /* V9 rdpc */
1883 TCGv r_const;
1885 r_const = tcg_const_tl(dc->pc);
1886 gen_movl_TN_reg(rd, r_const);
1887 tcg_temp_free(r_const);
1889 break;
1890 case 0x6: /* V9 rdfprs */
1891 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
1892 gen_movl_TN_reg(rd, cpu_dst);
1893 break;
1894 case 0xf: /* V9 membar */
1895 break; /* no effect */
1896 case 0x13: /* Graphics Status */
1897 if (gen_trap_ifnofpu(dc, cpu_cond))
1898 goto jmp_insn;
1899 gen_movl_TN_reg(rd, cpu_gsr);
1900 break;
1901 case 0x16: /* Softint */
1902 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
1903 gen_movl_TN_reg(rd, cpu_dst);
1904 break;
1905 case 0x17: /* Tick compare */
1906 gen_movl_TN_reg(rd, cpu_tick_cmpr);
1907 break;
1908 case 0x18: /* System tick */
1910 TCGv_ptr r_tickptr;
1912 r_tickptr = tcg_temp_new_ptr();
1913 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1914 offsetof(CPUState, stick));
1915 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1916 tcg_temp_free_ptr(r_tickptr);
1917 gen_movl_TN_reg(rd, cpu_dst);
1919 break;
1920 case 0x19: /* System tick compare */
1921 gen_movl_TN_reg(rd, cpu_stick_cmpr);
1922 break;
1923 case 0x10: /* Performance Control */
1924 case 0x11: /* Performance Instrumentation Counter */
1925 case 0x12: /* Dispatch Control */
1926 case 0x14: /* Softint set, WO */
1927 case 0x15: /* Softint clear, WO */
1928 #endif
1929 default:
1930 goto illegal_insn;
1932 #if !defined(CONFIG_USER_ONLY)
1933 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1934 #ifndef TARGET_SPARC64
1935 if (!supervisor(dc))
1936 goto priv_insn;
1937 gen_helper_compute_psr();
1938 dc->cc_op = CC_OP_FLAGS;
1939 gen_helper_rdpsr(cpu_dst);
1940 #else
1941 CHECK_IU_FEATURE(dc, HYPV);
1942 if (!hypervisor(dc))
1943 goto priv_insn;
1944 rs1 = GET_FIELD(insn, 13, 17);
1945 switch (rs1) {
1946 case 0: // hpstate
1947 // gen_op_rdhpstate();
1948 break;
1949 case 1: // htstate
1950 // gen_op_rdhtstate();
1951 break;
1952 case 3: // hintp
1953 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
1954 break;
1955 case 5: // htba
1956 tcg_gen_mov_tl(cpu_dst, cpu_htba);
1957 break;
1958 case 6: // hver
1959 tcg_gen_mov_tl(cpu_dst, cpu_hver);
1960 break;
1961 case 31: // hstick_cmpr
1962 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
1963 break;
1964 default:
1965 goto illegal_insn;
1967 #endif
1968 gen_movl_TN_reg(rd, cpu_dst);
1969 break;
1970 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
1971 if (!supervisor(dc))
1972 goto priv_insn;
1973 #ifdef TARGET_SPARC64
1974 rs1 = GET_FIELD(insn, 13, 17);
1975 switch (rs1) {
1976 case 0: // tpc
1978 TCGv_ptr r_tsptr;
1980 r_tsptr = tcg_temp_new_ptr();
1981 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1982 offsetof(CPUState, tsptr));
1983 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
1984 offsetof(trap_state, tpc));
1985 tcg_temp_free_ptr(r_tsptr);
1987 break;
1988 case 1: // tnpc
1990 TCGv_ptr r_tsptr;
1992 r_tsptr = tcg_temp_new_ptr();
1993 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1994 offsetof(CPUState, tsptr));
1995 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
1996 offsetof(trap_state, tnpc));
1997 tcg_temp_free_ptr(r_tsptr);
1999 break;
2000 case 2: // tstate
2002 TCGv_ptr r_tsptr;
2004 r_tsptr = tcg_temp_new_ptr();
2005 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2006 offsetof(CPUState, tsptr));
2007 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2008 offsetof(trap_state, tstate));
2009 tcg_temp_free_ptr(r_tsptr);
2011 break;
2012 case 3: // tt
2014 TCGv_ptr r_tsptr;
2016 r_tsptr = tcg_temp_new_ptr();
2017 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2018 offsetof(CPUState, tsptr));
2019 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2020 offsetof(trap_state, tt));
2021 tcg_temp_free_ptr(r_tsptr);
2022 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2024 break;
2025 case 4: // tick
2027 TCGv_ptr r_tickptr;
2029 r_tickptr = tcg_temp_new_ptr();
2030 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2031 offsetof(CPUState, tick));
2032 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2033 gen_movl_TN_reg(rd, cpu_tmp0);
2034 tcg_temp_free_ptr(r_tickptr);
2036 break;
2037 case 5: // tba
2038 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2039 break;
2040 case 6: // pstate
2041 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2042 offsetof(CPUSPARCState, pstate));
2043 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2044 break;
2045 case 7: // tl
2046 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2047 offsetof(CPUSPARCState, tl));
2048 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2049 break;
2050 case 8: // pil
2051 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2052 offsetof(CPUSPARCState, psrpil));
2053 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2054 break;
2055 case 9: // cwp
2056 gen_helper_rdcwp(cpu_tmp0);
2057 break;
2058 case 10: // cansave
2059 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2060 offsetof(CPUSPARCState, cansave));
2061 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2062 break;
2063 case 11: // canrestore
2064 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2065 offsetof(CPUSPARCState, canrestore));
2066 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2067 break;
2068 case 12: // cleanwin
2069 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2070 offsetof(CPUSPARCState, cleanwin));
2071 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2072 break;
2073 case 13: // otherwin
2074 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2075 offsetof(CPUSPARCState, otherwin));
2076 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2077 break;
2078 case 14: // wstate
2079 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2080 offsetof(CPUSPARCState, wstate));
2081 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2082 break;
2083 case 16: // UA2005 gl
2084 CHECK_IU_FEATURE(dc, GL);
2085 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2086 offsetof(CPUSPARCState, gl));
2087 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2088 break;
2089 case 26: // UA2005 strand status
2090 CHECK_IU_FEATURE(dc, HYPV);
2091 if (!hypervisor(dc))
2092 goto priv_insn;
2093 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2094 break;
2095 case 31: // ver
2096 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2097 break;
2098 case 15: // fq
2099 default:
2100 goto illegal_insn;
2102 #else
2103 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2104 #endif
2105 gen_movl_TN_reg(rd, cpu_tmp0);
2106 break;
2107 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2108 #ifdef TARGET_SPARC64
2109 save_state(dc, cpu_cond);
2110 gen_helper_flushw();
2111 #else
2112 if (!supervisor(dc))
2113 goto priv_insn;
2114 gen_movl_TN_reg(rd, cpu_tbr);
2115 #endif
2116 break;
2117 #endif
2118 } else if (xop == 0x34) { /* FPU Operations */
2119 if (gen_trap_ifnofpu(dc, cpu_cond))
2120 goto jmp_insn;
2121 gen_op_clear_ieee_excp_and_FTT();
2122 rs1 = GET_FIELD(insn, 13, 17);
2123 rs2 = GET_FIELD(insn, 27, 31);
2124 xop = GET_FIELD(insn, 18, 26);
2125 switch (xop) {
2126 case 0x1: /* fmovs */
2127 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2128 break;
2129 case 0x5: /* fnegs */
2130 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2131 break;
2132 case 0x9: /* fabss */
2133 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2134 break;
2135 case 0x29: /* fsqrts */
2136 CHECK_FPU_FEATURE(dc, FSQRT);
2137 gen_clear_float_exceptions();
2138 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2139 gen_helper_check_ieee_exceptions();
2140 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2141 break;
2142 case 0x2a: /* fsqrtd */
2143 CHECK_FPU_FEATURE(dc, FSQRT);
2144 gen_op_load_fpr_DT1(DFPREG(rs2));
2145 gen_clear_float_exceptions();
2146 gen_helper_fsqrtd();
2147 gen_helper_check_ieee_exceptions();
2148 gen_op_store_DT0_fpr(DFPREG(rd));
2149 break;
2150 case 0x2b: /* fsqrtq */
2151 CHECK_FPU_FEATURE(dc, FLOAT128);
2152 gen_op_load_fpr_QT1(QFPREG(rs2));
2153 gen_clear_float_exceptions();
2154 gen_helper_fsqrtq();
2155 gen_helper_check_ieee_exceptions();
2156 gen_op_store_QT0_fpr(QFPREG(rd));
2157 break;
2158 case 0x41: /* fadds */
2159 gen_clear_float_exceptions();
2160 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2161 gen_helper_check_ieee_exceptions();
2162 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2163 break;
2164 case 0x42: /* faddd */
2165 gen_op_load_fpr_DT0(DFPREG(rs1));
2166 gen_op_load_fpr_DT1(DFPREG(rs2));
2167 gen_clear_float_exceptions();
2168 gen_helper_faddd();
2169 gen_helper_check_ieee_exceptions();
2170 gen_op_store_DT0_fpr(DFPREG(rd));
2171 break;
2172 case 0x43: /* faddq */
2173 CHECK_FPU_FEATURE(dc, FLOAT128);
2174 gen_op_load_fpr_QT0(QFPREG(rs1));
2175 gen_op_load_fpr_QT1(QFPREG(rs2));
2176 gen_clear_float_exceptions();
2177 gen_helper_faddq();
2178 gen_helper_check_ieee_exceptions();
2179 gen_op_store_QT0_fpr(QFPREG(rd));
2180 break;
2181 case 0x45: /* fsubs */
2182 gen_clear_float_exceptions();
2183 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2184 gen_helper_check_ieee_exceptions();
2185 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2186 break;
2187 case 0x46: /* fsubd */
2188 gen_op_load_fpr_DT0(DFPREG(rs1));
2189 gen_op_load_fpr_DT1(DFPREG(rs2));
2190 gen_clear_float_exceptions();
2191 gen_helper_fsubd();
2192 gen_helper_check_ieee_exceptions();
2193 gen_op_store_DT0_fpr(DFPREG(rd));
2194 break;
2195 case 0x47: /* fsubq */
2196 CHECK_FPU_FEATURE(dc, FLOAT128);
2197 gen_op_load_fpr_QT0(QFPREG(rs1));
2198 gen_op_load_fpr_QT1(QFPREG(rs2));
2199 gen_clear_float_exceptions();
2200 gen_helper_fsubq();
2201 gen_helper_check_ieee_exceptions();
2202 gen_op_store_QT0_fpr(QFPREG(rd));
2203 break;
2204 case 0x49: /* fmuls */
2205 CHECK_FPU_FEATURE(dc, FMUL);
2206 gen_clear_float_exceptions();
2207 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2208 gen_helper_check_ieee_exceptions();
2209 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2210 break;
2211 case 0x4a: /* fmuld */
2212 CHECK_FPU_FEATURE(dc, FMUL);
2213 gen_op_load_fpr_DT0(DFPREG(rs1));
2214 gen_op_load_fpr_DT1(DFPREG(rs2));
2215 gen_clear_float_exceptions();
2216 gen_helper_fmuld();
2217 gen_helper_check_ieee_exceptions();
2218 gen_op_store_DT0_fpr(DFPREG(rd));
2219 break;
2220 case 0x4b: /* fmulq */
2221 CHECK_FPU_FEATURE(dc, FLOAT128);
2222 CHECK_FPU_FEATURE(dc, FMUL);
2223 gen_op_load_fpr_QT0(QFPREG(rs1));
2224 gen_op_load_fpr_QT1(QFPREG(rs2));
2225 gen_clear_float_exceptions();
2226 gen_helper_fmulq();
2227 gen_helper_check_ieee_exceptions();
2228 gen_op_store_QT0_fpr(QFPREG(rd));
2229 break;
2230 case 0x4d: /* fdivs */
2231 gen_clear_float_exceptions();
2232 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2233 gen_helper_check_ieee_exceptions();
2234 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2235 break;
2236 case 0x4e: /* fdivd */
2237 gen_op_load_fpr_DT0(DFPREG(rs1));
2238 gen_op_load_fpr_DT1(DFPREG(rs2));
2239 gen_clear_float_exceptions();
2240 gen_helper_fdivd();
2241 gen_helper_check_ieee_exceptions();
2242 gen_op_store_DT0_fpr(DFPREG(rd));
2243 break;
2244 case 0x4f: /* fdivq */
2245 CHECK_FPU_FEATURE(dc, FLOAT128);
2246 gen_op_load_fpr_QT0(QFPREG(rs1));
2247 gen_op_load_fpr_QT1(QFPREG(rs2));
2248 gen_clear_float_exceptions();
2249 gen_helper_fdivq();
2250 gen_helper_check_ieee_exceptions();
2251 gen_op_store_QT0_fpr(QFPREG(rd));
2252 break;
2253 case 0x69: /* fsmuld */
2254 CHECK_FPU_FEATURE(dc, FSMULD);
2255 gen_clear_float_exceptions();
2256 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2257 gen_helper_check_ieee_exceptions();
2258 gen_op_store_DT0_fpr(DFPREG(rd));
2259 break;
2260 case 0x6e: /* fdmulq */
2261 CHECK_FPU_FEATURE(dc, FLOAT128);
2262 gen_op_load_fpr_DT0(DFPREG(rs1));
2263 gen_op_load_fpr_DT1(DFPREG(rs2));
2264 gen_clear_float_exceptions();
2265 gen_helper_fdmulq();
2266 gen_helper_check_ieee_exceptions();
2267 gen_op_store_QT0_fpr(QFPREG(rd));
2268 break;
2269 case 0xc4: /* fitos */
2270 gen_clear_float_exceptions();
2271 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2272 gen_helper_check_ieee_exceptions();
2273 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2274 break;
2275 case 0xc6: /* fdtos */
2276 gen_op_load_fpr_DT1(DFPREG(rs2));
2277 gen_clear_float_exceptions();
2278 gen_helper_fdtos(cpu_tmp32);
2279 gen_helper_check_ieee_exceptions();
2280 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2281 break;
2282 case 0xc7: /* fqtos */
2283 CHECK_FPU_FEATURE(dc, FLOAT128);
2284 gen_op_load_fpr_QT1(QFPREG(rs2));
2285 gen_clear_float_exceptions();
2286 gen_helper_fqtos(cpu_tmp32);
2287 gen_helper_check_ieee_exceptions();
2288 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2289 break;
2290 case 0xc8: /* fitod */
2291 gen_helper_fitod(cpu_fpr[rs2]);
2292 gen_op_store_DT0_fpr(DFPREG(rd));
2293 break;
2294 case 0xc9: /* fstod */
2295 gen_helper_fstod(cpu_fpr[rs2]);
2296 gen_op_store_DT0_fpr(DFPREG(rd));
2297 break;
2298 case 0xcb: /* fqtod */
2299 CHECK_FPU_FEATURE(dc, FLOAT128);
2300 gen_op_load_fpr_QT1(QFPREG(rs2));
2301 gen_clear_float_exceptions();
2302 gen_helper_fqtod();
2303 gen_helper_check_ieee_exceptions();
2304 gen_op_store_DT0_fpr(DFPREG(rd));
2305 break;
2306 case 0xcc: /* fitoq */
2307 CHECK_FPU_FEATURE(dc, FLOAT128);
2308 gen_helper_fitoq(cpu_fpr[rs2]);
2309 gen_op_store_QT0_fpr(QFPREG(rd));
2310 break;
2311 case 0xcd: /* fstoq */
2312 CHECK_FPU_FEATURE(dc, FLOAT128);
2313 gen_helper_fstoq(cpu_fpr[rs2]);
2314 gen_op_store_QT0_fpr(QFPREG(rd));
2315 break;
2316 case 0xce: /* fdtoq */
2317 CHECK_FPU_FEATURE(dc, FLOAT128);
2318 gen_op_load_fpr_DT1(DFPREG(rs2));
2319 gen_helper_fdtoq();
2320 gen_op_store_QT0_fpr(QFPREG(rd));
2321 break;
2322 case 0xd1: /* fstoi */
2323 gen_clear_float_exceptions();
2324 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2325 gen_helper_check_ieee_exceptions();
2326 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2327 break;
2328 case 0xd2: /* fdtoi */
2329 gen_op_load_fpr_DT1(DFPREG(rs2));
2330 gen_clear_float_exceptions();
2331 gen_helper_fdtoi(cpu_tmp32);
2332 gen_helper_check_ieee_exceptions();
2333 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2334 break;
2335 case 0xd3: /* fqtoi */
2336 CHECK_FPU_FEATURE(dc, FLOAT128);
2337 gen_op_load_fpr_QT1(QFPREG(rs2));
2338 gen_clear_float_exceptions();
2339 gen_helper_fqtoi(cpu_tmp32);
2340 gen_helper_check_ieee_exceptions();
2341 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2342 break;
2343 #ifdef TARGET_SPARC64
2344 case 0x2: /* V9 fmovd */
2345 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2346 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2347 cpu_fpr[DFPREG(rs2) + 1]);
2348 break;
2349 case 0x3: /* V9 fmovq */
2350 CHECK_FPU_FEATURE(dc, FLOAT128);
2351 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2352 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2353 cpu_fpr[QFPREG(rs2) + 1]);
2354 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2355 cpu_fpr[QFPREG(rs2) + 2]);
2356 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2357 cpu_fpr[QFPREG(rs2) + 3]);
2358 break;
2359 case 0x6: /* V9 fnegd */
2360 gen_op_load_fpr_DT1(DFPREG(rs2));
2361 gen_helper_fnegd();
2362 gen_op_store_DT0_fpr(DFPREG(rd));
2363 break;
2364 case 0x7: /* V9 fnegq */
2365 CHECK_FPU_FEATURE(dc, FLOAT128);
2366 gen_op_load_fpr_QT1(QFPREG(rs2));
2367 gen_helper_fnegq();
2368 gen_op_store_QT0_fpr(QFPREG(rd));
2369 break;
2370 case 0xa: /* V9 fabsd */
2371 gen_op_load_fpr_DT1(DFPREG(rs2));
2372 gen_helper_fabsd();
2373 gen_op_store_DT0_fpr(DFPREG(rd));
2374 break;
2375 case 0xb: /* V9 fabsq */
2376 CHECK_FPU_FEATURE(dc, FLOAT128);
2377 gen_op_load_fpr_QT1(QFPREG(rs2));
2378 gen_helper_fabsq();
2379 gen_op_store_QT0_fpr(QFPREG(rd));
2380 break;
2381 case 0x81: /* V9 fstox */
2382 gen_clear_float_exceptions();
2383 gen_helper_fstox(cpu_fpr[rs2]);
2384 gen_helper_check_ieee_exceptions();
2385 gen_op_store_DT0_fpr(DFPREG(rd));
2386 break;
2387 case 0x82: /* V9 fdtox */
2388 gen_op_load_fpr_DT1(DFPREG(rs2));
2389 gen_clear_float_exceptions();
2390 gen_helper_fdtox();
2391 gen_helper_check_ieee_exceptions();
2392 gen_op_store_DT0_fpr(DFPREG(rd));
2393 break;
2394 case 0x83: /* V9 fqtox */
2395 CHECK_FPU_FEATURE(dc, FLOAT128);
2396 gen_op_load_fpr_QT1(QFPREG(rs2));
2397 gen_clear_float_exceptions();
2398 gen_helper_fqtox();
2399 gen_helper_check_ieee_exceptions();
2400 gen_op_store_DT0_fpr(DFPREG(rd));
2401 break;
2402 case 0x84: /* V9 fxtos */
2403 gen_op_load_fpr_DT1(DFPREG(rs2));
2404 gen_clear_float_exceptions();
2405 gen_helper_fxtos(cpu_tmp32);
2406 gen_helper_check_ieee_exceptions();
2407 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2408 break;
2409 case 0x88: /* V9 fxtod */
2410 gen_op_load_fpr_DT1(DFPREG(rs2));
2411 gen_clear_float_exceptions();
2412 gen_helper_fxtod();
2413 gen_helper_check_ieee_exceptions();
2414 gen_op_store_DT0_fpr(DFPREG(rd));
2415 break;
2416 case 0x8c: /* V9 fxtoq */
2417 CHECK_FPU_FEATURE(dc, FLOAT128);
2418 gen_op_load_fpr_DT1(DFPREG(rs2));
2419 gen_clear_float_exceptions();
2420 gen_helper_fxtoq();
2421 gen_helper_check_ieee_exceptions();
2422 gen_op_store_QT0_fpr(QFPREG(rd));
2423 break;
2424 #endif
2425 default:
2426 goto illegal_insn;
2428 } else if (xop == 0x35) { /* FPU Operations */
2429 #ifdef TARGET_SPARC64
2430 int cond;
2431 #endif
2432 if (gen_trap_ifnofpu(dc, cpu_cond))
2433 goto jmp_insn;
2434 gen_op_clear_ieee_excp_and_FTT();
2435 rs1 = GET_FIELD(insn, 13, 17);
2436 rs2 = GET_FIELD(insn, 27, 31);
2437 xop = GET_FIELD(insn, 18, 26);
2438 #ifdef TARGET_SPARC64
2439 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2440 int l1;
2442 l1 = gen_new_label();
2443 cond = GET_FIELD_SP(insn, 14, 17);
2444 cpu_src1 = get_src1(insn, cpu_src1);
2445 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2446 0, l1);
2447 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2448 gen_set_label(l1);
2449 break;
2450 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2451 int l1;
2453 l1 = gen_new_label();
2454 cond = GET_FIELD_SP(insn, 14, 17);
2455 cpu_src1 = get_src1(insn, cpu_src1);
2456 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2457 0, l1);
2458 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2459 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2460 gen_set_label(l1);
2461 break;
2462 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2463 int l1;
2465 CHECK_FPU_FEATURE(dc, FLOAT128);
2466 l1 = gen_new_label();
2467 cond = GET_FIELD_SP(insn, 14, 17);
2468 cpu_src1 = get_src1(insn, cpu_src1);
2469 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2470 0, l1);
2471 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2472 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2473 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2474 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2475 gen_set_label(l1);
2476 break;
2478 #endif
2479 switch (xop) {
2480 #ifdef TARGET_SPARC64
2481 #define FMOVSCC(fcc) \
2483 TCGv r_cond; \
2484 int l1; \
2486 l1 = gen_new_label(); \
2487 r_cond = tcg_temp_new(); \
2488 cond = GET_FIELD_SP(insn, 14, 17); \
2489 gen_fcond(r_cond, fcc, cond); \
2490 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2491 0, l1); \
2492 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2493 gen_set_label(l1); \
2494 tcg_temp_free(r_cond); \
2496 #define FMOVDCC(fcc) \
2498 TCGv r_cond; \
2499 int l1; \
2501 l1 = gen_new_label(); \
2502 r_cond = tcg_temp_new(); \
2503 cond = GET_FIELD_SP(insn, 14, 17); \
2504 gen_fcond(r_cond, fcc, cond); \
2505 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2506 0, l1); \
2507 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2508 cpu_fpr[DFPREG(rs2)]); \
2509 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2510 cpu_fpr[DFPREG(rs2) + 1]); \
2511 gen_set_label(l1); \
2512 tcg_temp_free(r_cond); \
2514 #define FMOVQCC(fcc) \
2516 TCGv r_cond; \
2517 int l1; \
2519 l1 = gen_new_label(); \
2520 r_cond = tcg_temp_new(); \
2521 cond = GET_FIELD_SP(insn, 14, 17); \
2522 gen_fcond(r_cond, fcc, cond); \
2523 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2524 0, l1); \
2525 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2526 cpu_fpr[QFPREG(rs2)]); \
2527 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2528 cpu_fpr[QFPREG(rs2) + 1]); \
2529 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2530 cpu_fpr[QFPREG(rs2) + 2]); \
2531 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2532 cpu_fpr[QFPREG(rs2) + 3]); \
2533 gen_set_label(l1); \
2534 tcg_temp_free(r_cond); \
2536 case 0x001: /* V9 fmovscc %fcc0 */
2537 FMOVSCC(0);
2538 break;
2539 case 0x002: /* V9 fmovdcc %fcc0 */
2540 FMOVDCC(0);
2541 break;
2542 case 0x003: /* V9 fmovqcc %fcc0 */
2543 CHECK_FPU_FEATURE(dc, FLOAT128);
2544 FMOVQCC(0);
2545 break;
2546 case 0x041: /* V9 fmovscc %fcc1 */
2547 FMOVSCC(1);
2548 break;
2549 case 0x042: /* V9 fmovdcc %fcc1 */
2550 FMOVDCC(1);
2551 break;
2552 case 0x043: /* V9 fmovqcc %fcc1 */
2553 CHECK_FPU_FEATURE(dc, FLOAT128);
2554 FMOVQCC(1);
2555 break;
2556 case 0x081: /* V9 fmovscc %fcc2 */
2557 FMOVSCC(2);
2558 break;
2559 case 0x082: /* V9 fmovdcc %fcc2 */
2560 FMOVDCC(2);
2561 break;
2562 case 0x083: /* V9 fmovqcc %fcc2 */
2563 CHECK_FPU_FEATURE(dc, FLOAT128);
2564 FMOVQCC(2);
2565 break;
2566 case 0x0c1: /* V9 fmovscc %fcc3 */
2567 FMOVSCC(3);
2568 break;
2569 case 0x0c2: /* V9 fmovdcc %fcc3 */
2570 FMOVDCC(3);
2571 break;
2572 case 0x0c3: /* V9 fmovqcc %fcc3 */
2573 CHECK_FPU_FEATURE(dc, FLOAT128);
2574 FMOVQCC(3);
2575 break;
2576 #undef FMOVSCC
2577 #undef FMOVDCC
2578 #undef FMOVQCC
2579 #define FMOVSCC(icc) \
2581 TCGv r_cond; \
2582 int l1; \
2584 l1 = gen_new_label(); \
2585 r_cond = tcg_temp_new(); \
2586 cond = GET_FIELD_SP(insn, 14, 17); \
2587 gen_cond(r_cond, icc, cond, dc); \
2588 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2589 0, l1); \
2590 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2591 gen_set_label(l1); \
2592 tcg_temp_free(r_cond); \
2594 #define FMOVDCC(icc) \
2596 TCGv r_cond; \
2597 int l1; \
2599 l1 = gen_new_label(); \
2600 r_cond = tcg_temp_new(); \
2601 cond = GET_FIELD_SP(insn, 14, 17); \
2602 gen_cond(r_cond, icc, cond, dc); \
2603 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2604 0, l1); \
2605 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2606 cpu_fpr[DFPREG(rs2)]); \
2607 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2608 cpu_fpr[DFPREG(rs2) + 1]); \
2609 gen_set_label(l1); \
2610 tcg_temp_free(r_cond); \
2612 #define FMOVQCC(icc) \
2614 TCGv r_cond; \
2615 int l1; \
2617 l1 = gen_new_label(); \
2618 r_cond = tcg_temp_new(); \
2619 cond = GET_FIELD_SP(insn, 14, 17); \
2620 gen_cond(r_cond, icc, cond, dc); \
2621 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2622 0, l1); \
2623 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2624 cpu_fpr[QFPREG(rs2)]); \
2625 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2626 cpu_fpr[QFPREG(rs2) + 1]); \
2627 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2628 cpu_fpr[QFPREG(rs2) + 2]); \
2629 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2630 cpu_fpr[QFPREG(rs2) + 3]); \
2631 gen_set_label(l1); \
2632 tcg_temp_free(r_cond); \
2635 case 0x101: /* V9 fmovscc %icc */
2636 FMOVSCC(0);
2637 break;
2638 case 0x102: /* V9 fmovdcc %icc */
2639 FMOVDCC(0);
2640 case 0x103: /* V9 fmovqcc %icc */
2641 CHECK_FPU_FEATURE(dc, FLOAT128);
2642 FMOVQCC(0);
2643 break;
2644 case 0x181: /* V9 fmovscc %xcc */
2645 FMOVSCC(1);
2646 break;
2647 case 0x182: /* V9 fmovdcc %xcc */
2648 FMOVDCC(1);
2649 break;
2650 case 0x183: /* V9 fmovqcc %xcc */
2651 CHECK_FPU_FEATURE(dc, FLOAT128);
2652 FMOVQCC(1);
2653 break;
2654 #undef FMOVSCC
2655 #undef FMOVDCC
2656 #undef FMOVQCC
2657 #endif
2658 case 0x51: /* fcmps, V9 %fcc */
2659 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2660 break;
2661 case 0x52: /* fcmpd, V9 %fcc */
2662 gen_op_load_fpr_DT0(DFPREG(rs1));
2663 gen_op_load_fpr_DT1(DFPREG(rs2));
2664 gen_op_fcmpd(rd & 3);
2665 break;
2666 case 0x53: /* fcmpq, V9 %fcc */
2667 CHECK_FPU_FEATURE(dc, FLOAT128);
2668 gen_op_load_fpr_QT0(QFPREG(rs1));
2669 gen_op_load_fpr_QT1(QFPREG(rs2));
2670 gen_op_fcmpq(rd & 3);
2671 break;
2672 case 0x55: /* fcmpes, V9 %fcc */
2673 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2674 break;
2675 case 0x56: /* fcmped, V9 %fcc */
2676 gen_op_load_fpr_DT0(DFPREG(rs1));
2677 gen_op_load_fpr_DT1(DFPREG(rs2));
2678 gen_op_fcmped(rd & 3);
2679 break;
2680 case 0x57: /* fcmpeq, V9 %fcc */
2681 CHECK_FPU_FEATURE(dc, FLOAT128);
2682 gen_op_load_fpr_QT0(QFPREG(rs1));
2683 gen_op_load_fpr_QT1(QFPREG(rs2));
2684 gen_op_fcmpeq(rd & 3);
2685 break;
2686 default:
2687 goto illegal_insn;
2689 } else if (xop == 0x2) {
2690 // clr/mov shortcut
2692 rs1 = GET_FIELD(insn, 13, 17);
2693 if (rs1 == 0) {
2694 // or %g0, x, y -> mov T0, x; mov y, T0
2695 if (IS_IMM) { /* immediate */
2696 TCGv r_const;
2698 simm = GET_FIELDs(insn, 19, 31);
2699 r_const = tcg_const_tl(simm);
2700 gen_movl_TN_reg(rd, r_const);
2701 tcg_temp_free(r_const);
2702 } else { /* register */
2703 rs2 = GET_FIELD(insn, 27, 31);
2704 gen_movl_reg_TN(rs2, cpu_dst);
2705 gen_movl_TN_reg(rd, cpu_dst);
2707 } else {
2708 cpu_src1 = get_src1(insn, cpu_src1);
2709 if (IS_IMM) { /* immediate */
2710 simm = GET_FIELDs(insn, 19, 31);
2711 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2712 gen_movl_TN_reg(rd, cpu_dst);
2713 } else { /* register */
2714 // or x, %g0, y -> mov T1, x; mov y, T1
2715 rs2 = GET_FIELD(insn, 27, 31);
2716 if (rs2 != 0) {
2717 gen_movl_reg_TN(rs2, cpu_src2);
2718 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2719 gen_movl_TN_reg(rd, cpu_dst);
2720 } else
2721 gen_movl_TN_reg(rd, cpu_src1);
2724 #ifdef TARGET_SPARC64
2725 } else if (xop == 0x25) { /* sll, V9 sllx */
2726 cpu_src1 = get_src1(insn, cpu_src1);
2727 if (IS_IMM) { /* immediate */
2728 simm = GET_FIELDs(insn, 20, 31);
2729 if (insn & (1 << 12)) {
2730 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2731 } else {
2732 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2734 } else { /* register */
2735 rs2 = GET_FIELD(insn, 27, 31);
2736 gen_movl_reg_TN(rs2, cpu_src2);
2737 if (insn & (1 << 12)) {
2738 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2739 } else {
2740 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2742 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2744 gen_movl_TN_reg(rd, cpu_dst);
2745 } else if (xop == 0x26) { /* srl, V9 srlx */
2746 cpu_src1 = get_src1(insn, cpu_src1);
2747 if (IS_IMM) { /* immediate */
2748 simm = GET_FIELDs(insn, 20, 31);
2749 if (insn & (1 << 12)) {
2750 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2751 } else {
2752 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2753 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2755 } else { /* register */
2756 rs2 = GET_FIELD(insn, 27, 31);
2757 gen_movl_reg_TN(rs2, cpu_src2);
2758 if (insn & (1 << 12)) {
2759 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2760 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2761 } else {
2762 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2763 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2764 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2767 gen_movl_TN_reg(rd, cpu_dst);
2768 } else if (xop == 0x27) { /* sra, V9 srax */
2769 cpu_src1 = get_src1(insn, cpu_src1);
2770 if (IS_IMM) { /* immediate */
2771 simm = GET_FIELDs(insn, 20, 31);
2772 if (insn & (1 << 12)) {
2773 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2774 } else {
2775 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2776 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2777 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2779 } else { /* register */
2780 rs2 = GET_FIELD(insn, 27, 31);
2781 gen_movl_reg_TN(rs2, cpu_src2);
2782 if (insn & (1 << 12)) {
2783 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2784 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2785 } else {
2786 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2787 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2788 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2789 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2792 gen_movl_TN_reg(rd, cpu_dst);
2793 #endif
2794 } else if (xop < 0x36) {
2795 if (xop < 0x20) {
2796 cpu_src1 = get_src1(insn, cpu_src1);
2797 cpu_src2 = get_src2(insn, cpu_src2);
2798 switch (xop & ~0x10) {
2799 case 0x0: /* add */
2800 if (IS_IMM) {
2801 simm = GET_FIELDs(insn, 19, 31);
2802 if (xop & 0x10) {
2803 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2804 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2805 dc->cc_op = CC_OP_ADD;
2806 } else {
2807 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2809 } else {
2810 if (xop & 0x10) {
2811 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2812 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2813 dc->cc_op = CC_OP_ADD;
2814 } else {
2815 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2818 break;
2819 case 0x1: /* and */
2820 if (IS_IMM) {
2821 simm = GET_FIELDs(insn, 19, 31);
2822 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2823 } else {
2824 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2826 if (xop & 0x10) {
2827 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2828 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2829 dc->cc_op = CC_OP_LOGIC;
2831 break;
2832 case 0x2: /* or */
2833 if (IS_IMM) {
2834 simm = GET_FIELDs(insn, 19, 31);
2835 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2836 } else {
2837 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2839 if (xop & 0x10) {
2840 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2841 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2842 dc->cc_op = CC_OP_LOGIC;
2844 break;
2845 case 0x3: /* xor */
2846 if (IS_IMM) {
2847 simm = GET_FIELDs(insn, 19, 31);
2848 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2849 } else {
2850 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2852 if (xop & 0x10) {
2853 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2854 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2855 dc->cc_op = CC_OP_LOGIC;
2857 break;
2858 case 0x4: /* sub */
2859 if (IS_IMM) {
2860 simm = GET_FIELDs(insn, 19, 31);
2861 if (xop & 0x10) {
2862 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2863 } else {
2864 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2866 } else {
2867 if (xop & 0x10) {
2868 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2869 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2870 dc->cc_op = CC_OP_SUB;
2871 } else {
2872 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2875 break;
2876 case 0x5: /* andn */
2877 if (IS_IMM) {
2878 simm = GET_FIELDs(insn, 19, 31);
2879 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
2880 } else {
2881 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2883 if (xop & 0x10) {
2884 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2885 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2886 dc->cc_op = CC_OP_LOGIC;
2888 break;
2889 case 0x6: /* orn */
2890 if (IS_IMM) {
2891 simm = GET_FIELDs(insn, 19, 31);
2892 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
2893 } else {
2894 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2896 if (xop & 0x10) {
2897 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2898 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2899 dc->cc_op = CC_OP_LOGIC;
2901 break;
2902 case 0x7: /* xorn */
2903 if (IS_IMM) {
2904 simm = GET_FIELDs(insn, 19, 31);
2905 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2906 } else {
2907 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2908 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2910 if (xop & 0x10) {
2911 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2912 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2913 dc->cc_op = CC_OP_LOGIC;
2915 break;
2916 case 0x8: /* addx, V9 addc */
2917 if (IS_IMM) {
2918 simm = GET_FIELDs(insn, 19, 31);
2919 if (xop & 0x10) {
2920 gen_helper_compute_psr();
2921 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2922 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2923 dc->cc_op = CC_OP_ADDX;
2924 } else {
2925 gen_helper_compute_psr();
2926 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2927 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2928 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2930 } else {
2931 if (xop & 0x10) {
2932 gen_helper_compute_psr();
2933 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2934 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2935 dc->cc_op = CC_OP_ADDX;
2936 } else {
2937 gen_helper_compute_psr();
2938 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2939 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2940 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2943 break;
2944 #ifdef TARGET_SPARC64
2945 case 0x9: /* V9 mulx */
2946 if (IS_IMM) {
2947 simm = GET_FIELDs(insn, 19, 31);
2948 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2949 } else {
2950 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2952 break;
2953 #endif
2954 case 0xa: /* umul */
2955 CHECK_IU_FEATURE(dc, MUL);
2956 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2957 if (xop & 0x10) {
2958 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2959 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2960 dc->cc_op = CC_OP_LOGIC;
2962 break;
2963 case 0xb: /* smul */
2964 CHECK_IU_FEATURE(dc, MUL);
2965 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2966 if (xop & 0x10) {
2967 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2968 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2969 dc->cc_op = CC_OP_LOGIC;
2971 break;
2972 case 0xc: /* subx, V9 subc */
2973 if (IS_IMM) {
2974 simm = GET_FIELDs(insn, 19, 31);
2975 if (xop & 0x10) {
2976 gen_helper_compute_psr();
2977 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
2978 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
2979 dc->cc_op = CC_OP_SUBX;
2980 } else {
2981 gen_helper_compute_psr();
2982 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2983 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2984 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
2986 } else {
2987 if (xop & 0x10) {
2988 gen_helper_compute_psr();
2989 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
2990 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
2991 dc->cc_op = CC_OP_SUBX;
2992 } else {
2993 gen_helper_compute_psr();
2994 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2995 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2996 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
2999 break;
3000 #ifdef TARGET_SPARC64
3001 case 0xd: /* V9 udivx */
3002 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3003 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3004 gen_trap_ifdivzero_tl(cpu_cc_src2);
3005 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3006 break;
3007 #endif
3008 case 0xe: /* udiv */
3009 CHECK_IU_FEATURE(dc, DIV);
3010 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3011 if (xop & 0x10) {
3012 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3013 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3014 dc->cc_op = CC_OP_DIV;
3016 break;
3017 case 0xf: /* sdiv */
3018 CHECK_IU_FEATURE(dc, DIV);
3019 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3020 if (xop & 0x10) {
3021 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3022 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3023 dc->cc_op = CC_OP_DIV;
3025 break;
3026 default:
3027 goto illegal_insn;
3029 gen_movl_TN_reg(rd, cpu_dst);
3030 } else {
3031 cpu_src1 = get_src1(insn, cpu_src1);
3032 cpu_src2 = get_src2(insn, cpu_src2);
3033 switch (xop) {
3034 case 0x20: /* taddcc */
3035 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3036 gen_movl_TN_reg(rd, cpu_dst);
3037 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3038 dc->cc_op = CC_OP_TADD;
3039 break;
3040 case 0x21: /* tsubcc */
3041 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3042 gen_movl_TN_reg(rd, cpu_dst);
3043 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3044 dc->cc_op = CC_OP_TSUB;
3045 break;
3046 case 0x22: /* taddcctv */
3047 save_state(dc, cpu_cond);
3048 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3049 gen_movl_TN_reg(rd, cpu_dst);
3050 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3051 dc->cc_op = CC_OP_TADDTV;
3052 break;
3053 case 0x23: /* tsubcctv */
3054 save_state(dc, cpu_cond);
3055 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3056 gen_movl_TN_reg(rd, cpu_dst);
3057 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3058 dc->cc_op = CC_OP_TSUBTV;
3059 break;
3060 case 0x24: /* mulscc */
3061 gen_helper_compute_psr();
3062 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3063 gen_movl_TN_reg(rd, cpu_dst);
3064 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3065 dc->cc_op = CC_OP_ADD;
3066 break;
3067 #ifndef TARGET_SPARC64
3068 case 0x25: /* sll */
3069 if (IS_IMM) { /* immediate */
3070 simm = GET_FIELDs(insn, 20, 31);
3071 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3072 } else { /* register */
3073 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3074 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3076 gen_movl_TN_reg(rd, cpu_dst);
3077 break;
3078 case 0x26: /* srl */
3079 if (IS_IMM) { /* immediate */
3080 simm = GET_FIELDs(insn, 20, 31);
3081 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3082 } else { /* register */
3083 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3084 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3086 gen_movl_TN_reg(rd, cpu_dst);
3087 break;
3088 case 0x27: /* sra */
3089 if (IS_IMM) { /* immediate */
3090 simm = GET_FIELDs(insn, 20, 31);
3091 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3092 } else { /* register */
3093 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3094 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3096 gen_movl_TN_reg(rd, cpu_dst);
3097 break;
3098 #endif
3099 case 0x30:
3101 switch(rd) {
3102 case 0: /* wry */
3103 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3104 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3105 break;
3106 #ifndef TARGET_SPARC64
3107 case 0x01 ... 0x0f: /* undefined in the
3108 SPARCv8 manual, nop
3109 on the microSPARC
3110 II */
3111 case 0x10 ... 0x1f: /* implementation-dependent
3112 in the SPARCv8
3113 manual, nop on the
3114 microSPARC II */
3115 break;
3116 #else
3117 case 0x2: /* V9 wrccr */
3118 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3119 gen_helper_wrccr(cpu_dst);
3120 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3121 dc->cc_op = CC_OP_FLAGS;
3122 break;
3123 case 0x3: /* V9 wrasi */
3124 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3125 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3126 break;
3127 case 0x6: /* V9 wrfprs */
3128 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3129 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3130 save_state(dc, cpu_cond);
3131 gen_op_next_insn();
3132 tcg_gen_exit_tb(0);
3133 dc->is_br = 1;
3134 break;
3135 case 0xf: /* V9 sir, nop if user */
3136 #if !defined(CONFIG_USER_ONLY)
3137 if (supervisor(dc))
3138 ; // XXX
3139 #endif
3140 break;
3141 case 0x13: /* Graphics Status */
3142 if (gen_trap_ifnofpu(dc, cpu_cond))
3143 goto jmp_insn;
3144 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3145 break;
3146 case 0x14: /* Softint set */
3147 if (!supervisor(dc))
3148 goto illegal_insn;
3149 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3150 gen_helper_set_softint(cpu_tmp64);
3151 break;
3152 case 0x15: /* Softint clear */
3153 if (!supervisor(dc))
3154 goto illegal_insn;
3155 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3156 gen_helper_clear_softint(cpu_tmp64);
3157 break;
3158 case 0x16: /* Softint write */
3159 if (!supervisor(dc))
3160 goto illegal_insn;
3161 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3162 gen_helper_write_softint(cpu_tmp64);
3163 break;
3164 case 0x17: /* Tick compare */
3165 #if !defined(CONFIG_USER_ONLY)
3166 if (!supervisor(dc))
3167 goto illegal_insn;
3168 #endif
3170 TCGv_ptr r_tickptr;
3172 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3173 cpu_src2);
3174 r_tickptr = tcg_temp_new_ptr();
3175 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3176 offsetof(CPUState, tick));
3177 gen_helper_tick_set_limit(r_tickptr,
3178 cpu_tick_cmpr);
3179 tcg_temp_free_ptr(r_tickptr);
3181 break;
3182 case 0x18: /* System tick */
3183 #if !defined(CONFIG_USER_ONLY)
3184 if (!supervisor(dc))
3185 goto illegal_insn;
3186 #endif
3188 TCGv_ptr r_tickptr;
3190 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3191 cpu_src2);
3192 r_tickptr = tcg_temp_new_ptr();
3193 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3194 offsetof(CPUState, stick));
3195 gen_helper_tick_set_count(r_tickptr,
3196 cpu_dst);
3197 tcg_temp_free_ptr(r_tickptr);
3199 break;
3200 case 0x19: /* System tick compare */
3201 #if !defined(CONFIG_USER_ONLY)
3202 if (!supervisor(dc))
3203 goto illegal_insn;
3204 #endif
3206 TCGv_ptr r_tickptr;
3208 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3209 cpu_src2);
3210 r_tickptr = tcg_temp_new_ptr();
3211 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3212 offsetof(CPUState, stick));
3213 gen_helper_tick_set_limit(r_tickptr,
3214 cpu_stick_cmpr);
3215 tcg_temp_free_ptr(r_tickptr);
3217 break;
3219 case 0x10: /* Performance Control */
3220 case 0x11: /* Performance Instrumentation
3221 Counter */
3222 case 0x12: /* Dispatch Control */
3223 #endif
3224 default:
3225 goto illegal_insn;
3228 break;
3229 #if !defined(CONFIG_USER_ONLY)
3230 case 0x31: /* wrpsr, V9 saved, restored */
3232 if (!supervisor(dc))
3233 goto priv_insn;
3234 #ifdef TARGET_SPARC64
3235 switch (rd) {
3236 case 0:
3237 gen_helper_saved();
3238 break;
3239 case 1:
3240 gen_helper_restored();
3241 break;
3242 case 2: /* UA2005 allclean */
3243 case 3: /* UA2005 otherw */
3244 case 4: /* UA2005 normalw */
3245 case 5: /* UA2005 invalw */
3246 // XXX
3247 default:
3248 goto illegal_insn;
3250 #else
3251 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3252 gen_helper_wrpsr(cpu_dst);
3253 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3254 dc->cc_op = CC_OP_FLAGS;
3255 save_state(dc, cpu_cond);
3256 gen_op_next_insn();
3257 tcg_gen_exit_tb(0);
3258 dc->is_br = 1;
3259 #endif
3261 break;
3262 case 0x32: /* wrwim, V9 wrpr */
3264 if (!supervisor(dc))
3265 goto priv_insn;
3266 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3267 #ifdef TARGET_SPARC64
3268 switch (rd) {
3269 case 0: // tpc
3271 TCGv_ptr r_tsptr;
3273 r_tsptr = tcg_temp_new_ptr();
3274 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3275 offsetof(CPUState, tsptr));
3276 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3277 offsetof(trap_state, tpc));
3278 tcg_temp_free_ptr(r_tsptr);
3280 break;
3281 case 1: // tnpc
3283 TCGv_ptr r_tsptr;
3285 r_tsptr = tcg_temp_new_ptr();
3286 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3287 offsetof(CPUState, tsptr));
3288 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3289 offsetof(trap_state, tnpc));
3290 tcg_temp_free_ptr(r_tsptr);
3292 break;
3293 case 2: // tstate
3295 TCGv_ptr r_tsptr;
3297 r_tsptr = tcg_temp_new_ptr();
3298 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3299 offsetof(CPUState, tsptr));
3300 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3301 offsetof(trap_state,
3302 tstate));
3303 tcg_temp_free_ptr(r_tsptr);
3305 break;
3306 case 3: // tt
3308 TCGv_ptr r_tsptr;
3310 r_tsptr = tcg_temp_new_ptr();
3311 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3312 offsetof(CPUState, tsptr));
3313 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3314 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3315 offsetof(trap_state, tt));
3316 tcg_temp_free_ptr(r_tsptr);
3318 break;
3319 case 4: // tick
3321 TCGv_ptr r_tickptr;
3323 r_tickptr = tcg_temp_new_ptr();
3324 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3325 offsetof(CPUState, tick));
3326 gen_helper_tick_set_count(r_tickptr,
3327 cpu_tmp0);
3328 tcg_temp_free_ptr(r_tickptr);
3330 break;
3331 case 5: // tba
3332 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3333 break;
3334 case 6: // pstate
3335 save_state(dc, cpu_cond);
3336 gen_helper_wrpstate(cpu_tmp0);
3337 gen_op_next_insn();
3338 tcg_gen_exit_tb(0);
3339 dc->is_br = 1;
3340 break;
3341 case 7: // tl
3342 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3343 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3344 offsetof(CPUSPARCState, tl));
3345 break;
3346 case 8: // pil
3347 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3348 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3349 offsetof(CPUSPARCState,
3350 psrpil));
3351 break;
3352 case 9: // cwp
3353 gen_helper_wrcwp(cpu_tmp0);
3354 break;
3355 case 10: // cansave
3356 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3357 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3358 offsetof(CPUSPARCState,
3359 cansave));
3360 break;
3361 case 11: // canrestore
3362 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3363 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3364 offsetof(CPUSPARCState,
3365 canrestore));
3366 break;
3367 case 12: // cleanwin
3368 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3369 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3370 offsetof(CPUSPARCState,
3371 cleanwin));
3372 break;
3373 case 13: // otherwin
3374 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3375 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3376 offsetof(CPUSPARCState,
3377 otherwin));
3378 break;
3379 case 14: // wstate
3380 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3381 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3382 offsetof(CPUSPARCState,
3383 wstate));
3384 break;
3385 case 16: // UA2005 gl
3386 CHECK_IU_FEATURE(dc, GL);
3387 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3388 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3389 offsetof(CPUSPARCState, gl));
3390 break;
3391 case 26: // UA2005 strand status
3392 CHECK_IU_FEATURE(dc, HYPV);
3393 if (!hypervisor(dc))
3394 goto priv_insn;
3395 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3396 break;
3397 default:
3398 goto illegal_insn;
3400 #else
3401 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3402 if (dc->def->nwindows != 32)
3403 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3404 (1 << dc->def->nwindows) - 1);
3405 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3406 #endif
3408 break;
3409 case 0x33: /* wrtbr, UA2005 wrhpr */
3411 #ifndef TARGET_SPARC64
3412 if (!supervisor(dc))
3413 goto priv_insn;
3414 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3415 #else
3416 CHECK_IU_FEATURE(dc, HYPV);
3417 if (!hypervisor(dc))
3418 goto priv_insn;
3419 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3420 switch (rd) {
3421 case 0: // hpstate
3422 // XXX gen_op_wrhpstate();
3423 save_state(dc, cpu_cond);
3424 gen_op_next_insn();
3425 tcg_gen_exit_tb(0);
3426 dc->is_br = 1;
3427 break;
3428 case 1: // htstate
3429 // XXX gen_op_wrhtstate();
3430 break;
3431 case 3: // hintp
3432 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3433 break;
3434 case 5: // htba
3435 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3436 break;
3437 case 31: // hstick_cmpr
3439 TCGv_ptr r_tickptr;
3441 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3442 r_tickptr = tcg_temp_new_ptr();
3443 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3444 offsetof(CPUState, hstick));
3445 gen_helper_tick_set_limit(r_tickptr,
3446 cpu_hstick_cmpr);
3447 tcg_temp_free_ptr(r_tickptr);
3449 break;
3450 case 6: // hver readonly
3451 default:
3452 goto illegal_insn;
3454 #endif
3456 break;
3457 #endif
3458 #ifdef TARGET_SPARC64
3459 case 0x2c: /* V9 movcc */
3461 int cc = GET_FIELD_SP(insn, 11, 12);
3462 int cond = GET_FIELD_SP(insn, 14, 17);
3463 TCGv r_cond;
3464 int l1;
3466 r_cond = tcg_temp_new();
3467 if (insn & (1 << 18)) {
3468 if (cc == 0)
3469 gen_cond(r_cond, 0, cond, dc);
3470 else if (cc == 2)
3471 gen_cond(r_cond, 1, cond, dc);
3472 else
3473 goto illegal_insn;
3474 } else {
3475 gen_fcond(r_cond, cc, cond);
3478 l1 = gen_new_label();
3480 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3481 if (IS_IMM) { /* immediate */
3482 TCGv r_const;
3484 simm = GET_FIELD_SPs(insn, 0, 10);
3485 r_const = tcg_const_tl(simm);
3486 gen_movl_TN_reg(rd, r_const);
3487 tcg_temp_free(r_const);
3488 } else {
3489 rs2 = GET_FIELD_SP(insn, 0, 4);
3490 gen_movl_reg_TN(rs2, cpu_tmp0);
3491 gen_movl_TN_reg(rd, cpu_tmp0);
3493 gen_set_label(l1);
3494 tcg_temp_free(r_cond);
3495 break;
3497 case 0x2d: /* V9 sdivx */
3498 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3499 gen_movl_TN_reg(rd, cpu_dst);
3500 break;
3501 case 0x2e: /* V9 popc */
3503 cpu_src2 = get_src2(insn, cpu_src2);
3504 gen_helper_popc(cpu_dst, cpu_src2);
3505 gen_movl_TN_reg(rd, cpu_dst);
3507 case 0x2f: /* V9 movr */
3509 int cond = GET_FIELD_SP(insn, 10, 12);
3510 int l1;
3512 cpu_src1 = get_src1(insn, cpu_src1);
3514 l1 = gen_new_label();
3516 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3517 cpu_src1, 0, l1);
3518 if (IS_IMM) { /* immediate */
3519 TCGv r_const;
3521 simm = GET_FIELD_SPs(insn, 0, 9);
3522 r_const = tcg_const_tl(simm);
3523 gen_movl_TN_reg(rd, r_const);
3524 tcg_temp_free(r_const);
3525 } else {
3526 rs2 = GET_FIELD_SP(insn, 0, 4);
3527 gen_movl_reg_TN(rs2, cpu_tmp0);
3528 gen_movl_TN_reg(rd, cpu_tmp0);
3530 gen_set_label(l1);
3531 break;
3533 #endif
3534 default:
3535 goto illegal_insn;
3538 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3539 #ifdef TARGET_SPARC64
3540 int opf = GET_FIELD_SP(insn, 5, 13);
3541 rs1 = GET_FIELD(insn, 13, 17);
3542 rs2 = GET_FIELD(insn, 27, 31);
3543 if (gen_trap_ifnofpu(dc, cpu_cond))
3544 goto jmp_insn;
3546 switch (opf) {
3547 case 0x000: /* VIS I edge8cc */
3548 case 0x001: /* VIS II edge8n */
3549 case 0x002: /* VIS I edge8lcc */
3550 case 0x003: /* VIS II edge8ln */
3551 case 0x004: /* VIS I edge16cc */
3552 case 0x005: /* VIS II edge16n */
3553 case 0x006: /* VIS I edge16lcc */
3554 case 0x007: /* VIS II edge16ln */
3555 case 0x008: /* VIS I edge32cc */
3556 case 0x009: /* VIS II edge32n */
3557 case 0x00a: /* VIS I edge32lcc */
3558 case 0x00b: /* VIS II edge32ln */
3559 // XXX
3560 goto illegal_insn;
3561 case 0x010: /* VIS I array8 */
3562 CHECK_FPU_FEATURE(dc, VIS1);
3563 cpu_src1 = get_src1(insn, cpu_src1);
3564 gen_movl_reg_TN(rs2, cpu_src2);
3565 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3566 gen_movl_TN_reg(rd, cpu_dst);
3567 break;
3568 case 0x012: /* VIS I array16 */
3569 CHECK_FPU_FEATURE(dc, VIS1);
3570 cpu_src1 = get_src1(insn, cpu_src1);
3571 gen_movl_reg_TN(rs2, cpu_src2);
3572 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3573 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3574 gen_movl_TN_reg(rd, cpu_dst);
3575 break;
3576 case 0x014: /* VIS I array32 */
3577 CHECK_FPU_FEATURE(dc, VIS1);
3578 cpu_src1 = get_src1(insn, cpu_src1);
3579 gen_movl_reg_TN(rs2, cpu_src2);
3580 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3581 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3582 gen_movl_TN_reg(rd, cpu_dst);
3583 break;
3584 case 0x018: /* VIS I alignaddr */
3585 CHECK_FPU_FEATURE(dc, VIS1);
3586 cpu_src1 = get_src1(insn, cpu_src1);
3587 gen_movl_reg_TN(rs2, cpu_src2);
3588 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3589 gen_movl_TN_reg(rd, cpu_dst);
3590 break;
3591 case 0x019: /* VIS II bmask */
3592 case 0x01a: /* VIS I alignaddrl */
3593 // XXX
3594 goto illegal_insn;
3595 case 0x020: /* VIS I fcmple16 */
3596 CHECK_FPU_FEATURE(dc, VIS1);
3597 gen_op_load_fpr_DT0(DFPREG(rs1));
3598 gen_op_load_fpr_DT1(DFPREG(rs2));
3599 gen_helper_fcmple16();
3600 gen_op_store_DT0_fpr(DFPREG(rd));
3601 break;
3602 case 0x022: /* VIS I fcmpne16 */
3603 CHECK_FPU_FEATURE(dc, VIS1);
3604 gen_op_load_fpr_DT0(DFPREG(rs1));
3605 gen_op_load_fpr_DT1(DFPREG(rs2));
3606 gen_helper_fcmpne16();
3607 gen_op_store_DT0_fpr(DFPREG(rd));
3608 break;
3609 case 0x024: /* VIS I fcmple32 */
3610 CHECK_FPU_FEATURE(dc, VIS1);
3611 gen_op_load_fpr_DT0(DFPREG(rs1));
3612 gen_op_load_fpr_DT1(DFPREG(rs2));
3613 gen_helper_fcmple32();
3614 gen_op_store_DT0_fpr(DFPREG(rd));
3615 break;
3616 case 0x026: /* VIS I fcmpne32 */
3617 CHECK_FPU_FEATURE(dc, VIS1);
3618 gen_op_load_fpr_DT0(DFPREG(rs1));
3619 gen_op_load_fpr_DT1(DFPREG(rs2));
3620 gen_helper_fcmpne32();
3621 gen_op_store_DT0_fpr(DFPREG(rd));
3622 break;
3623 case 0x028: /* VIS I fcmpgt16 */
3624 CHECK_FPU_FEATURE(dc, VIS1);
3625 gen_op_load_fpr_DT0(DFPREG(rs1));
3626 gen_op_load_fpr_DT1(DFPREG(rs2));
3627 gen_helper_fcmpgt16();
3628 gen_op_store_DT0_fpr(DFPREG(rd));
3629 break;
3630 case 0x02a: /* VIS I fcmpeq16 */
3631 CHECK_FPU_FEATURE(dc, VIS1);
3632 gen_op_load_fpr_DT0(DFPREG(rs1));
3633 gen_op_load_fpr_DT1(DFPREG(rs2));
3634 gen_helper_fcmpeq16();
3635 gen_op_store_DT0_fpr(DFPREG(rd));
3636 break;
3637 case 0x02c: /* VIS I fcmpgt32 */
3638 CHECK_FPU_FEATURE(dc, VIS1);
3639 gen_op_load_fpr_DT0(DFPREG(rs1));
3640 gen_op_load_fpr_DT1(DFPREG(rs2));
3641 gen_helper_fcmpgt32();
3642 gen_op_store_DT0_fpr(DFPREG(rd));
3643 break;
3644 case 0x02e: /* VIS I fcmpeq32 */
3645 CHECK_FPU_FEATURE(dc, VIS1);
3646 gen_op_load_fpr_DT0(DFPREG(rs1));
3647 gen_op_load_fpr_DT1(DFPREG(rs2));
3648 gen_helper_fcmpeq32();
3649 gen_op_store_DT0_fpr(DFPREG(rd));
3650 break;
3651 case 0x031: /* VIS I fmul8x16 */
3652 CHECK_FPU_FEATURE(dc, VIS1);
3653 gen_op_load_fpr_DT0(DFPREG(rs1));
3654 gen_op_load_fpr_DT1(DFPREG(rs2));
3655 gen_helper_fmul8x16();
3656 gen_op_store_DT0_fpr(DFPREG(rd));
3657 break;
3658 case 0x033: /* VIS I fmul8x16au */
3659 CHECK_FPU_FEATURE(dc, VIS1);
3660 gen_op_load_fpr_DT0(DFPREG(rs1));
3661 gen_op_load_fpr_DT1(DFPREG(rs2));
3662 gen_helper_fmul8x16au();
3663 gen_op_store_DT0_fpr(DFPREG(rd));
3664 break;
3665 case 0x035: /* VIS I fmul8x16al */
3666 CHECK_FPU_FEATURE(dc, VIS1);
3667 gen_op_load_fpr_DT0(DFPREG(rs1));
3668 gen_op_load_fpr_DT1(DFPREG(rs2));
3669 gen_helper_fmul8x16al();
3670 gen_op_store_DT0_fpr(DFPREG(rd));
3671 break;
3672 case 0x036: /* VIS I fmul8sux16 */
3673 CHECK_FPU_FEATURE(dc, VIS1);
3674 gen_op_load_fpr_DT0(DFPREG(rs1));
3675 gen_op_load_fpr_DT1(DFPREG(rs2));
3676 gen_helper_fmul8sux16();
3677 gen_op_store_DT0_fpr(DFPREG(rd));
3678 break;
3679 case 0x037: /* VIS I fmul8ulx16 */
3680 CHECK_FPU_FEATURE(dc, VIS1);
3681 gen_op_load_fpr_DT0(DFPREG(rs1));
3682 gen_op_load_fpr_DT1(DFPREG(rs2));
3683 gen_helper_fmul8ulx16();
3684 gen_op_store_DT0_fpr(DFPREG(rd));
3685 break;
3686 case 0x038: /* VIS I fmuld8sux16 */
3687 CHECK_FPU_FEATURE(dc, VIS1);
3688 gen_op_load_fpr_DT0(DFPREG(rs1));
3689 gen_op_load_fpr_DT1(DFPREG(rs2));
3690 gen_helper_fmuld8sux16();
3691 gen_op_store_DT0_fpr(DFPREG(rd));
3692 break;
3693 case 0x039: /* VIS I fmuld8ulx16 */
3694 CHECK_FPU_FEATURE(dc, VIS1);
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 gen_helper_fmuld8ulx16();
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3699 break;
3700 case 0x03a: /* VIS I fpack32 */
3701 case 0x03b: /* VIS I fpack16 */
3702 case 0x03d: /* VIS I fpackfix */
3703 case 0x03e: /* VIS I pdist */
3704 // XXX
3705 goto illegal_insn;
3706 case 0x048: /* VIS I faligndata */
3707 CHECK_FPU_FEATURE(dc, VIS1);
3708 gen_op_load_fpr_DT0(DFPREG(rs1));
3709 gen_op_load_fpr_DT1(DFPREG(rs2));
3710 gen_helper_faligndata();
3711 gen_op_store_DT0_fpr(DFPREG(rd));
3712 break;
3713 case 0x04b: /* VIS I fpmerge */
3714 CHECK_FPU_FEATURE(dc, VIS1);
3715 gen_op_load_fpr_DT0(DFPREG(rs1));
3716 gen_op_load_fpr_DT1(DFPREG(rs2));
3717 gen_helper_fpmerge();
3718 gen_op_store_DT0_fpr(DFPREG(rd));
3719 break;
3720 case 0x04c: /* VIS II bshuffle */
3721 // XXX
3722 goto illegal_insn;
3723 case 0x04d: /* VIS I fexpand */
3724 CHECK_FPU_FEATURE(dc, VIS1);
3725 gen_op_load_fpr_DT0(DFPREG(rs1));
3726 gen_op_load_fpr_DT1(DFPREG(rs2));
3727 gen_helper_fexpand();
3728 gen_op_store_DT0_fpr(DFPREG(rd));
3729 break;
3730 case 0x050: /* VIS I fpadd16 */
3731 CHECK_FPU_FEATURE(dc, VIS1);
3732 gen_op_load_fpr_DT0(DFPREG(rs1));
3733 gen_op_load_fpr_DT1(DFPREG(rs2));
3734 gen_helper_fpadd16();
3735 gen_op_store_DT0_fpr(DFPREG(rd));
3736 break;
3737 case 0x051: /* VIS I fpadd16s */
3738 CHECK_FPU_FEATURE(dc, VIS1);
3739 gen_helper_fpadd16s(cpu_fpr[rd],
3740 cpu_fpr[rs1], cpu_fpr[rs2]);
3741 break;
3742 case 0x052: /* VIS I fpadd32 */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 gen_op_load_fpr_DT0(DFPREG(rs1));
3745 gen_op_load_fpr_DT1(DFPREG(rs2));
3746 gen_helper_fpadd32();
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3748 break;
3749 case 0x053: /* VIS I fpadd32s */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 gen_helper_fpadd32s(cpu_fpr[rd],
3752 cpu_fpr[rs1], cpu_fpr[rs2]);
3753 break;
3754 case 0x054: /* VIS I fpsub16 */
3755 CHECK_FPU_FEATURE(dc, VIS1);
3756 gen_op_load_fpr_DT0(DFPREG(rs1));
3757 gen_op_load_fpr_DT1(DFPREG(rs2));
3758 gen_helper_fpsub16();
3759 gen_op_store_DT0_fpr(DFPREG(rd));
3760 break;
3761 case 0x055: /* VIS I fpsub16s */
3762 CHECK_FPU_FEATURE(dc, VIS1);
3763 gen_helper_fpsub16s(cpu_fpr[rd],
3764 cpu_fpr[rs1], cpu_fpr[rs2]);
3765 break;
3766 case 0x056: /* VIS I fpsub32 */
3767 CHECK_FPU_FEATURE(dc, VIS1);
3768 gen_op_load_fpr_DT0(DFPREG(rs1));
3769 gen_op_load_fpr_DT1(DFPREG(rs2));
3770 gen_helper_fpsub32();
3771 gen_op_store_DT0_fpr(DFPREG(rd));
3772 break;
3773 case 0x057: /* VIS I fpsub32s */
3774 CHECK_FPU_FEATURE(dc, VIS1);
3775 gen_helper_fpsub32s(cpu_fpr[rd],
3776 cpu_fpr[rs1], cpu_fpr[rs2]);
3777 break;
3778 case 0x060: /* VIS I fzero */
3779 CHECK_FPU_FEATURE(dc, VIS1);
3780 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3781 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3782 break;
3783 case 0x061: /* VIS I fzeros */
3784 CHECK_FPU_FEATURE(dc, VIS1);
3785 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3786 break;
3787 case 0x062: /* VIS I fnor */
3788 CHECK_FPU_FEATURE(dc, VIS1);
3789 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3790 cpu_fpr[DFPREG(rs2)]);
3791 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3792 cpu_fpr[DFPREG(rs2) + 1]);
3793 break;
3794 case 0x063: /* VIS I fnors */
3795 CHECK_FPU_FEATURE(dc, VIS1);
3796 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3797 break;
3798 case 0x064: /* VIS I fandnot2 */
3799 CHECK_FPU_FEATURE(dc, VIS1);
3800 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3801 cpu_fpr[DFPREG(rs2)]);
3802 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3803 cpu_fpr[DFPREG(rs1) + 1],
3804 cpu_fpr[DFPREG(rs2) + 1]);
3805 break;
3806 case 0x065: /* VIS I fandnot2s */
3807 CHECK_FPU_FEATURE(dc, VIS1);
3808 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3809 break;
3810 case 0x066: /* VIS I fnot2 */
3811 CHECK_FPU_FEATURE(dc, VIS1);
3812 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3813 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3814 cpu_fpr[DFPREG(rs2) + 1]);
3815 break;
3816 case 0x067: /* VIS I fnot2s */
3817 CHECK_FPU_FEATURE(dc, VIS1);
3818 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3819 break;
3820 case 0x068: /* VIS I fandnot1 */
3821 CHECK_FPU_FEATURE(dc, VIS1);
3822 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3823 cpu_fpr[DFPREG(rs1)]);
3824 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3825 cpu_fpr[DFPREG(rs2) + 1],
3826 cpu_fpr[DFPREG(rs1) + 1]);
3827 break;
3828 case 0x069: /* VIS I fandnot1s */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3831 break;
3832 case 0x06a: /* VIS I fnot1 */
3833 CHECK_FPU_FEATURE(dc, VIS1);
3834 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3835 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3836 cpu_fpr[DFPREG(rs1) + 1]);
3837 break;
3838 case 0x06b: /* VIS I fnot1s */
3839 CHECK_FPU_FEATURE(dc, VIS1);
3840 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3841 break;
3842 case 0x06c: /* VIS I fxor */
3843 CHECK_FPU_FEATURE(dc, VIS1);
3844 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3845 cpu_fpr[DFPREG(rs2)]);
3846 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3847 cpu_fpr[DFPREG(rs1) + 1],
3848 cpu_fpr[DFPREG(rs2) + 1]);
3849 break;
3850 case 0x06d: /* VIS I fxors */
3851 CHECK_FPU_FEATURE(dc, VIS1);
3852 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3853 break;
3854 case 0x06e: /* VIS I fnand */
3855 CHECK_FPU_FEATURE(dc, VIS1);
3856 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3857 cpu_fpr[DFPREG(rs2)]);
3858 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3859 cpu_fpr[DFPREG(rs2) + 1]);
3860 break;
3861 case 0x06f: /* VIS I fnands */
3862 CHECK_FPU_FEATURE(dc, VIS1);
3863 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3864 break;
3865 case 0x070: /* VIS I fand */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3868 cpu_fpr[DFPREG(rs2)]);
3869 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3870 cpu_fpr[DFPREG(rs1) + 1],
3871 cpu_fpr[DFPREG(rs2) + 1]);
3872 break;
3873 case 0x071: /* VIS I fands */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3876 break;
3877 case 0x072: /* VIS I fxnor */
3878 CHECK_FPU_FEATURE(dc, VIS1);
3879 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3880 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3881 cpu_fpr[DFPREG(rs1)]);
3882 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3883 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3884 cpu_fpr[DFPREG(rs1) + 1]);
3885 break;
3886 case 0x073: /* VIS I fxnors */
3887 CHECK_FPU_FEATURE(dc, VIS1);
3888 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3889 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3890 break;
3891 case 0x074: /* VIS I fsrc1 */
3892 CHECK_FPU_FEATURE(dc, VIS1);
3893 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3894 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3895 cpu_fpr[DFPREG(rs1) + 1]);
3896 break;
3897 case 0x075: /* VIS I fsrc1s */
3898 CHECK_FPU_FEATURE(dc, VIS1);
3899 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3900 break;
3901 case 0x076: /* VIS I fornot2 */
3902 CHECK_FPU_FEATURE(dc, VIS1);
3903 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3904 cpu_fpr[DFPREG(rs2)]);
3905 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3906 cpu_fpr[DFPREG(rs1) + 1],
3907 cpu_fpr[DFPREG(rs2) + 1]);
3908 break;
3909 case 0x077: /* VIS I fornot2s */
3910 CHECK_FPU_FEATURE(dc, VIS1);
3911 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3912 break;
3913 case 0x078: /* VIS I fsrc2 */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 gen_op_load_fpr_DT0(DFPREG(rs2));
3916 gen_op_store_DT0_fpr(DFPREG(rd));
3917 break;
3918 case 0x079: /* VIS I fsrc2s */
3919 CHECK_FPU_FEATURE(dc, VIS1);
3920 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3921 break;
3922 case 0x07a: /* VIS I fornot1 */
3923 CHECK_FPU_FEATURE(dc, VIS1);
3924 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3925 cpu_fpr[DFPREG(rs1)]);
3926 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3927 cpu_fpr[DFPREG(rs2) + 1],
3928 cpu_fpr[DFPREG(rs1) + 1]);
3929 break;
3930 case 0x07b: /* VIS I fornot1s */
3931 CHECK_FPU_FEATURE(dc, VIS1);
3932 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3933 break;
3934 case 0x07c: /* VIS I for */
3935 CHECK_FPU_FEATURE(dc, VIS1);
3936 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3937 cpu_fpr[DFPREG(rs2)]);
3938 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
3939 cpu_fpr[DFPREG(rs1) + 1],
3940 cpu_fpr[DFPREG(rs2) + 1]);
3941 break;
3942 case 0x07d: /* VIS I fors */
3943 CHECK_FPU_FEATURE(dc, VIS1);
3944 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3945 break;
3946 case 0x07e: /* VIS I fone */
3947 CHECK_FPU_FEATURE(dc, VIS1);
3948 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3949 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3950 break;
3951 case 0x07f: /* VIS I fones */
3952 CHECK_FPU_FEATURE(dc, VIS1);
3953 tcg_gen_movi_i32(cpu_fpr[rd], -1);
3954 break;
3955 case 0x080: /* VIS I shutdown */
3956 case 0x081: /* VIS II siam */
3957 // XXX
3958 goto illegal_insn;
3959 default:
3960 goto illegal_insn;
3962 #else
3963 goto ncp_insn;
3964 #endif
3965 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3966 #ifdef TARGET_SPARC64
3967 goto illegal_insn;
3968 #else
3969 goto ncp_insn;
3970 #endif
3971 #ifdef TARGET_SPARC64
3972 } else if (xop == 0x39) { /* V9 return */
3973 TCGv_i32 r_const;
3975 save_state(dc, cpu_cond);
3976 cpu_src1 = get_src1(insn, cpu_src1);
3977 if (IS_IMM) { /* immediate */
3978 simm = GET_FIELDs(insn, 19, 31);
3979 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3980 } else { /* register */
3981 rs2 = GET_FIELD(insn, 27, 31);
3982 if (rs2) {
3983 gen_movl_reg_TN(rs2, cpu_src2);
3984 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3985 } else
3986 tcg_gen_mov_tl(cpu_dst, cpu_src1);
3988 gen_helper_restore();
3989 gen_mov_pc_npc(dc, cpu_cond);
3990 r_const = tcg_const_i32(3);
3991 gen_helper_check_align(cpu_dst, r_const);
3992 tcg_temp_free_i32(r_const);
3993 tcg_gen_mov_tl(cpu_npc, cpu_dst);
3994 dc->npc = DYNAMIC_PC;
3995 goto jmp_insn;
3996 #endif
3997 } else {
3998 cpu_src1 = get_src1(insn, cpu_src1);
3999 if (IS_IMM) { /* immediate */
4000 simm = GET_FIELDs(insn, 19, 31);
4001 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4002 } else { /* register */
4003 rs2 = GET_FIELD(insn, 27, 31);
4004 if (rs2) {
4005 gen_movl_reg_TN(rs2, cpu_src2);
4006 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4007 } else
4008 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4010 switch (xop) {
4011 case 0x38: /* jmpl */
4013 TCGv r_pc;
4014 TCGv_i32 r_const;
4016 r_pc = tcg_const_tl(dc->pc);
4017 gen_movl_TN_reg(rd, r_pc);
4018 tcg_temp_free(r_pc);
4019 gen_mov_pc_npc(dc, cpu_cond);
4020 r_const = tcg_const_i32(3);
4021 gen_helper_check_align(cpu_dst, r_const);
4022 tcg_temp_free_i32(r_const);
4023 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4024 dc->npc = DYNAMIC_PC;
4026 goto jmp_insn;
4027 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4028 case 0x39: /* rett, V9 return */
4030 TCGv_i32 r_const;
4032 if (!supervisor(dc))
4033 goto priv_insn;
4034 gen_mov_pc_npc(dc, cpu_cond);
4035 r_const = tcg_const_i32(3);
4036 gen_helper_check_align(cpu_dst, r_const);
4037 tcg_temp_free_i32(r_const);
4038 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4039 dc->npc = DYNAMIC_PC;
4040 gen_helper_rett();
4042 goto jmp_insn;
4043 #endif
4044 case 0x3b: /* flush */
4045 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4046 goto unimp_flush;
4047 gen_helper_flush(cpu_dst);
4048 break;
4049 case 0x3c: /* save */
4050 save_state(dc, cpu_cond);
4051 gen_helper_save();
4052 gen_movl_TN_reg(rd, cpu_dst);
4053 break;
4054 case 0x3d: /* restore */
4055 save_state(dc, cpu_cond);
4056 gen_helper_restore();
4057 gen_movl_TN_reg(rd, cpu_dst);
4058 break;
4059 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4060 case 0x3e: /* V9 done/retry */
4062 switch (rd) {
4063 case 0:
4064 if (!supervisor(dc))
4065 goto priv_insn;
4066 dc->npc = DYNAMIC_PC;
4067 dc->pc = DYNAMIC_PC;
4068 gen_helper_done();
4069 goto jmp_insn;
4070 case 1:
4071 if (!supervisor(dc))
4072 goto priv_insn;
4073 dc->npc = DYNAMIC_PC;
4074 dc->pc = DYNAMIC_PC;
4075 gen_helper_retry();
4076 goto jmp_insn;
4077 default:
4078 goto illegal_insn;
4081 break;
4082 #endif
4083 default:
4084 goto illegal_insn;
4087 break;
4089 break;
4090 case 3: /* load/store instructions */
4092 unsigned int xop = GET_FIELD(insn, 7, 12);
4094 cpu_src1 = get_src1(insn, cpu_src1);
4095 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4096 rs2 = GET_FIELD(insn, 27, 31);
4097 gen_movl_reg_TN(rs2, cpu_src2);
4098 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4099 } else if (IS_IMM) { /* immediate */
4100 simm = GET_FIELDs(insn, 19, 31);
4101 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4102 } else { /* register */
4103 rs2 = GET_FIELD(insn, 27, 31);
4104 if (rs2 != 0) {
4105 gen_movl_reg_TN(rs2, cpu_src2);
4106 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4107 } else
4108 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4110 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4111 (xop > 0x17 && xop <= 0x1d ) ||
4112 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4113 switch (xop) {
4114 case 0x0: /* ld, V9 lduw, load unsigned word */
4115 gen_address_mask(dc, cpu_addr);
4116 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4117 break;
4118 case 0x1: /* ldub, load unsigned byte */
4119 gen_address_mask(dc, cpu_addr);
4120 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4121 break;
4122 case 0x2: /* lduh, load unsigned halfword */
4123 gen_address_mask(dc, cpu_addr);
4124 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4125 break;
4126 case 0x3: /* ldd, load double word */
4127 if (rd & 1)
4128 goto illegal_insn;
4129 else {
4130 TCGv_i32 r_const;
4132 save_state(dc, cpu_cond);
4133 r_const = tcg_const_i32(7);
4134 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4135 tcg_temp_free_i32(r_const);
4136 gen_address_mask(dc, cpu_addr);
4137 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4138 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4139 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4140 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4141 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4142 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4143 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4145 break;
4146 case 0x9: /* ldsb, load signed byte */
4147 gen_address_mask(dc, cpu_addr);
4148 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4149 break;
4150 case 0xa: /* ldsh, load signed halfword */
4151 gen_address_mask(dc, cpu_addr);
4152 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4153 break;
4154 case 0xd: /* ldstub -- XXX: should be atomically */
4156 TCGv r_const;
4158 gen_address_mask(dc, cpu_addr);
4159 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4160 r_const = tcg_const_tl(0xff);
4161 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4162 tcg_temp_free(r_const);
4164 break;
4165 case 0x0f: /* swap, swap register with memory. Also
4166 atomically */
4167 CHECK_IU_FEATURE(dc, SWAP);
4168 gen_movl_reg_TN(rd, cpu_val);
4169 gen_address_mask(dc, cpu_addr);
4170 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4171 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4172 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4173 break;
4174 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4175 case 0x10: /* lda, V9 lduwa, load word alternate */
4176 #ifndef TARGET_SPARC64
4177 if (IS_IMM)
4178 goto illegal_insn;
4179 if (!supervisor(dc))
4180 goto priv_insn;
4181 #endif
4182 save_state(dc, cpu_cond);
4183 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4184 break;
4185 case 0x11: /* lduba, load unsigned byte alternate */
4186 #ifndef TARGET_SPARC64
4187 if (IS_IMM)
4188 goto illegal_insn;
4189 if (!supervisor(dc))
4190 goto priv_insn;
4191 #endif
4192 save_state(dc, cpu_cond);
4193 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4194 break;
4195 case 0x12: /* lduha, load unsigned halfword alternate */
4196 #ifndef TARGET_SPARC64
4197 if (IS_IMM)
4198 goto illegal_insn;
4199 if (!supervisor(dc))
4200 goto priv_insn;
4201 #endif
4202 save_state(dc, cpu_cond);
4203 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4204 break;
4205 case 0x13: /* ldda, load double word alternate */
4206 #ifndef TARGET_SPARC64
4207 if (IS_IMM)
4208 goto illegal_insn;
4209 if (!supervisor(dc))
4210 goto priv_insn;
4211 #endif
4212 if (rd & 1)
4213 goto illegal_insn;
4214 save_state(dc, cpu_cond);
4215 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4216 goto skip_move;
4217 case 0x19: /* ldsba, load signed byte alternate */
4218 #ifndef TARGET_SPARC64
4219 if (IS_IMM)
4220 goto illegal_insn;
4221 if (!supervisor(dc))
4222 goto priv_insn;
4223 #endif
4224 save_state(dc, cpu_cond);
4225 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4226 break;
4227 case 0x1a: /* ldsha, load signed halfword alternate */
4228 #ifndef TARGET_SPARC64
4229 if (IS_IMM)
4230 goto illegal_insn;
4231 if (!supervisor(dc))
4232 goto priv_insn;
4233 #endif
4234 save_state(dc, cpu_cond);
4235 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4236 break;
4237 case 0x1d: /* ldstuba -- XXX: should be atomically */
4238 #ifndef TARGET_SPARC64
4239 if (IS_IMM)
4240 goto illegal_insn;
4241 if (!supervisor(dc))
4242 goto priv_insn;
4243 #endif
4244 save_state(dc, cpu_cond);
4245 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4246 break;
4247 case 0x1f: /* swapa, swap reg with alt. memory. Also
4248 atomically */
4249 CHECK_IU_FEATURE(dc, SWAP);
4250 #ifndef TARGET_SPARC64
4251 if (IS_IMM)
4252 goto illegal_insn;
4253 if (!supervisor(dc))
4254 goto priv_insn;
4255 #endif
4256 save_state(dc, cpu_cond);
4257 gen_movl_reg_TN(rd, cpu_val);
4258 gen_swap_asi(cpu_val, cpu_addr, insn);
4259 break;
4261 #ifndef TARGET_SPARC64
4262 case 0x30: /* ldc */
4263 case 0x31: /* ldcsr */
4264 case 0x33: /* lddc */
4265 goto ncp_insn;
4266 #endif
4267 #endif
4268 #ifdef TARGET_SPARC64
4269 case 0x08: /* V9 ldsw */
4270 gen_address_mask(dc, cpu_addr);
4271 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4272 break;
4273 case 0x0b: /* V9 ldx */
4274 gen_address_mask(dc, cpu_addr);
4275 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4276 break;
4277 case 0x18: /* V9 ldswa */
4278 save_state(dc, cpu_cond);
4279 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4280 break;
4281 case 0x1b: /* V9 ldxa */
4282 save_state(dc, cpu_cond);
4283 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4284 break;
4285 case 0x2d: /* V9 prefetch, no effect */
4286 goto skip_move;
4287 case 0x30: /* V9 ldfa */
4288 save_state(dc, cpu_cond);
4289 gen_ldf_asi(cpu_addr, insn, 4, rd);
4290 goto skip_move;
4291 case 0x33: /* V9 lddfa */
4292 save_state(dc, cpu_cond);
4293 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4294 goto skip_move;
4295 case 0x3d: /* V9 prefetcha, no effect */
4296 goto skip_move;
4297 case 0x32: /* V9 ldqfa */
4298 CHECK_FPU_FEATURE(dc, FLOAT128);
4299 save_state(dc, cpu_cond);
4300 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4301 goto skip_move;
4302 #endif
4303 default:
4304 goto illegal_insn;
4306 gen_movl_TN_reg(rd, cpu_val);
4307 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4308 skip_move: ;
4309 #endif
4310 } else if (xop >= 0x20 && xop < 0x24) {
4311 if (gen_trap_ifnofpu(dc, cpu_cond))
4312 goto jmp_insn;
4313 save_state(dc, cpu_cond);
4314 switch (xop) {
4315 case 0x20: /* ldf, load fpreg */
4316 gen_address_mask(dc, cpu_addr);
4317 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4318 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4319 break;
4320 case 0x21: /* ldfsr, V9 ldxfsr */
4321 #ifdef TARGET_SPARC64
4322 gen_address_mask(dc, cpu_addr);
4323 if (rd == 1) {
4324 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4325 gen_helper_ldxfsr(cpu_tmp64);
4326 } else
4327 #else
4329 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4330 gen_helper_ldfsr(cpu_tmp32);
4332 #endif
4333 break;
4334 case 0x22: /* ldqf, load quad fpreg */
4336 TCGv_i32 r_const;
4338 CHECK_FPU_FEATURE(dc, FLOAT128);
4339 r_const = tcg_const_i32(dc->mem_idx);
4340 gen_helper_ldqf(cpu_addr, r_const);
4341 tcg_temp_free_i32(r_const);
4342 gen_op_store_QT0_fpr(QFPREG(rd));
4344 break;
4345 case 0x23: /* lddf, load double fpreg */
4347 TCGv_i32 r_const;
4349 r_const = tcg_const_i32(dc->mem_idx);
4350 gen_helper_lddf(cpu_addr, r_const);
4351 tcg_temp_free_i32(r_const);
4352 gen_op_store_DT0_fpr(DFPREG(rd));
4354 break;
4355 default:
4356 goto illegal_insn;
4358 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4359 xop == 0xe || xop == 0x1e) {
4360 gen_movl_reg_TN(rd, cpu_val);
4361 switch (xop) {
4362 case 0x4: /* st, store word */
4363 gen_address_mask(dc, cpu_addr);
4364 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4365 break;
4366 case 0x5: /* stb, store byte */
4367 gen_address_mask(dc, cpu_addr);
4368 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4369 break;
4370 case 0x6: /* sth, store halfword */
4371 gen_address_mask(dc, cpu_addr);
4372 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4373 break;
4374 case 0x7: /* std, store double word */
4375 if (rd & 1)
4376 goto illegal_insn;
4377 else {
4378 TCGv_i32 r_const;
4380 save_state(dc, cpu_cond);
4381 gen_address_mask(dc, cpu_addr);
4382 r_const = tcg_const_i32(7);
4383 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4384 tcg_temp_free_i32(r_const);
4385 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4386 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4387 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4389 break;
4390 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4391 case 0x14: /* sta, V9 stwa, store word alternate */
4392 #ifndef TARGET_SPARC64
4393 if (IS_IMM)
4394 goto illegal_insn;
4395 if (!supervisor(dc))
4396 goto priv_insn;
4397 #endif
4398 save_state(dc, cpu_cond);
4399 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4400 break;
4401 case 0x15: /* stba, store byte alternate */
4402 #ifndef TARGET_SPARC64
4403 if (IS_IMM)
4404 goto illegal_insn;
4405 if (!supervisor(dc))
4406 goto priv_insn;
4407 #endif
4408 save_state(dc, cpu_cond);
4409 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4410 break;
4411 case 0x16: /* stha, store halfword alternate */
4412 #ifndef TARGET_SPARC64
4413 if (IS_IMM)
4414 goto illegal_insn;
4415 if (!supervisor(dc))
4416 goto priv_insn;
4417 #endif
4418 save_state(dc, cpu_cond);
4419 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4420 break;
4421 case 0x17: /* stda, store double word alternate */
4422 #ifndef TARGET_SPARC64
4423 if (IS_IMM)
4424 goto illegal_insn;
4425 if (!supervisor(dc))
4426 goto priv_insn;
4427 #endif
4428 if (rd & 1)
4429 goto illegal_insn;
4430 else {
4431 save_state(dc, cpu_cond);
4432 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4434 break;
4435 #endif
4436 #ifdef TARGET_SPARC64
4437 case 0x0e: /* V9 stx */
4438 gen_address_mask(dc, cpu_addr);
4439 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4440 break;
4441 case 0x1e: /* V9 stxa */
4442 save_state(dc, cpu_cond);
4443 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4444 break;
4445 #endif
4446 default:
4447 goto illegal_insn;
4449 } else if (xop > 0x23 && xop < 0x28) {
4450 if (gen_trap_ifnofpu(dc, cpu_cond))
4451 goto jmp_insn;
4452 save_state(dc, cpu_cond);
4453 switch (xop) {
4454 case 0x24: /* stf, store fpreg */
4455 gen_address_mask(dc, cpu_addr);
4456 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4457 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4458 break;
4459 case 0x25: /* stfsr, V9 stxfsr */
4460 #ifdef TARGET_SPARC64
4461 gen_address_mask(dc, cpu_addr);
4462 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4463 if (rd == 1)
4464 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4465 else
4466 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4467 #else
4468 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4469 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4470 #endif
4471 break;
4472 case 0x26:
4473 #ifdef TARGET_SPARC64
4474 /* V9 stqf, store quad fpreg */
4476 TCGv_i32 r_const;
4478 CHECK_FPU_FEATURE(dc, FLOAT128);
4479 gen_op_load_fpr_QT0(QFPREG(rd));
4480 r_const = tcg_const_i32(dc->mem_idx);
4481 gen_helper_stqf(cpu_addr, r_const);
4482 tcg_temp_free_i32(r_const);
4484 break;
4485 #else /* !TARGET_SPARC64 */
4486 /* stdfq, store floating point queue */
4487 #if defined(CONFIG_USER_ONLY)
4488 goto illegal_insn;
4489 #else
4490 if (!supervisor(dc))
4491 goto priv_insn;
4492 if (gen_trap_ifnofpu(dc, cpu_cond))
4493 goto jmp_insn;
4494 goto nfq_insn;
4495 #endif
4496 #endif
4497 case 0x27: /* stdf, store double fpreg */
4499 TCGv_i32 r_const;
4501 gen_op_load_fpr_DT0(DFPREG(rd));
4502 r_const = tcg_const_i32(dc->mem_idx);
4503 gen_helper_stdf(cpu_addr, r_const);
4504 tcg_temp_free_i32(r_const);
4506 break;
4507 default:
4508 goto illegal_insn;
4510 } else if (xop > 0x33 && xop < 0x3f) {
4511 save_state(dc, cpu_cond);
4512 switch (xop) {
4513 #ifdef TARGET_SPARC64
4514 case 0x34: /* V9 stfa */
4515 gen_stf_asi(cpu_addr, insn, 4, rd);
4516 break;
4517 case 0x36: /* V9 stqfa */
4519 TCGv_i32 r_const;
4521 CHECK_FPU_FEATURE(dc, FLOAT128);
4522 r_const = tcg_const_i32(7);
4523 gen_helper_check_align(cpu_addr, r_const);
4524 tcg_temp_free_i32(r_const);
4525 gen_op_load_fpr_QT0(QFPREG(rd));
4526 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4528 break;
4529 case 0x37: /* V9 stdfa */
4530 gen_op_load_fpr_DT0(DFPREG(rd));
4531 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4532 break;
4533 case 0x3c: /* V9 casa */
4534 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4535 gen_movl_TN_reg(rd, cpu_val);
4536 break;
4537 case 0x3e: /* V9 casxa */
4538 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4539 gen_movl_TN_reg(rd, cpu_val);
4540 break;
4541 #else
4542 case 0x34: /* stc */
4543 case 0x35: /* stcsr */
4544 case 0x36: /* stdcq */
4545 case 0x37: /* stdc */
4546 goto ncp_insn;
4547 #endif
4548 default:
4549 goto illegal_insn;
4551 } else
4552 goto illegal_insn;
4554 break;
4556 /* default case for non jump instructions */
4557 if (dc->npc == DYNAMIC_PC) {
4558 dc->pc = DYNAMIC_PC;
4559 gen_op_next_insn();
4560 } else if (dc->npc == JUMP_PC) {
4561 /* we can do a static jump */
4562 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4563 dc->is_br = 1;
4564 } else {
4565 dc->pc = dc->npc;
4566 dc->npc = dc->npc + 4;
4568 jmp_insn:
4569 return;
4570 illegal_insn:
4572 TCGv_i32 r_const;
4574 save_state(dc, cpu_cond);
4575 r_const = tcg_const_i32(TT_ILL_INSN);
4576 gen_helper_raise_exception(r_const);
4577 tcg_temp_free_i32(r_const);
4578 dc->is_br = 1;
4580 return;
4581 unimp_flush:
4583 TCGv_i32 r_const;
4585 save_state(dc, cpu_cond);
4586 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4587 gen_helper_raise_exception(r_const);
4588 tcg_temp_free_i32(r_const);
4589 dc->is_br = 1;
4591 return;
4592 #if !defined(CONFIG_USER_ONLY)
4593 priv_insn:
4595 TCGv_i32 r_const;
4597 save_state(dc, cpu_cond);
4598 r_const = tcg_const_i32(TT_PRIV_INSN);
4599 gen_helper_raise_exception(r_const);
4600 tcg_temp_free_i32(r_const);
4601 dc->is_br = 1;
4603 return;
4604 #endif
4605 nfpu_insn:
4606 save_state(dc, cpu_cond);
4607 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4608 dc->is_br = 1;
4609 return;
4610 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4611 nfq_insn:
4612 save_state(dc, cpu_cond);
4613 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4614 dc->is_br = 1;
4615 return;
4616 #endif
4617 #ifndef TARGET_SPARC64
4618 ncp_insn:
4620 TCGv r_const;
4622 save_state(dc, cpu_cond);
4623 r_const = tcg_const_i32(TT_NCP_INSN);
4624 gen_helper_raise_exception(r_const);
4625 tcg_temp_free(r_const);
4626 dc->is_br = 1;
4628 return;
4629 #endif
4632 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4633 int spc, CPUSPARCState *env)
4635 target_ulong pc_start, last_pc;
4636 uint16_t *gen_opc_end;
4637 DisasContext dc1, *dc = &dc1;
4638 CPUBreakpoint *bp;
4639 int j, lj = -1;
4640 int num_insns;
4641 int max_insns;
4643 memset(dc, 0, sizeof(DisasContext));
4644 dc->tb = tb;
4645 pc_start = tb->pc;
4646 dc->pc = pc_start;
4647 last_pc = dc->pc;
4648 dc->npc = (target_ulong) tb->cs_base;
4649 dc->cc_op = CC_OP_DYNAMIC;
4650 dc->mem_idx = cpu_mmu_index(env);
4651 dc->def = env->def;
4652 if ((dc->def->features & CPU_FEATURE_FLOAT))
4653 dc->fpu_enabled = cpu_fpu_enabled(env);
4654 else
4655 dc->fpu_enabled = 0;
4656 #ifdef TARGET_SPARC64
4657 dc->address_mask_32bit = env->pstate & PS_AM;
4658 #endif
4659 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4661 cpu_tmp0 = tcg_temp_new();
4662 cpu_tmp32 = tcg_temp_new_i32();
4663 cpu_tmp64 = tcg_temp_new_i64();
4665 cpu_dst = tcg_temp_local_new();
4667 // loads and stores
4668 cpu_val = tcg_temp_local_new();
4669 cpu_addr = tcg_temp_local_new();
4671 num_insns = 0;
4672 max_insns = tb->cflags & CF_COUNT_MASK;
4673 if (max_insns == 0)
4674 max_insns = CF_COUNT_MASK;
4675 gen_icount_start();
4676 do {
4677 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4678 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4679 if (bp->pc == dc->pc) {
4680 if (dc->pc != pc_start)
4681 save_state(dc, cpu_cond);
4682 gen_helper_debug();
4683 tcg_gen_exit_tb(0);
4684 dc->is_br = 1;
4685 goto exit_gen_loop;
4689 if (spc) {
4690 qemu_log("Search PC...\n");
4691 j = gen_opc_ptr - gen_opc_buf;
4692 if (lj < j) {
4693 lj++;
4694 while (lj < j)
4695 gen_opc_instr_start[lj++] = 0;
4696 gen_opc_pc[lj] = dc->pc;
4697 gen_opc_npc[lj] = dc->npc;
4698 gen_opc_instr_start[lj] = 1;
4699 gen_opc_icount[lj] = num_insns;
4702 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4703 gen_io_start();
4704 last_pc = dc->pc;
4705 disas_sparc_insn(dc);
4706 num_insns++;
4708 if (dc->is_br)
4709 break;
4710 /* if the next PC is different, we abort now */
4711 if (dc->pc != (last_pc + 4))
4712 break;
4713 /* if we reach a page boundary, we stop generation so that the
4714 PC of a TT_TFAULT exception is always in the right page */
4715 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4716 break;
4717 /* if single step mode, we generate only one instruction and
4718 generate an exception */
4719 if (env->singlestep_enabled || singlestep) {
4720 tcg_gen_movi_tl(cpu_pc, dc->pc);
4721 tcg_gen_exit_tb(0);
4722 break;
4724 } while ((gen_opc_ptr < gen_opc_end) &&
4725 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4726 num_insns < max_insns);
4728 exit_gen_loop:
4729 tcg_temp_free(cpu_addr);
4730 tcg_temp_free(cpu_val);
4731 tcg_temp_free(cpu_dst);
4732 tcg_temp_free_i64(cpu_tmp64);
4733 tcg_temp_free_i32(cpu_tmp32);
4734 tcg_temp_free(cpu_tmp0);
4735 if (tb->cflags & CF_LAST_IO)
4736 gen_io_end();
4737 if (!dc->is_br) {
4738 if (dc->pc != DYNAMIC_PC &&
4739 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4740 /* static PC and NPC: we can use direct chaining */
4741 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4742 } else {
4743 if (dc->pc != DYNAMIC_PC)
4744 tcg_gen_movi_tl(cpu_pc, dc->pc);
4745 save_npc(dc, cpu_cond);
4746 tcg_gen_exit_tb(0);
4749 gen_icount_end(tb, num_insns);
4750 *gen_opc_ptr = INDEX_op_end;
4751 if (spc) {
4752 j = gen_opc_ptr - gen_opc_buf;
4753 lj++;
4754 while (lj <= j)
4755 gen_opc_instr_start[lj++] = 0;
4756 #if 0
4757 log_page_dump();
4758 #endif
4759 gen_opc_jump_pc[0] = dc->jump_pc[0];
4760 gen_opc_jump_pc[1] = dc->jump_pc[1];
4761 } else {
4762 tb->size = last_pc + 4 - pc_start;
4763 tb->icount = num_insns;
4765 #ifdef DEBUG_DISAS
4766 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4767 qemu_log("--------------\n");
4768 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4769 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4770 qemu_log("\n");
4772 #endif
4775 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4777 gen_intermediate_code_internal(tb, 0, env);
4780 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4782 gen_intermediate_code_internal(tb, 1, env);
4785 void gen_intermediate_code_init(CPUSPARCState *env)
4787 unsigned int i;
4788 static int inited;
4789 static const char * const gregnames[8] = {
4790 NULL, // g0 not used
4791 "g1",
4792 "g2",
4793 "g3",
4794 "g4",
4795 "g5",
4796 "g6",
4797 "g7",
4799 static const char * const fregnames[64] = {
4800 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4801 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4802 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4803 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4804 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4805 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4806 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4807 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4810 /* init various static tables */
4811 if (!inited) {
4812 inited = 1;
4814 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4815 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4816 offsetof(CPUState, regwptr),
4817 "regwptr");
4818 #ifdef TARGET_SPARC64
4819 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4820 "xcc");
4821 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4822 "asi");
4823 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4824 "fprs");
4825 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4826 "gsr");
4827 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4828 offsetof(CPUState, tick_cmpr),
4829 "tick_cmpr");
4830 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4831 offsetof(CPUState, stick_cmpr),
4832 "stick_cmpr");
4833 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4834 offsetof(CPUState, hstick_cmpr),
4835 "hstick_cmpr");
4836 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4837 "hintp");
4838 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4839 "htba");
4840 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4841 "hver");
4842 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4843 offsetof(CPUState, ssr), "ssr");
4844 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4845 offsetof(CPUState, version), "ver");
4846 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4847 offsetof(CPUState, softint),
4848 "softint");
4849 #else
4850 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4851 "wim");
4852 #endif
4853 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4854 "cond");
4855 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4856 "cc_src");
4857 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
4858 offsetof(CPUState, cc_src2),
4859 "cc_src2");
4860 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
4861 "cc_dst");
4862 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
4863 "cc_op");
4864 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
4865 "psr");
4866 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
4867 "fsr");
4868 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
4869 "pc");
4870 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
4871 "npc");
4872 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
4873 #ifndef CONFIG_USER_ONLY
4874 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
4875 "tbr");
4876 #endif
4877 for (i = 1; i < 8; i++)
4878 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
4879 offsetof(CPUState, gregs[i]),
4880 gregnames[i]);
4881 for (i = 0; i < TARGET_FPREGS; i++)
4882 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
4883 offsetof(CPUState, fpr[i]),
4884 fregnames[i]);
4886 /* register helpers */
4888 #define GEN_HELPER 2
4889 #include "helper.h"
4893 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4894 unsigned long searched_pc, int pc_pos, void *puc)
4896 target_ulong npc;
4897 env->pc = gen_opc_pc[pc_pos];
4898 npc = gen_opc_npc[pc_pos];
4899 if (npc == 1) {
4900 /* dynamic NPC: already stored */
4901 } else if (npc == 2) {
4902 target_ulong t2 = (target_ulong)(unsigned long)puc;
4903 /* jump PC: use T2 and the jump targets of the translation */
4904 if (t2)
4905 env->npc = gen_opc_jump_pc[0];
4906 else
4907 env->npc = gen_opc_jump_pc[1];
4908 } else {
4909 env->npc = npc;