/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas/disas.h"
#include "exec/helper-proto.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-gen.h"

#include "trace-tcg.h"


#define DEBUG_DISAS

#define DYNAMIC_PC 1 /* dynamic pc value */
#define JUMP_PC    2 /* dynamic pc value which takes only two values
                        according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

#include "exec/gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
} DisasContext;

typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
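
/* Example: the two most significant bits of an instruction word hold the
   primary opcode, so GET_FIELD(insn, 0, 1) expands to (insn >> 30) & 3,
   and GET_FIELD_SP(insn, 30, 31) selects the same two bits using the
   manual's LSB-first numbering. */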
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
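
/* On sparc64 the 5-bit register field encodes double/quad registers above
   31 via the low bit: DFPREG(1) == 32 selects %d32, while DFPREG(2) == 2
   selects %d2.  On sparc32 the low bit is simply masked off. */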
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
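
/* Example: sign_extend(0x1fff, 13) == -1.  The value is shifted into the
   top bits and the arithmetic right shift replicates bit 12, the sign bit
   of the 13-bit field, across the upper bits. */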
#define IS_IMM (insn & (1<<13))
static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}

static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}

/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}

static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
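
/* With PSTATE.AM set (or always under TARGET_ABI32), sparc64 code runs
   with 32-bit addressing, so effective addresses are truncated to their
   low 32 bits before use. */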
static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        TCGv t = get_temp_tl(dc);
        if (reg == 0) {
            tcg_gen_movi_tl(t, 0);
        } else {
            tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
        return t;
    } else {
        return cpu_gregs[reg];
    }
}

static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        if (reg < 8) {
            tcg_gen_mov_tl(cpu_gregs[reg], v);
        } else {
            tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
    }
}

static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        return get_temp_tl(dc);
    } else {
        return cpu_gregs[reg];
    }
}

static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
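
/* Direct block chaining via tcg_gen_goto_tb/tcg_gen_exit_tb is only safe
   when both pc and npc stay on the same guest page as the current TB and
   we are not single-stepping; otherwise the generated code exits with
   pc/npc stored explicitly and lets the main loop find the next TB. */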
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
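
/* Both helpers recover the 32-bit icc carry without consulting the PSR:
   after an add, carry is set iff the truncated result is below one of the
   operands (dst < src); after a subtract, iff src1 < src2 as unsigned
   32-bit values. */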
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}

static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(t0, cpu_y, 1);
    tcg_gen_andi_tl(t0, t0, 0x7fffffff);
    tcg_gen_or_tl(t0, t0, r_temp);
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
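
/* MULScc implements one step of a 32x32 multiply: depending on the low
   bit of %y it conditionally adds the multiplicand (src2), shifts %y
   right by one inserting the low bit of src1 at bit 31, and shifts the
   partial product right with N ^ V from the previous step as the new
   sign bit. */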
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
   FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}

static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}

static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}

static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}

static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}

static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };
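
    /* For CC_OP_LOGIC the flags come from a logical result: C and V are
       always clear, so e.g. "leu" (C | Z) degenerates to a plain equality
       test against zero and "ltu" (C) can never be taken. */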
    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7:  /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
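
/* The table holds the negation of each BPr/MOVr condition; inverting it
   again yields the test actually wanted, e.g. cond 1 (brz) goes through
   TCG_COND_NE and comes out as TCG_COND_EQ against zero. */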
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}

static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}

#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
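
/* The gen_fop_* helpers below factor out the common load/compute/store
   pattern for FP operations.  The suffix spells out operand sizes
   (F = 32-bit, D = 64-bit, Q = 128-bit staged through QT0/QT1), "ne"
   marks helpers for operations that do not raise IEEE exceptions, and
   "gsr" ops take %gsr as an extra input. */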
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
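
/* With the i-bit set, the ASI to use comes from the %asi register at
   runtime; otherwise it is the 8-bit immediate encoded in the instruction
   (bits 5..12, extracted here as GET_FIELD(insn, 19, 26)). */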
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
                                TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(t64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val, t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, src);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv t;
    TCGv_i64 t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

    t = gen_dest_gpr(dc, rd + 1);
    tcg_gen_trunc_i64_tl(t, t64);
    gen_store_gpr(dc, rd + 1, t);

    tcg_gen_shri_i64(t64, t64, 32);
    tcg_gen_trunc_i64_tl(hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd, hi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
                               TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
#ifdef TARGET_SPARC64
    TCGv_i32 r_asi = gen_get_asi(insn, addr);
#else
    TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
#endif

    gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}

static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
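
/* LDSTUB is emulated as a load of the byte followed by a store of 0xff to
   the same address; the two helper calls are not fused into a single
   atomic operation at this level. */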
static TCGv get_src1(DisasContext *dc, unsigned int insn)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);
    return gen_load_gpr(dc, rs1);
}

static TCGv get_src2(DisasContext *dc, unsigned int insn)
{
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, simm);
        return t;
    } else {      /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        return gen_load_gpr(dc, rs2);
    }
}

#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the latter.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}

static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}

static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}
#ifndef CONFIG_USER_ONLY
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
2323 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2324 int width, bool cc, bool left)
2326 TCGv lo1, lo2, t1, t2;
2327 uint64_t amask, tabl, tabr;
2328 int shift, imask, omask;
2330 if (cc) {
2331 tcg_gen_mov_tl(cpu_cc_src, s1);
2332 tcg_gen_mov_tl(cpu_cc_src2, s2);
2333 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2334 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2335 dc->cc_op = CC_OP_SUB;
2338 /* Theory of operation: there are two tables, left and right (not to
2339 be confused with the left and right versions of the opcode). These
2340 are indexed by the low 3 bits of the inputs. To make things "easy",
2341 these tables are loaded into two constants, TABL and TABR below.
2342 The operation index = (input & imask) << shift calculates the index
2343 into the constant, while val = (table >> index) & omask calculates
2344 the value we're looking for. */
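 /* A worked example of the lookup (editorial sketch): for width 8
 with the "left" tables below, an input whose low bits are 2
 gives index = (2 & 0x7) << 3 = 16, and
 (0x80c0e0f0f8fcfeffULL >> 16) & 0xff = 0xfc as the edge mask. */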
2345 switch (width) {
2346 case 8:
2347 imask = 0x7;
2348 shift = 3;
2349 omask = 0xff;
2350 if (left) {
2351 tabl = 0x80c0e0f0f8fcfeffULL;
2352 tabr = 0xff7f3f1f0f070301ULL;
2353 } else {
2354 tabl = 0x0103070f1f3f7fffULL;
2355 tabr = 0xfffefcf8f0e0c080ULL;
2357 break;
2358 case 16:
2359 imask = 0x6;
2360 shift = 1;
2361 omask = 0xf;
2362 if (left) {
2363 tabl = 0x8cef;
2364 tabr = 0xf731;
2365 } else {
2366 tabl = 0x137f;
2367 tabr = 0xfec8;
2369 break;
2370 case 32:
2371 imask = 0x4;
2372 shift = 0;
2373 omask = 0x3;
2374 if (left) {
2375 tabl = (2 << 2) | 3;
2376 tabr = (3 << 2) | 1;
2377 } else {
2378 tabl = (1 << 2) | 3;
2379 tabr = (3 << 2) | 2;
2381 break;
2382 default:
2383 abort();
2386 lo1 = tcg_temp_new();
2387 lo2 = tcg_temp_new();
2388 tcg_gen_andi_tl(lo1, s1, imask);
2389 tcg_gen_andi_tl(lo2, s2, imask);
2390 tcg_gen_shli_tl(lo1, lo1, shift);
2391 tcg_gen_shli_tl(lo2, lo2, shift);
2393 t1 = tcg_const_tl(tabl);
2394 t2 = tcg_const_tl(tabr);
2395 tcg_gen_shr_tl(lo1, t1, lo1);
2396 tcg_gen_shr_tl(lo2, t2, lo2);
2397 tcg_gen_andi_tl(dst, lo1, omask);
2398 tcg_gen_andi_tl(lo2, lo2, omask);
2400 amask = -8;
2401 if (AM_CHECK(dc)) {
2402 amask &= 0xffffffffULL;
2404 tcg_gen_andi_tl(s1, s1, amask);
2405 tcg_gen_andi_tl(s2, s2, amask);
2407 /* We want to compute
2408 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2409 We've already done dst = lo1, so this reduces to
2410 dst &= (s1 == s2 ? -1 : lo2)
2411 Which we perform by
2412 lo2 |= -(s1 == s2)
2413 dst &= lo2 */
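 /* A quick check of the identity: when s1 == s2, the setcond below
 yields 1 and the negation turns it into all-ones, so
 lo2 |= -1 and dst &= -1 leave dst == lo1, as required. */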
2415 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2416 tcg_gen_neg_tl(t1, t1);
2417 tcg_gen_or_tl(lo2, lo2, t1);
2418 tcg_gen_and_tl(dst, dst, lo2);
2420 tcg_temp_free(lo1);
2421 tcg_temp_free(lo2);
2422 tcg_temp_free(t1);
2423 tcg_temp_free(t2);
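 /* Editorial note on alignaddr below: dst = (s1 + s2) & ~7, with the
 low three bits of the sum recorded in the GSR align field
 (negated for the "left" variant). E.g. s1 + s2 == 11 gives
 dst == 8 with align == 3, or align == ((-11) & 7) == 5 for
 alignaddrl. */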
2426 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2428 TCGv tmp = tcg_temp_new();
2430 tcg_gen_add_tl(tmp, s1, s2);
2431 tcg_gen_andi_tl(dst, tmp, -8);
2432 if (left) {
2433 tcg_gen_neg_tl(tmp, tmp);
2435 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2437 tcg_temp_free(tmp);
2440 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2442 TCGv t1, t2, shift;
2444 t1 = tcg_temp_new();
2445 t2 = tcg_temp_new();
2446 shift = tcg_temp_new();
2448 tcg_gen_andi_tl(shift, gsr, 7);
2449 tcg_gen_shli_tl(shift, shift, 3);
2450 tcg_gen_shl_tl(t1, s1, shift);
2452 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2453 shift of (up to 63) followed by a constant shift of 1. */
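 /* Why the xori works (editorial sketch): for shift in [0, 63],
 shift ^ 63 == 63 - shift, so (s2 >> (63 - shift)) >> 1 equals
 s2 >> (64 - shift), and a GSR align field of 0 correctly
 yields 0 for the right-hand half. */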
2454 tcg_gen_xori_tl(shift, shift, 63);
2455 tcg_gen_shr_tl(t2, s2, shift);
2456 tcg_gen_shri_tl(t2, t2, 1);
2458 tcg_gen_or_tl(dst, t1, t2);
2460 tcg_temp_free(t1);
2461 tcg_temp_free(t2);
2462 tcg_temp_free(shift);
2464 #endif
2466 #define CHECK_IU_FEATURE(dc, FEATURE) \
2467 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2468 goto illegal_insn;
2469 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2470 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2471 goto nfpu_insn;
2473 /* before an instruction, dc->pc must be static */
2474 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2476 unsigned int opc, rs1, rs2, rd;
2477 TCGv cpu_src1, cpu_src2;
2478 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2479 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2480 target_long simm;
2482 opc = GET_FIELD(insn, 0, 1);
2483 rd = GET_FIELD(insn, 2, 6);
2485 switch (opc) {
2486 case 0: /* branches/sethi */
2488 unsigned int xop = GET_FIELD(insn, 7, 9);
2489 int32_t target;
2490 switch (xop) {
2491 #ifdef TARGET_SPARC64
2492 case 0x1: /* V9 BPcc */
2494 int cc;
2496 target = GET_FIELD_SP(insn, 0, 18);
2497 target = sign_extend(target, 19);
2498 target <<= 2;
2499 cc = GET_FIELD_SP(insn, 20, 21);
2500 if (cc == 0)
2501 do_branch(dc, target, insn, 0);
2502 else if (cc == 2)
2503 do_branch(dc, target, insn, 1);
2504 else
2505 goto illegal_insn;
2506 goto jmp_insn;
2508 case 0x3: /* V9 BPr */
2510 target = GET_FIELD_SP(insn, 0, 13) |
2511 (GET_FIELD_SP(insn, 20, 21) << 14);
2512 target = sign_extend(target, 16);
2513 target <<= 2;
2514 cpu_src1 = get_src1(dc, insn);
2515 do_branch_reg(dc, target, insn, cpu_src1);
2516 goto jmp_insn;
2518 case 0x5: /* V9 FBPcc */
2520 int cc = GET_FIELD_SP(insn, 20, 21);
2521 if (gen_trap_ifnofpu(dc)) {
2522 goto jmp_insn;
2524 target = GET_FIELD_SP(insn, 0, 18);
2525 target = sign_extend(target, 19);
2526 target <<= 2;
2527 do_fbranch(dc, target, insn, cc);
2528 goto jmp_insn;
2530 #else
2531 case 0x7: /* CBN+x */
2533 goto ncp_insn;
2535 #endif
2536 case 0x2: /* BN+x */
2538 target = GET_FIELD(insn, 10, 31);
2539 target = sign_extend(target, 22);
2540 target <<= 2;
2541 do_branch(dc, target, insn, 0);
2542 goto jmp_insn;
2544 case 0x6: /* FBN+x */
2546 if (gen_trap_ifnofpu(dc)) {
2547 goto jmp_insn;
2549 target = GET_FIELD(insn, 10, 31);
2550 target = sign_extend(target, 22);
2551 target <<= 2;
2552 do_fbranch(dc, target, insn, 0);
2553 goto jmp_insn;
2555 case 0x4: /* SETHI */
2556 /* Special-case %g0 because that's the canonical nop. */
2557 if (rd) {
2558 uint32_t value = GET_FIELD(insn, 10, 31);
2559 TCGv t = gen_dest_gpr(dc, rd);
2560 tcg_gen_movi_tl(t, value << 10);
2561 gen_store_gpr(dc, rd, t);
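 /* e.g. "sethi %hi(0x12345678), %rd" stores
 (0x12345678 >> 10) << 10 == 0x12345400 into rd. */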
2563 break;
2564 case 0x0: /* UNIMPL */
2565 default:
2566 goto illegal_insn;
2568 break;
2570 break;
2571 case 1: /*CALL*/
2573 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2574 TCGv o7 = gen_dest_gpr(dc, 15);
2576 tcg_gen_movi_tl(o7, dc->pc);
2577 gen_store_gpr(dc, 15, o7);
2578 target += dc->pc;
2579 gen_mov_pc_npc(dc);
2580 #ifdef TARGET_SPARC64
2581 if (unlikely(AM_CHECK(dc))) {
2582 target &= 0xffffffffULL;
2584 #endif
2585 dc->npc = target;
2587 goto jmp_insn;
2588 case 2: /* FPU & Logical Operations */
2590 unsigned int xop = GET_FIELD(insn, 7, 12);
2591 TCGv cpu_dst = get_temp_tl(dc);
2592 TCGv cpu_tmp0;
2594 if (xop == 0x3a) { /* generate trap */
2595 int cond = GET_FIELD(insn, 3, 6);
2596 TCGv_i32 trap;
2597 TCGLabel *l1 = NULL;
2598 int mask;
2600 if (cond == 0) {
2601 /* Trap never. */
2602 break;
2605 save_state(dc);
2607 if (cond != 8) {
2608 /* Conditional trap. */
2609 DisasCompare cmp;
2610 #ifdef TARGET_SPARC64
2611 /* V9 icc/xcc */
2612 int cc = GET_FIELD_SP(insn, 11, 12);
2613 if (cc == 0) {
2614 gen_compare(&cmp, 0, cond, dc);
2615 } else if (cc == 2) {
2616 gen_compare(&cmp, 1, cond, dc);
2617 } else {
2618 goto illegal_insn;
2620 #else
2621 gen_compare(&cmp, 0, cond, dc);
2622 #endif
2623 l1 = gen_new_label();
2624 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2625 cmp.c1, cmp.c2, l1);
2626 free_compare(&cmp);
2629 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2630 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
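 /* Worked example (editorial): a V8 "ta 0x10" with rs1 == %g0 in
 the immediate form yields
 tt == TT_TRAP + (0x10 & 0x7f) == TT_TRAP + 0x10; on
 HYPV-capable CPUs in supervisor mode the mask widens to 0xff. */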
2632 /* Don't use the normal temporaries, as they may well have
2633 gone out of scope with the branch above. While we're
2634 doing that we might as well pre-truncate to 32-bit. */
2635 trap = tcg_temp_new_i32();
2637 rs1 = GET_FIELD_SP(insn, 14, 18);
2638 if (IS_IMM) {
2639 rs2 = GET_FIELD_SP(insn, 0, 6);
2640 if (rs1 == 0) {
2641 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2642 /* Signal that the trap value is fully constant. */
2643 mask = 0;
2644 } else {
2645 TCGv t1 = gen_load_gpr(dc, rs1);
2646 tcg_gen_trunc_tl_i32(trap, t1);
2647 tcg_gen_addi_i32(trap, trap, rs2);
2649 } else {
2650 TCGv t1, t2;
2651 rs2 = GET_FIELD_SP(insn, 0, 4);
2652 t1 = gen_load_gpr(dc, rs1);
2653 t2 = gen_load_gpr(dc, rs2);
2654 tcg_gen_add_tl(t1, t1, t2);
2655 tcg_gen_trunc_tl_i32(trap, t1);
2657 if (mask != 0) {
2658 tcg_gen_andi_i32(trap, trap, mask);
2659 tcg_gen_addi_i32(trap, trap, TT_TRAP);
2662 gen_helper_raise_exception(cpu_env, trap);
2663 tcg_temp_free_i32(trap);
2665 if (cond == 8) {
2666 /* An unconditional trap ends the TB. */
2667 dc->is_br = 1;
2668 goto jmp_insn;
2669 } else {
2670 /* A conditional trap falls through to the next insn. */
2671 gen_set_label(l1);
2672 break;
2674 } else if (xop == 0x28) {
2675 rs1 = GET_FIELD(insn, 13, 17);
2676 switch(rs1) {
2677 case 0: /* rdy */
2678 #ifndef TARGET_SPARC64
2679 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2680 manual, rdy on the microSPARC
2681 II */
2682 case 0x0f: /* stbar in the SPARCv8 manual,
2683 rdy on the microSPARC II */
2684 case 0x10 ... 0x1f: /* implementation-dependent in the
2685 SPARCv8 manual, rdy on the
2686 microSPARC II */
2687 /* Read Asr17 */
2688 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2689 TCGv t = gen_dest_gpr(dc, rd);
2690 /* Read Asr17 for a Leon3 monoprocessor */
2691 tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2692 gen_store_gpr(dc, rd, t);
2693 break;
2695 #endif
2696 gen_store_gpr(dc, rd, cpu_y);
2697 break;
2698 #ifdef TARGET_SPARC64
2699 case 0x2: /* V9 rdccr */
2700 update_psr(dc);
2701 gen_helper_rdccr(cpu_dst, cpu_env);
2702 gen_store_gpr(dc, rd, cpu_dst);
2703 break;
2704 case 0x3: /* V9 rdasi */
2705 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2706 gen_store_gpr(dc, rd, cpu_dst);
2707 break;
2708 case 0x4: /* V9 rdtick */
2710 TCGv_ptr r_tickptr;
2712 r_tickptr = tcg_temp_new_ptr();
2713 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2714 offsetof(CPUSPARCState, tick));
2715 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2716 tcg_temp_free_ptr(r_tickptr);
2717 gen_store_gpr(dc, rd, cpu_dst);
2719 break;
2720 case 0x5: /* V9 rdpc */
2722 TCGv t = gen_dest_gpr(dc, rd);
2723 if (unlikely(AM_CHECK(dc))) {
2724 tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2725 } else {
2726 tcg_gen_movi_tl(t, dc->pc);
2728 gen_store_gpr(dc, rd, t);
2730 break;
2731 case 0x6: /* V9 rdfprs */
2732 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2733 gen_store_gpr(dc, rd, cpu_dst);
2734 break;
2735 case 0xf: /* V9 membar */
2736 break; /* no effect */
2737 case 0x13: /* Graphics Status */
2738 if (gen_trap_ifnofpu(dc)) {
2739 goto jmp_insn;
2741 gen_store_gpr(dc, rd, cpu_gsr);
2742 break;
2743 case 0x16: /* Softint */
2744 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2745 gen_store_gpr(dc, rd, cpu_dst);
2746 break;
2747 case 0x17: /* Tick compare */
2748 gen_store_gpr(dc, rd, cpu_tick_cmpr);
2749 break;
2750 case 0x18: /* System tick */
2752 TCGv_ptr r_tickptr;
2754 r_tickptr = tcg_temp_new_ptr();
2755 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2756 offsetof(CPUSPARCState, stick));
2757 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2758 tcg_temp_free_ptr(r_tickptr);
2759 gen_store_gpr(dc, rd, cpu_dst);
2761 break;
2762 case 0x19: /* System tick compare */
2763 gen_store_gpr(dc, rd, cpu_stick_cmpr);
2764 break;
2765 case 0x10: /* Performance Control */
2766 case 0x11: /* Performance Instrumentation Counter */
2767 case 0x12: /* Dispatch Control */
2768 case 0x14: /* Softint set, WO */
2769 case 0x15: /* Softint clear, WO */
2770 #endif
2771 default:
2772 goto illegal_insn;
2774 #if !defined(CONFIG_USER_ONLY)
2775 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2776 #ifndef TARGET_SPARC64
2777 if (!supervisor(dc)) {
2778 goto priv_insn;
2780 update_psr(dc);
2781 gen_helper_rdpsr(cpu_dst, cpu_env);
2782 #else
2783 CHECK_IU_FEATURE(dc, HYPV);
2784 if (!hypervisor(dc))
2785 goto priv_insn;
2786 rs1 = GET_FIELD(insn, 13, 17);
2787 switch (rs1) {
2788 case 0: // hpstate
2789 // gen_op_rdhpstate();
2790 break;
2791 case 1: // htstate
2792 // gen_op_rdhtstate();
2793 break;
2794 case 3: // hintp
2795 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2796 break;
2797 case 5: // htba
2798 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2799 break;
2800 case 6: // hver
2801 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2802 break;
2803 case 31: // hstick_cmpr
2804 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2805 break;
2806 default:
2807 goto illegal_insn;
2809 #endif
2810 gen_store_gpr(dc, rd, cpu_dst);
2811 break;
2812 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2813 if (!supervisor(dc)) {
2814 goto priv_insn;
2816 cpu_tmp0 = get_temp_tl(dc);
2817 #ifdef TARGET_SPARC64
2818 rs1 = GET_FIELD(insn, 13, 17);
2819 switch (rs1) {
2820 case 0: // tpc
2822 TCGv_ptr r_tsptr;
2824 r_tsptr = tcg_temp_new_ptr();
2825 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2826 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2827 offsetof(trap_state, tpc));
2828 tcg_temp_free_ptr(r_tsptr);
2830 break;
2831 case 1: // tnpc
2833 TCGv_ptr r_tsptr;
2835 r_tsptr = tcg_temp_new_ptr();
2836 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2837 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2838 offsetof(trap_state, tnpc));
2839 tcg_temp_free_ptr(r_tsptr);
2841 break;
2842 case 2: // tstate
2844 TCGv_ptr r_tsptr;
2846 r_tsptr = tcg_temp_new_ptr();
2847 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2848 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2849 offsetof(trap_state, tstate));
2850 tcg_temp_free_ptr(r_tsptr);
2852 break;
2853 case 3: // tt
2855 TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2857 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2858 tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2859 offsetof(trap_state, tt));
2860 tcg_temp_free_ptr(r_tsptr);
2862 break;
2863 case 4: // tick
2865 TCGv_ptr r_tickptr;
2867 r_tickptr = tcg_temp_new_ptr();
2868 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2869 offsetof(CPUSPARCState, tick));
2870 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2871 tcg_temp_free_ptr(r_tickptr);
2873 break;
2874 case 5: // tba
2875 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2876 break;
2877 case 6: // pstate
2878 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2879 offsetof(CPUSPARCState, pstate));
2880 break;
2881 case 7: // tl
2882 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2883 offsetof(CPUSPARCState, tl));
2884 break;
2885 case 8: // pil
2886 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2887 offsetof(CPUSPARCState, psrpil));
2888 break;
2889 case 9: // cwp
2890 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2891 break;
2892 case 10: // cansave
2893 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2894 offsetof(CPUSPARCState, cansave));
2895 break;
2896 case 11: // canrestore
2897 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2898 offsetof(CPUSPARCState, canrestore));
2899 break;
2900 case 12: // cleanwin
2901 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2902 offsetof(CPUSPARCState, cleanwin));
2903 break;
2904 case 13: // otherwin
2905 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2906 offsetof(CPUSPARCState, otherwin));
2907 break;
2908 case 14: // wstate
2909 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2910 offsetof(CPUSPARCState, wstate));
2911 break;
2912 case 16: // UA2005 gl
2913 CHECK_IU_FEATURE(dc, GL);
2914 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2915 offsetof(CPUSPARCState, gl));
2916 break;
2917 case 26: // UA2005 strand status
2918 CHECK_IU_FEATURE(dc, HYPV);
2919 if (!hypervisor(dc))
2920 goto priv_insn;
2921 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2922 break;
2923 case 31: // ver
2924 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2925 break;
2926 case 15: // fq
2927 default:
2928 goto illegal_insn;
2930 #else
2931 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2932 #endif
2933 gen_store_gpr(dc, rd, cpu_tmp0);
2934 break;
2935 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2936 #ifdef TARGET_SPARC64
2937 save_state(dc);
2938 gen_helper_flushw(cpu_env);
2939 #else
2940 if (!supervisor(dc))
2941 goto priv_insn;
2942 gen_store_gpr(dc, rd, cpu_tbr);
2943 #endif
2944 break;
2945 #endif
2946 } else if (xop == 0x34) { /* FPU Operations */
2947 if (gen_trap_ifnofpu(dc)) {
2948 goto jmp_insn;
2950 gen_op_clear_ieee_excp_and_FTT();
2951 rs1 = GET_FIELD(insn, 13, 17);
2952 rs2 = GET_FIELD(insn, 27, 31);
2953 xop = GET_FIELD(insn, 18, 26);
2954 save_state(dc);
2955 switch (xop) {
2956 case 0x1: /* fmovs */
2957 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2958 gen_store_fpr_F(dc, rd, cpu_src1_32);
2959 break;
2960 case 0x5: /* fnegs */
2961 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2962 break;
2963 case 0x9: /* fabss */
2964 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2965 break;
2966 case 0x29: /* fsqrts */
2967 CHECK_FPU_FEATURE(dc, FSQRT);
2968 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2969 break;
2970 case 0x2a: /* fsqrtd */
2971 CHECK_FPU_FEATURE(dc, FSQRT);
2972 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2973 break;
2974 case 0x2b: /* fsqrtq */
2975 CHECK_FPU_FEATURE(dc, FLOAT128);
2976 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2977 break;
2978 case 0x41: /* fadds */
2979 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2980 break;
2981 case 0x42: /* faddd */
2982 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2983 break;
2984 case 0x43: /* faddq */
2985 CHECK_FPU_FEATURE(dc, FLOAT128);
2986 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2987 break;
2988 case 0x45: /* fsubs */
2989 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2990 break;
2991 case 0x46: /* fsubd */
2992 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2993 break;
2994 case 0x47: /* fsubq */
2995 CHECK_FPU_FEATURE(dc, FLOAT128);
2996 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2997 break;
2998 case 0x49: /* fmuls */
2999 CHECK_FPU_FEATURE(dc, FMUL);
3000 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3001 break;
3002 case 0x4a: /* fmuld */
3003 CHECK_FPU_FEATURE(dc, FMUL);
3004 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3005 break;
3006 case 0x4b: /* fmulq */
3007 CHECK_FPU_FEATURE(dc, FLOAT128);
3008 CHECK_FPU_FEATURE(dc, FMUL);
3009 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3010 break;
3011 case 0x4d: /* fdivs */
3012 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3013 break;
3014 case 0x4e: /* fdivd */
3015 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3016 break;
3017 case 0x4f: /* fdivq */
3018 CHECK_FPU_FEATURE(dc, FLOAT128);
3019 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3020 break;
3021 case 0x69: /* fsmuld */
3022 CHECK_FPU_FEATURE(dc, FSMULD);
3023 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3024 break;
3025 case 0x6e: /* fdmulq */
3026 CHECK_FPU_FEATURE(dc, FLOAT128);
3027 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3028 break;
3029 case 0xc4: /* fitos */
3030 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3031 break;
3032 case 0xc6: /* fdtos */
3033 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3034 break;
3035 case 0xc7: /* fqtos */
3036 CHECK_FPU_FEATURE(dc, FLOAT128);
3037 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3038 break;
3039 case 0xc8: /* fitod */
3040 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3041 break;
3042 case 0xc9: /* fstod */
3043 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3044 break;
3045 case 0xcb: /* fqtod */
3046 CHECK_FPU_FEATURE(dc, FLOAT128);
3047 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3048 break;
3049 case 0xcc: /* fitoq */
3050 CHECK_FPU_FEATURE(dc, FLOAT128);
3051 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3052 break;
3053 case 0xcd: /* fstoq */
3054 CHECK_FPU_FEATURE(dc, FLOAT128);
3055 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3056 break;
3057 case 0xce: /* fdtoq */
3058 CHECK_FPU_FEATURE(dc, FLOAT128);
3059 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3060 break;
3061 case 0xd1: /* fstoi */
3062 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3063 break;
3064 case 0xd2: /* fdtoi */
3065 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3066 break;
3067 case 0xd3: /* fqtoi */
3068 CHECK_FPU_FEATURE(dc, FLOAT128);
3069 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3070 break;
3071 #ifdef TARGET_SPARC64
3072 case 0x2: /* V9 fmovd */
3073 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3074 gen_store_fpr_D(dc, rd, cpu_src1_64);
3075 break;
3076 case 0x3: /* V9 fmovq */
3077 CHECK_FPU_FEATURE(dc, FLOAT128);
3078 gen_move_Q(rd, rs2);
3079 break;
3080 case 0x6: /* V9 fnegd */
3081 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3082 break;
3083 case 0x7: /* V9 fnegq */
3084 CHECK_FPU_FEATURE(dc, FLOAT128);
3085 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3086 break;
3087 case 0xa: /* V9 fabsd */
3088 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3089 break;
3090 case 0xb: /* V9 fabsq */
3091 CHECK_FPU_FEATURE(dc, FLOAT128);
3092 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3093 break;
3094 case 0x81: /* V9 fstox */
3095 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3096 break;
3097 case 0x82: /* V9 fdtox */
3098 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3099 break;
3100 case 0x83: /* V9 fqtox */
3101 CHECK_FPU_FEATURE(dc, FLOAT128);
3102 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3103 break;
3104 case 0x84: /* V9 fxtos */
3105 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3106 break;
3107 case 0x88: /* V9 fxtod */
3108 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3109 break;
3110 case 0x8c: /* V9 fxtoq */
3111 CHECK_FPU_FEATURE(dc, FLOAT128);
3112 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3113 break;
3114 #endif
3115 default:
3116 goto illegal_insn;
3118 } else if (xop == 0x35) { /* FPU Operations */
3119 #ifdef TARGET_SPARC64
3120 int cond;
3121 #endif
3122 if (gen_trap_ifnofpu(dc)) {
3123 goto jmp_insn;
3125 gen_op_clear_ieee_excp_and_FTT();
3126 rs1 = GET_FIELD(insn, 13, 17);
3127 rs2 = GET_FIELD(insn, 27, 31);
3128 xop = GET_FIELD(insn, 18, 26);
3129 save_state(dc);
3131 #ifdef TARGET_SPARC64
3132 #define FMOVR(sz) \
3133 do { \
3134 DisasCompare cmp; \
3135 cond = GET_FIELD_SP(insn, 10, 12); \
3136 cpu_src1 = get_src1(dc, insn); \
3137 gen_compare_reg(&cmp, cond, cpu_src1); \
3138 gen_fmov##sz(dc, &cmp, rd, rs2); \
3139 free_compare(&cmp); \
3140 } while (0)
3142 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3143 FMOVR(s);
3144 break;
3145 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3146 FMOVR(d);
3147 break;
3148 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3149 CHECK_FPU_FEATURE(dc, FLOAT128);
3150 FMOVR(q);
3151 break;
3153 #undef FMOVR
3154 #endif
3155 switch (xop) {
3156 #ifdef TARGET_SPARC64
3157 #define FMOVCC(fcc, sz) \
3158 do { \
3159 DisasCompare cmp; \
3160 cond = GET_FIELD_SP(insn, 14, 17); \
3161 gen_fcompare(&cmp, fcc, cond); \
3162 gen_fmov##sz(dc, &cmp, rd, rs2); \
3163 free_compare(&cmp); \
3164 } while (0)
3166 case 0x001: /* V9 fmovscc %fcc0 */
3167 FMOVCC(0, s);
3168 break;
3169 case 0x002: /* V9 fmovdcc %fcc0 */
3170 FMOVCC(0, d);
3171 break;
3172 case 0x003: /* V9 fmovqcc %fcc0 */
3173 CHECK_FPU_FEATURE(dc, FLOAT128);
3174 FMOVCC(0, q);
3175 break;
3176 case 0x041: /* V9 fmovscc %fcc1 */
3177 FMOVCC(1, s);
3178 break;
3179 case 0x042: /* V9 fmovdcc %fcc1 */
3180 FMOVCC(1, d);
3181 break;
3182 case 0x043: /* V9 fmovqcc %fcc1 */
3183 CHECK_FPU_FEATURE(dc, FLOAT128);
3184 FMOVCC(1, q);
3185 break;
3186 case 0x081: /* V9 fmovscc %fcc2 */
3187 FMOVCC(2, s);
3188 break;
3189 case 0x082: /* V9 fmovdcc %fcc2 */
3190 FMOVCC(2, d);
3191 break;
3192 case 0x083: /* V9 fmovqcc %fcc2 */
3193 CHECK_FPU_FEATURE(dc, FLOAT128);
3194 FMOVCC(2, q);
3195 break;
3196 case 0x0c1: /* V9 fmovscc %fcc3 */
3197 FMOVCC(3, s);
3198 break;
3199 case 0x0c2: /* V9 fmovdcc %fcc3 */
3200 FMOVCC(3, d);
3201 break;
3202 case 0x0c3: /* V9 fmovqcc %fcc3 */
3203 CHECK_FPU_FEATURE(dc, FLOAT128);
3204 FMOVCC(3, q);
3205 break;
3206 #undef FMOVCC
3207 #define FMOVCC(xcc, sz) \
3208 do { \
3209 DisasCompare cmp; \
3210 cond = GET_FIELD_SP(insn, 14, 17); \
3211 gen_compare(&cmp, xcc, cond, dc); \
3212 gen_fmov##sz(dc, &cmp, rd, rs2); \
3213 free_compare(&cmp); \
3214 } while (0)
3216 case 0x101: /* V9 fmovscc %icc */
3217 FMOVCC(0, s);
3218 break;
3219 case 0x102: /* V9 fmovdcc %icc */
3220 FMOVCC(0, d);
3221 break;
3222 case 0x103: /* V9 fmovqcc %icc */
3223 CHECK_FPU_FEATURE(dc, FLOAT128);
3224 FMOVCC(0, q);
3225 break;
3226 case 0x181: /* V9 fmovscc %xcc */
3227 FMOVCC(1, s);
3228 break;
3229 case 0x182: /* V9 fmovdcc %xcc */
3230 FMOVCC(1, d);
3231 break;
3232 case 0x183: /* V9 fmovqcc %xcc */
3233 CHECK_FPU_FEATURE(dc, FLOAT128);
3234 FMOVCC(1, q);
3235 break;
3236 #undef FMOVCC
3237 #endif
3238 case 0x51: /* fcmps, V9 %fcc */
3239 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3240 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3241 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3242 break;
3243 case 0x52: /* fcmpd, V9 %fcc */
3244 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3245 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3246 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3247 break;
3248 case 0x53: /* fcmpq, V9 %fcc */
3249 CHECK_FPU_FEATURE(dc, FLOAT128);
3250 gen_op_load_fpr_QT0(QFPREG(rs1));
3251 gen_op_load_fpr_QT1(QFPREG(rs2));
3252 gen_op_fcmpq(rd & 3);
3253 break;
3254 case 0x55: /* fcmpes, V9 %fcc */
3255 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3256 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3257 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3258 break;
3259 case 0x56: /* fcmped, V9 %fcc */
3260 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3261 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3262 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3263 break;
3264 case 0x57: /* fcmpeq, V9 %fcc */
3265 CHECK_FPU_FEATURE(dc, FLOAT128);
3266 gen_op_load_fpr_QT0(QFPREG(rs1));
3267 gen_op_load_fpr_QT1(QFPREG(rs2));
3268 gen_op_fcmpeq(rd & 3);
3269 break;
3270 default:
3271 goto illegal_insn;
3273 } else if (xop == 0x2) {
3274 TCGv dst = gen_dest_gpr(dc, rd);
3275 rs1 = GET_FIELD(insn, 13, 17);
3276 if (rs1 == 0) {
3277 /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3278 if (IS_IMM) { /* immediate */
3279 simm = GET_FIELDs(insn, 19, 31);
3280 tcg_gen_movi_tl(dst, simm);
3281 gen_store_gpr(dc, rd, dst);
3282 } else { /* register */
3283 rs2 = GET_FIELD(insn, 27, 31);
3284 if (rs2 == 0) {
3285 tcg_gen_movi_tl(dst, 0);
3286 gen_store_gpr(dc, rd, dst);
3287 } else {
3288 cpu_src2 = gen_load_gpr(dc, rs2);
3289 gen_store_gpr(dc, rd, cpu_src2);
3292 } else {
3293 cpu_src1 = get_src1(dc, insn);
3294 if (IS_IMM) { /* immediate */
3295 simm = GET_FIELDs(insn, 19, 31);
3296 tcg_gen_ori_tl(dst, cpu_src1, simm);
3297 gen_store_gpr(dc, rd, dst);
3298 } else { /* register */
3299 rs2 = GET_FIELD(insn, 27, 31);
3300 if (rs2 == 0) {
3301 /* mov shortcut: or x, %g0, y -> mov x, y */
3302 gen_store_gpr(dc, rd, cpu_src1);
3303 } else {
3304 cpu_src2 = gen_load_gpr(dc, rs2);
3305 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3306 gen_store_gpr(dc, rd, dst);
3310 #ifdef TARGET_SPARC64
3311 } else if (xop == 0x25) { /* sll, V9 sllx */
3312 cpu_src1 = get_src1(dc, insn);
3313 if (IS_IMM) { /* immediate */
3314 simm = GET_FIELDs(insn, 20, 31);
3315 if (insn & (1 << 12)) {
3316 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3317 } else {
3318 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3320 } else { /* register */
3321 rs2 = GET_FIELD(insn, 27, 31);
3322 cpu_src2 = gen_load_gpr(dc, rs2);
3323 cpu_tmp0 = get_temp_tl(dc);
3324 if (insn & (1 << 12)) {
3325 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3326 } else {
3327 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3329 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3331 gen_store_gpr(dc, rd, cpu_dst);
3332 } else if (xop == 0x26) { /* srl, V9 srlx */
3333 cpu_src1 = get_src1(dc, insn);
3334 if (IS_IMM) { /* immediate */
3335 simm = GET_FIELDs(insn, 20, 31);
3336 if (insn & (1 << 12)) {
3337 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3338 } else {
3339 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3340 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3342 } else { /* register */
3343 rs2 = GET_FIELD(insn, 27, 31);
3344 cpu_src2 = gen_load_gpr(dc, rs2);
3345 cpu_tmp0 = get_temp_tl(dc);
3346 if (insn & (1 << 12)) {
3347 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3348 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3349 } else {
3350 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3351 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3352 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3355 gen_store_gpr(dc, rd, cpu_dst);
3356 } else if (xop == 0x27) { /* sra, V9 srax */
3357 cpu_src1 = get_src1(dc, insn);
3358 if (IS_IMM) { /* immediate */
3359 simm = GET_FIELDs(insn, 20, 31);
3360 if (insn & (1 << 12)) {
3361 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3362 } else {
3363 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3364 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3366 } else { /* register */
3367 rs2 = GET_FIELD(insn, 27, 31);
3368 cpu_src2 = gen_load_gpr(dc, rs2);
3369 cpu_tmp0 = get_temp_tl(dc);
3370 if (insn & (1 << 12)) {
3371 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3372 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3373 } else {
3374 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3375 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3376 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3379 gen_store_gpr(dc, rd, cpu_dst);
3380 #endif
3381 } else if (xop < 0x36) {
3382 if (xop < 0x20) {
3383 cpu_src1 = get_src1(dc, insn);
3384 cpu_src2 = get_src2(dc, insn);
3385 switch (xop & ~0x10) {
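 /* Editorial note: bit 4 of xop selects the cc-setting variant
 of each opcode, so masking it off folds e.g. addcc into the
 add case; each case re-tests (xop & 0x10) where the flags
 matter. */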
3386 case 0x0: /* add */
3387 if (xop & 0x10) {
3388 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3389 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3390 dc->cc_op = CC_OP_ADD;
3391 } else {
3392 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3394 break;
3395 case 0x1: /* and */
3396 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3397 if (xop & 0x10) {
3398 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3399 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3400 dc->cc_op = CC_OP_LOGIC;
3402 break;
3403 case 0x2: /* or */
3404 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3405 if (xop & 0x10) {
3406 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3407 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3408 dc->cc_op = CC_OP_LOGIC;
3410 break;
3411 case 0x3: /* xor */
3412 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3413 if (xop & 0x10) {
3414 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3415 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3416 dc->cc_op = CC_OP_LOGIC;
3418 break;
3419 case 0x4: /* sub */
3420 if (xop & 0x10) {
3421 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3422 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3423 dc->cc_op = CC_OP_SUB;
3424 } else {
3425 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3427 break;
3428 case 0x5: /* andn */
3429 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3430 if (xop & 0x10) {
3431 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3432 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3433 dc->cc_op = CC_OP_LOGIC;
3435 break;
3436 case 0x6: /* orn */
3437 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3438 if (xop & 0x10) {
3439 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3440 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3441 dc->cc_op = CC_OP_LOGIC;
3443 break;
3444 case 0x7: /* xorn */
3445 tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3446 if (xop & 0x10) {
3447 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3448 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3449 dc->cc_op = CC_OP_LOGIC;
3451 break;
3452 case 0x8: /* addx, V9 addc */
3453 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3454 (xop & 0x10));
3455 break;
3456 #ifdef TARGET_SPARC64
3457 case 0x9: /* V9 mulx */
3458 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3459 break;
3460 #endif
3461 case 0xa: /* umul */
3462 CHECK_IU_FEATURE(dc, MUL);
3463 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3464 if (xop & 0x10) {
3465 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3466 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3467 dc->cc_op = CC_OP_LOGIC;
3469 break;
3470 case 0xb: /* smul */
3471 CHECK_IU_FEATURE(dc, MUL);
3472 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3473 if (xop & 0x10) {
3474 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3475 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3476 dc->cc_op = CC_OP_LOGIC;
3478 break;
3479 case 0xc: /* subx, V9 subc */
3480 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3481 (xop & 0x10));
3482 break;
3483 #ifdef TARGET_SPARC64
3484 case 0xd: /* V9 udivx */
3485 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3486 break;
3487 #endif
3488 case 0xe: /* udiv */
3489 CHECK_IU_FEATURE(dc, DIV);
3490 if (xop & 0x10) {
3491 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3492 cpu_src2);
3493 dc->cc_op = CC_OP_DIV;
3494 } else {
3495 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3496 cpu_src2);
3498 break;
3499 case 0xf: /* sdiv */
3500 CHECK_IU_FEATURE(dc, DIV);
3501 if (xop & 0x10) {
3502 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3503 cpu_src2);
3504 dc->cc_op = CC_OP_DIV;
3505 } else {
3506 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3507 cpu_src2);
3509 break;
3510 default:
3511 goto illegal_insn;
3513 gen_store_gpr(dc, rd, cpu_dst);
3514 } else {
3515 cpu_src1 = get_src1(dc, insn);
3516 cpu_src2 = get_src2(dc, insn);
3517 switch (xop) {
3518 case 0x20: /* taddcc */
3519 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3520 gen_store_gpr(dc, rd, cpu_dst);
3521 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3522 dc->cc_op = CC_OP_TADD;
3523 break;
3524 case 0x21: /* tsubcc */
3525 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3526 gen_store_gpr(dc, rd, cpu_dst);
3527 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3528 dc->cc_op = CC_OP_TSUB;
3529 break;
3530 case 0x22: /* taddcctv */
3531 gen_helper_taddcctv(cpu_dst, cpu_env,
3532 cpu_src1, cpu_src2);
3533 gen_store_gpr(dc, rd, cpu_dst);
3534 dc->cc_op = CC_OP_TADDTV;
3535 break;
3536 case 0x23: /* tsubcctv */
3537 gen_helper_tsubcctv(cpu_dst, cpu_env,
3538 cpu_src1, cpu_src2);
3539 gen_store_gpr(dc, rd, cpu_dst);
3540 dc->cc_op = CC_OP_TSUBTV;
3541 break;
3542 case 0x24: /* mulscc */
3543 update_psr(dc);
3544 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3545 gen_store_gpr(dc, rd, cpu_dst);
3546 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3547 dc->cc_op = CC_OP_ADD;
3548 break;
3549 #ifndef TARGET_SPARC64
3550 case 0x25: /* sll */
3551 if (IS_IMM) { /* immediate */
3552 simm = GET_FIELDs(insn, 20, 31);
3553 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3554 } else { /* register */
3555 cpu_tmp0 = get_temp_tl(dc);
3556 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3557 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3559 gen_store_gpr(dc, rd, cpu_dst);
3560 break;
3561 case 0x26: /* srl */
3562 if (IS_IMM) { /* immediate */
3563 simm = GET_FIELDs(insn, 20, 31);
3564 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3565 } else { /* register */
3566 cpu_tmp0 = get_temp_tl(dc);
3567 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3568 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3570 gen_store_gpr(dc, rd, cpu_dst);
3571 break;
3572 case 0x27: /* sra */
3573 if (IS_IMM) { /* immediate */
3574 simm = GET_FIELDs(insn, 20, 31);
3575 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3576 } else { /* register */
3577 cpu_tmp0 = get_temp_tl(dc);
3578 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3579 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3581 gen_store_gpr(dc, rd, cpu_dst);
3582 break;
3583 #endif
3584 case 0x30:
3586 cpu_tmp0 = get_temp_tl(dc);
3587 switch(rd) {
3588 case 0: /* wry */
3589 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3590 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3591 break;
3592 #ifndef TARGET_SPARC64
3593 case 0x01 ... 0x0f: /* undefined in the
3594 SPARCv8 manual, nop
3595 on the microSPARC
3596 II */
3597 case 0x10 ... 0x1f: /* implementation-dependent
3598 in the SPARCv8
3599 manual, nop on the
3600 microSPARC II */
3601 if ((rd == 0x13) && (dc->def->features &
3602 CPU_FEATURE_POWERDOWN)) {
3603 /* LEON3 power-down */
3604 save_state(dc);
3605 gen_helper_power_down(cpu_env);
3607 break;
3608 #else
3609 case 0x2: /* V9 wrccr */
3610 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3611 gen_helper_wrccr(cpu_env, cpu_tmp0);
3612 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3613 dc->cc_op = CC_OP_FLAGS;
3614 break;
3615 case 0x3: /* V9 wrasi */
3616 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3617 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3618 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3619 break;
3620 case 0x6: /* V9 wrfprs */
3621 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3622 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3623 save_state(dc);
3624 gen_op_next_insn();
3625 tcg_gen_exit_tb(0);
3626 dc->is_br = 1;
3627 break;
3628 case 0xf: /* V9 sir, nop if user */
3629 #if !defined(CONFIG_USER_ONLY)
3630 if (supervisor(dc)) {
3631 ; // XXX
3633 #endif
3634 break;
3635 case 0x13: /* Graphics Status */
3636 if (gen_trap_ifnofpu(dc)) {
3637 goto jmp_insn;
3639 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3640 break;
3641 case 0x14: /* Softint set */
3642 if (!supervisor(dc))
3643 goto illegal_insn;
3644 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3645 gen_helper_set_softint(cpu_env, cpu_tmp0);
3646 break;
3647 case 0x15: /* Softint clear */
3648 if (!supervisor(dc))
3649 goto illegal_insn;
3650 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3651 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3652 break;
3653 case 0x16: /* Softint write */
3654 if (!supervisor(dc))
3655 goto illegal_insn;
3656 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3657 gen_helper_write_softint(cpu_env, cpu_tmp0);
3658 break;
3659 case 0x17: /* Tick compare */
3660 #if !defined(CONFIG_USER_ONLY)
3661 if (!supervisor(dc))
3662 goto illegal_insn;
3663 #endif
3665 TCGv_ptr r_tickptr;
3667 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3668 cpu_src2);
3669 r_tickptr = tcg_temp_new_ptr();
3670 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3671 offsetof(CPUSPARCState, tick));
3672 gen_helper_tick_set_limit(r_tickptr,
3673 cpu_tick_cmpr);
3674 tcg_temp_free_ptr(r_tickptr);
3676 break;
3677 case 0x18: /* System tick */
3678 #if !defined(CONFIG_USER_ONLY)
3679 if (!supervisor(dc))
3680 goto illegal_insn;
3681 #endif
3683 TCGv_ptr r_tickptr;
3685 tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3686 cpu_src2);
3687 r_tickptr = tcg_temp_new_ptr();
3688 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3689 offsetof(CPUSPARCState, stick));
3690 gen_helper_tick_set_count(r_tickptr,
3691 cpu_tmp0);
3692 tcg_temp_free_ptr(r_tickptr);
3694 break;
3695 case 0x19: /* System tick compare */
3696 #if !defined(CONFIG_USER_ONLY)
3697 if (!supervisor(dc))
3698 goto illegal_insn;
3699 #endif
3701 TCGv_ptr r_tickptr;
3703 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3704 cpu_src2);
3705 r_tickptr = tcg_temp_new_ptr();
3706 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3707 offsetof(CPUSPARCState, stick));
3708 gen_helper_tick_set_limit(r_tickptr,
3709 cpu_stick_cmpr);
3710 tcg_temp_free_ptr(r_tickptr);
3712 break;
3714 case 0x10: /* Performance Control */
3715 case 0x11: /* Performance Instrumentation
3716 Counter */
3717 case 0x12: /* Dispatch Control */
3718 #endif
3719 default:
3720 goto illegal_insn;
3723 break;
3724 #if !defined(CONFIG_USER_ONLY)
3725 case 0x31: /* wrpsr, V9 saved, restored */
3727 if (!supervisor(dc))
3728 goto priv_insn;
3729 #ifdef TARGET_SPARC64
3730 switch (rd) {
3731 case 0:
3732 gen_helper_saved(cpu_env);
3733 break;
3734 case 1:
3735 gen_helper_restored(cpu_env);
3736 break;
3737 case 2: /* UA2005 allclean */
3738 case 3: /* UA2005 otherw */
3739 case 4: /* UA2005 normalw */
3740 case 5: /* UA2005 invalw */
3741 // XXX
3742 default:
3743 goto illegal_insn;
3745 #else
3746 cpu_tmp0 = get_temp_tl(dc);
3747 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3748 gen_helper_wrpsr(cpu_env, cpu_tmp0);
3749 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3750 dc->cc_op = CC_OP_FLAGS;
3751 save_state(dc);
3752 gen_op_next_insn();
3753 tcg_gen_exit_tb(0);
3754 dc->is_br = 1;
3755 #endif
3757 break;
3758 case 0x32: /* wrwim, V9 wrpr */
3760 if (!supervisor(dc))
3761 goto priv_insn;
3762 cpu_tmp0 = get_temp_tl(dc);
3763 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3764 #ifdef TARGET_SPARC64
3765 switch (rd) {
3766 case 0: // tpc
3768 TCGv_ptr r_tsptr;
3770 r_tsptr = tcg_temp_new_ptr();
3771 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3772 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3773 offsetof(trap_state, tpc));
3774 tcg_temp_free_ptr(r_tsptr);
3776 break;
3777 case 1: // tnpc
3779 TCGv_ptr r_tsptr;
3781 r_tsptr = tcg_temp_new_ptr();
3782 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3783 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3784 offsetof(trap_state, tnpc));
3785 tcg_temp_free_ptr(r_tsptr);
3787 break;
3788 case 2: // tstate
3790 TCGv_ptr r_tsptr;
3792 r_tsptr = tcg_temp_new_ptr();
3793 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3794 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3795 offsetof(trap_state,
3796 tstate));
3797 tcg_temp_free_ptr(r_tsptr);
3799 break;
3800 case 3: // tt
3802 TCGv_ptr r_tsptr;
3804 r_tsptr = tcg_temp_new_ptr();
3805 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3806 tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3807 offsetof(trap_state, tt));
3808 tcg_temp_free_ptr(r_tsptr);
3810 break;
3811 case 4: // tick
3813 TCGv_ptr r_tickptr;
3815 r_tickptr = tcg_temp_new_ptr();
3816 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3817 offsetof(CPUSPARCState, tick));
3818 gen_helper_tick_set_count(r_tickptr,
3819 cpu_tmp0);
3820 tcg_temp_free_ptr(r_tickptr);
3822 break;
3823 case 5: // tba
3824 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3825 break;
3826 case 6: // pstate
3827 save_state(dc);
3828 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3829 dc->npc = DYNAMIC_PC;
3830 break;
3831 case 7: // tl
3832 save_state(dc);
3833 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3834 offsetof(CPUSPARCState, tl));
3835 dc->npc = DYNAMIC_PC;
3836 break;
3837 case 8: // pil
3838 gen_helper_wrpil(cpu_env, cpu_tmp0);
3839 break;
3840 case 9: // cwp
3841 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3842 break;
3843 case 10: // cansave
3844 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3845 offsetof(CPUSPARCState,
3846 cansave));
3847 break;
3848 case 11: // canrestore
3849 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3850 offsetof(CPUSPARCState,
3851 canrestore));
3852 break;
3853 case 12: // cleanwin
3854 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3855 offsetof(CPUSPARCState,
3856 cleanwin));
3857 break;
3858 case 13: // otherwin
3859 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3860 offsetof(CPUSPARCState,
3861 otherwin));
3862 break;
3863 case 14: // wstate
3864 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3865 offsetof(CPUSPARCState,
3866 wstate));
3867 break;
3868 case 16: // UA2005 gl
3869 CHECK_IU_FEATURE(dc, GL);
3870 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3871 offsetof(CPUSPARCState, gl));
3872 break;
3873 case 26: // UA2005 strand status
3874 CHECK_IU_FEATURE(dc, HYPV);
3875 if (!hypervisor(dc))
3876 goto priv_insn;
3877 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3878 break;
3879 default:
3880 goto illegal_insn;
3882 #else
3883 tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3884 if (dc->def->nwindows != 32) {
3885 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3886 (1 << dc->def->nwindows) - 1);
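 /* e.g. with the usual 8 windows this masks WIM to
 (1 << 8) - 1 == 0xff, one bit per register window. */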
3888 #endif
3890 break;
3891 case 0x33: /* wrtbr, UA2005 wrhpr */
3893 #ifndef TARGET_SPARC64
3894 if (!supervisor(dc))
3895 goto priv_insn;
3896 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3897 #else
3898 CHECK_IU_FEATURE(dc, HYPV);
3899 if (!hypervisor(dc))
3900 goto priv_insn;
3901 cpu_tmp0 = get_temp_tl(dc);
3902 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3903 switch (rd) {
3904 case 0: // hpstate
3905 // XXX gen_op_wrhpstate();
3906 save_state(dc);
3907 gen_op_next_insn();
3908 tcg_gen_exit_tb(0);
3909 dc->is_br = 1;
3910 break;
3911 case 1: // htstate
3912 // XXX gen_op_wrhtstate();
3913 break;
3914 case 3: // hintp
3915 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3916 break;
3917 case 5: // htba
3918 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3919 break;
3920 case 31: // hstick_cmpr
3922 TCGv_ptr r_tickptr;
3924 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3925 r_tickptr = tcg_temp_new_ptr();
3926 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3927 offsetof(CPUSPARCState, hstick));
3928 gen_helper_tick_set_limit(r_tickptr,
3929 cpu_hstick_cmpr);
3930 tcg_temp_free_ptr(r_tickptr);
3932 break;
3933 case 6: // hver readonly
3934 default:
3935 goto illegal_insn;
3937 #endif
3939 break;
3940 #endif
3941 #ifdef TARGET_SPARC64
3942 case 0x2c: /* V9 movcc */
3944 int cc = GET_FIELD_SP(insn, 11, 12);
3945 int cond = GET_FIELD_SP(insn, 14, 17);
3946 DisasCompare cmp;
3947 TCGv dst;
3949 if (insn & (1 << 18)) {
3950 if (cc == 0) {
3951 gen_compare(&cmp, 0, cond, dc);
3952 } else if (cc == 2) {
3953 gen_compare(&cmp, 1, cond, dc);
3954 } else {
3955 goto illegal_insn;
3957 } else {
3958 gen_fcompare(&cmp, cc, cond);
3961 /* The get_src2 above loaded the normal 13-bit
3962 immediate field, not the 11-bit field we have
3963 in movcc. But it did handle the reg case. */
3964 if (IS_IMM) {
3965 simm = GET_FIELD_SPs(insn, 0, 10);
3966 tcg_gen_movi_tl(cpu_src2, simm);
3969 dst = gen_load_gpr(dc, rd);
3970 tcg_gen_movcond_tl(cmp.cond, dst,
3971 cmp.c1, cmp.c2,
3972 cpu_src2, dst);
3973 free_compare(&cmp);
3974 gen_store_gpr(dc, rd, dst);
3975 break;
3977 case 0x2d: /* V9 sdivx */
3978 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3979 gen_store_gpr(dc, rd, cpu_dst);
3980 break;
3981 case 0x2e: /* V9 popc */
3982 gen_helper_popc(cpu_dst, cpu_src2);
3983 gen_store_gpr(dc, rd, cpu_dst);
3984 break;
3985 case 0x2f: /* V9 movr */
3987 int cond = GET_FIELD_SP(insn, 10, 12);
3988 DisasCompare cmp;
3989 TCGv dst;
3991 gen_compare_reg(&cmp, cond, cpu_src1);
3993 /* The get_src2 above loaded the normal 13-bit
3994 immediate field, not the 10-bit field we have
3995 in movr. But it did handle the reg case. */
3996 if (IS_IMM) {
3997 simm = GET_FIELD_SPs(insn, 0, 9);
3998 tcg_gen_movi_tl(cpu_src2, simm);
4001 dst = gen_load_gpr(dc, rd);
4002 tcg_gen_movcond_tl(cmp.cond, dst,
4003 cmp.c1, cmp.c2,
4004 cpu_src2, dst);
4005 free_compare(&cmp);
4006 gen_store_gpr(dc, rd, dst);
4007 break;
4009 #endif
4010 default:
4011 goto illegal_insn;
4014 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4015 #ifdef TARGET_SPARC64
4016 int opf = GET_FIELD_SP(insn, 5, 13);
4017 rs1 = GET_FIELD(insn, 13, 17);
4018 rs2 = GET_FIELD(insn, 27, 31);
4019 if (gen_trap_ifnofpu(dc)) {
4020 goto jmp_insn;
4023 switch (opf) {
4024 case 0x000: /* VIS I edge8cc */
4025 CHECK_FPU_FEATURE(dc, VIS1);
4026 cpu_src1 = gen_load_gpr(dc, rs1);
4027 cpu_src2 = gen_load_gpr(dc, rs2);
4028 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4029 gen_store_gpr(dc, rd, cpu_dst);
4030 break;
4031 case 0x001: /* VIS II edge8n */
4032 CHECK_FPU_FEATURE(dc, VIS2);
4033 cpu_src1 = gen_load_gpr(dc, rs1);
4034 cpu_src2 = gen_load_gpr(dc, rs2);
4035 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4036 gen_store_gpr(dc, rd, cpu_dst);
4037 break;
4038 case 0x002: /* VIS I edge8lcc */
4039 CHECK_FPU_FEATURE(dc, VIS1);
4040 cpu_src1 = gen_load_gpr(dc, rs1);
4041 cpu_src2 = gen_load_gpr(dc, rs2);
4042 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4043 gen_store_gpr(dc, rd, cpu_dst);
4044 break;
4045 case 0x003: /* VIS II edge8ln */
4046 CHECK_FPU_FEATURE(dc, VIS2);
4047 cpu_src1 = gen_load_gpr(dc, rs1);
4048 cpu_src2 = gen_load_gpr(dc, rs2);
4049 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4050 gen_store_gpr(dc, rd, cpu_dst);
4051 break;
4052 case 0x004: /* VIS I edge16cc */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 cpu_src1 = gen_load_gpr(dc, rs1);
4055 cpu_src2 = gen_load_gpr(dc, rs2);
4056 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4057 gen_store_gpr(dc, rd, cpu_dst);
4058 break;
4059 case 0x005: /* VIS II edge16n */
4060 CHECK_FPU_FEATURE(dc, VIS2);
4061 cpu_src1 = gen_load_gpr(dc, rs1);
4062 cpu_src2 = gen_load_gpr(dc, rs2);
4063 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4064 gen_store_gpr(dc, rd, cpu_dst);
4065 break;
4066 case 0x006: /* VIS I edge16lcc */
4067 CHECK_FPU_FEATURE(dc, VIS1);
4068 cpu_src1 = gen_load_gpr(dc, rs1);
4069 cpu_src2 = gen_load_gpr(dc, rs2);
4070 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4071 gen_store_gpr(dc, rd, cpu_dst);
4072 break;
4073 case 0x007: /* VIS II edge16ln */
4074 CHECK_FPU_FEATURE(dc, VIS2);
4075 cpu_src1 = gen_load_gpr(dc, rs1);
4076 cpu_src2 = gen_load_gpr(dc, rs2);
4077 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4078 gen_store_gpr(dc, rd, cpu_dst);
4079 break;
4080 case 0x008: /* VIS I edge32cc */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 cpu_src1 = gen_load_gpr(dc, rs1);
4083 cpu_src2 = gen_load_gpr(dc, rs2);
4084 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4085 gen_store_gpr(dc, rd, cpu_dst);
4086 break;
4087 case 0x009: /* VIS II edge32n */
4088 CHECK_FPU_FEATURE(dc, VIS2);
4089 cpu_src1 = gen_load_gpr(dc, rs1);
4090 cpu_src2 = gen_load_gpr(dc, rs2);
4091 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4092 gen_store_gpr(dc, rd, cpu_dst);
4093 break;
4094 case 0x00a: /* VIS I edge32lcc */
4095 CHECK_FPU_FEATURE(dc, VIS1);
4096 cpu_src1 = gen_load_gpr(dc, rs1);
4097 cpu_src2 = gen_load_gpr(dc, rs2);
4098 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4099 gen_store_gpr(dc, rd, cpu_dst);
4100 break;
4101 case 0x00b: /* VIS II edge32ln */
4102 CHECK_FPU_FEATURE(dc, VIS2);
4103 cpu_src1 = gen_load_gpr(dc, rs1);
4104 cpu_src2 = gen_load_gpr(dc, rs2);
4105 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4106 gen_store_gpr(dc, rd, cpu_dst);
4107 break;
4108 case 0x010: /* VIS I array8 */
4109 CHECK_FPU_FEATURE(dc, VIS1);
4110 cpu_src1 = gen_load_gpr(dc, rs1);
4111 cpu_src2 = gen_load_gpr(dc, rs2);
4112 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4113 gen_store_gpr(dc, rd, cpu_dst);
4114 break;
4115 case 0x012: /* VIS I array16 */
4116 CHECK_FPU_FEATURE(dc, VIS1);
4117 cpu_src1 = gen_load_gpr(dc, rs1);
4118 cpu_src2 = gen_load_gpr(dc, rs2);
4119 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4120 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4121 gen_store_gpr(dc, rd, cpu_dst);
4122 break;
4123 case 0x014: /* VIS I array32 */
4124 CHECK_FPU_FEATURE(dc, VIS1);
4125 cpu_src1 = gen_load_gpr(dc, rs1);
4126 cpu_src2 = gen_load_gpr(dc, rs2);
4127 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4128 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4129 gen_store_gpr(dc, rd, cpu_dst);
4130 break;
4131 case 0x018: /* VIS I alignaddr */
4132 CHECK_FPU_FEATURE(dc, VIS1);
4133 cpu_src1 = gen_load_gpr(dc, rs1);
4134 cpu_src2 = gen_load_gpr(dc, rs2);
4135 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4136 gen_store_gpr(dc, rd, cpu_dst);
4137 break;
4138 case 0x01a: /* VIS I alignaddrl */
4139 CHECK_FPU_FEATURE(dc, VIS1);
4140 cpu_src1 = gen_load_gpr(dc, rs1);
4141 cpu_src2 = gen_load_gpr(dc, rs2);
4142 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4143 gen_store_gpr(dc, rd, cpu_dst);
4144 break;
4145 case 0x019: /* VIS II bmask */
4146 CHECK_FPU_FEATURE(dc, VIS2);
4147 cpu_src1 = gen_load_gpr(dc, rs1);
4148 cpu_src2 = gen_load_gpr(dc, rs2);
4149 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4150 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
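 /* editorial note: the field written here is GSR.mask,
 consumed by the bshuffle case below */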
4151 gen_store_gpr(dc, rd, cpu_dst);
4152 break;
4153 case 0x020: /* VIS I fcmple16 */
4154 CHECK_FPU_FEATURE(dc, VIS1);
4155 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4156 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4157 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4158 gen_store_gpr(dc, rd, cpu_dst);
4159 break;
4160 case 0x022: /* VIS I fcmpne16 */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4163 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4164 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4165 gen_store_gpr(dc, rd, cpu_dst);
4166 break;
4167 case 0x024: /* VIS I fcmple32 */
4168 CHECK_FPU_FEATURE(dc, VIS1);
4169 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4170 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4171 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4172 gen_store_gpr(dc, rd, cpu_dst);
4173 break;
4174 case 0x026: /* VIS I fcmpne32 */
4175 CHECK_FPU_FEATURE(dc, VIS1);
4176 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4177 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4178 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4179 gen_store_gpr(dc, rd, cpu_dst);
4180 break;
4181 case 0x028: /* VIS I fcmpgt16 */
4182 CHECK_FPU_FEATURE(dc, VIS1);
4183 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4184 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4185 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4186 gen_store_gpr(dc, rd, cpu_dst);
4187 break;
4188 case 0x02a: /* VIS I fcmpeq16 */
4189 CHECK_FPU_FEATURE(dc, VIS1);
4190 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4191 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4192 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4193 gen_store_gpr(dc, rd, cpu_dst);
4194 break;
4195 case 0x02c: /* VIS I fcmpgt32 */
4196 CHECK_FPU_FEATURE(dc, VIS1);
4197 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4198 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4199 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4200 gen_store_gpr(dc, rd, cpu_dst);
4201 break;
4202 case 0x02e: /* VIS I fcmpeq32 */
4203 CHECK_FPU_FEATURE(dc, VIS1);
4204 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4205 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4206 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4207 gen_store_gpr(dc, rd, cpu_dst);
4208 break;
4209 case 0x031: /* VIS I fmul8x16 */
4210 CHECK_FPU_FEATURE(dc, VIS1);
4211 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4212 break;
4213 case 0x033: /* VIS I fmul8x16au */
4214 CHECK_FPU_FEATURE(dc, VIS1);
4215 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4216 break;
4217 case 0x035: /* VIS I fmul8x16al */
4218 CHECK_FPU_FEATURE(dc, VIS1);
4219 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4220 break;
4221 case 0x036: /* VIS I fmul8sux16 */
4222 CHECK_FPU_FEATURE(dc, VIS1);
4223 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4224 break;
4225 case 0x037: /* VIS I fmul8ulx16 */
4226 CHECK_FPU_FEATURE(dc, VIS1);
4227 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4228 break;
4229 case 0x038: /* VIS I fmuld8sux16 */
4230 CHECK_FPU_FEATURE(dc, VIS1);
4231 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4232 break;
4233 case 0x039: /* VIS I fmuld8ulx16 */
4234 CHECK_FPU_FEATURE(dc, VIS1);
4235 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4236 break;
4237 case 0x03a: /* VIS I fpack32 */
4238 CHECK_FPU_FEATURE(dc, VIS1);
4239 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4240 break;
4241 case 0x03b: /* VIS I fpack16 */
4242 CHECK_FPU_FEATURE(dc, VIS1);
4243 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4244 cpu_dst_32 = gen_dest_fpr_F(dc);
4245 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4246 gen_store_fpr_F(dc, rd, cpu_dst_32);
4247 break;
4248 case 0x03d: /* VIS I fpackfix */
4249 CHECK_FPU_FEATURE(dc, VIS1);
4250 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4251 cpu_dst_32 = gen_dest_fpr_F(dc);
4252 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4253 gen_store_fpr_F(dc, rd, cpu_dst_32);
4254 break;
4255 case 0x03e: /* VIS I pdist */
4256 CHECK_FPU_FEATURE(dc, VIS1);
4257 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4258 break;
4259 case 0x048: /* VIS I faligndata */
4260 CHECK_FPU_FEATURE(dc, VIS1);
4261 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4262 break;
4263 case 0x04b: /* VIS I fpmerge */
4264 CHECK_FPU_FEATURE(dc, VIS1);
4265 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4266 break;
4267 case 0x04c: /* VIS II bshuffle */
4268 CHECK_FPU_FEATURE(dc, VIS2);
4269 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4270 break;
4271 case 0x04d: /* VIS I fexpand */
4272 CHECK_FPU_FEATURE(dc, VIS1);
4273 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4274 break;
4275 case 0x050: /* VIS I fpadd16 */
4276 CHECK_FPU_FEATURE(dc, VIS1);
4277 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4278 break;
4279 case 0x051: /* VIS I fpadd16s */
4280 CHECK_FPU_FEATURE(dc, VIS1);
4281 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4282 break;
4283 case 0x052: /* VIS I fpadd32 */
4284 CHECK_FPU_FEATURE(dc, VIS1);
4285 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4286 break;
4287 case 0x053: /* VIS I fpadd32s */
4288 CHECK_FPU_FEATURE(dc, VIS1);
4289 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4290 break;
4291 case 0x054: /* VIS I fpsub16 */
4292 CHECK_FPU_FEATURE(dc, VIS1);
4293 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4294 break;
4295 case 0x055: /* VIS I fpsub16s */
4296 CHECK_FPU_FEATURE(dc, VIS1);
4297 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4298 break;
4299 case 0x056: /* VIS I fpsub32 */
4300 CHECK_FPU_FEATURE(dc, VIS1);
4301 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4302 break;
4303 case 0x057: /* VIS I fpsub32s */
4304 CHECK_FPU_FEATURE(dc, VIS1);
4305 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4306 break;
4307 case 0x060: /* VIS I fzero */
4308 CHECK_FPU_FEATURE(dc, VIS1);
4309 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4310 tcg_gen_movi_i64(cpu_dst_64, 0);
4311 gen_store_fpr_D(dc, rd, cpu_dst_64);
4312 break;
4313 case 0x061: /* VIS I fzeros */
4314 CHECK_FPU_FEATURE(dc, VIS1);
4315 cpu_dst_32 = gen_dest_fpr_F(dc);
4316 tcg_gen_movi_i32(cpu_dst_32, 0);
4317 gen_store_fpr_F(dc, rd, cpu_dst_32);
4318 break;
4319 case 0x062: /* VIS I fnor */
4320 CHECK_FPU_FEATURE(dc, VIS1);
4321 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4322 break;
4323 case 0x063: /* VIS I fnors */
4324 CHECK_FPU_FEATURE(dc, VIS1);
4325 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4326 break;
4327 case 0x064: /* VIS I fandnot2 */
4328 CHECK_FPU_FEATURE(dc, VIS1);
4329 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4330 break;
4331 case 0x065: /* VIS I fandnot2s */
4332 CHECK_FPU_FEATURE(dc, VIS1);
4333 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4334 break;
4335 case 0x066: /* VIS I fnot2 */
4336 CHECK_FPU_FEATURE(dc, VIS1);
4337 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4338 break;
4339 case 0x067: /* VIS I fnot2s */
4340 CHECK_FPU_FEATURE(dc, VIS1);
4341 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4342 break;
4343 case 0x068: /* VIS I fandnot1 */
4344 CHECK_FPU_FEATURE(dc, VIS1);
4345 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4346 break;
4347 case 0x069: /* VIS I fandnot1s */
4348 CHECK_FPU_FEATURE(dc, VIS1);
4349 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4350 break;
4351 case 0x06a: /* VIS I fnot1 */
4352 CHECK_FPU_FEATURE(dc, VIS1);
4353 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4354 break;
4355 case 0x06b: /* VIS I fnot1s */
4356 CHECK_FPU_FEATURE(dc, VIS1);
4357 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4358 break;
4359 case 0x06c: /* VIS I fxor */
4360 CHECK_FPU_FEATURE(dc, VIS1);
4361 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4362 break;
4363 case 0x06d: /* VIS I fxors */
4364 CHECK_FPU_FEATURE(dc, VIS1);
4365 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4366 break;
4367 case 0x06e: /* VIS I fnand */
4368 CHECK_FPU_FEATURE(dc, VIS1);
4369 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4370 break;
4371 case 0x06f: /* VIS I fnands */
4372 CHECK_FPU_FEATURE(dc, VIS1);
4373 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4374 break;
4375 case 0x070: /* VIS I fand */
4376 CHECK_FPU_FEATURE(dc, VIS1);
4377 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4378 break;
4379 case 0x071: /* VIS I fands */
4380 CHECK_FPU_FEATURE(dc, VIS1);
4381 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4382 break;
4383 case 0x072: /* VIS I fxnor */
4384 CHECK_FPU_FEATURE(dc, VIS1);
4385 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4386 break;
4387 case 0x073: /* VIS I fxnors */
4388 CHECK_FPU_FEATURE(dc, VIS1);
4389 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4390 break;
4391 case 0x074: /* VIS I fsrc1 */
4392 CHECK_FPU_FEATURE(dc, VIS1);
4393 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4394 gen_store_fpr_D(dc, rd, cpu_src1_64);
4395 break;
4396 case 0x075: /* VIS I fsrc1s */
4397 CHECK_FPU_FEATURE(dc, VIS1);
4398 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4399 gen_store_fpr_F(dc, rd, cpu_src1_32);
4400 break;
4401 case 0x076: /* VIS I fornot2 */
4402 CHECK_FPU_FEATURE(dc, VIS1);
4403 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4404 break;
4405 case 0x077: /* VIS I fornot2s */
4406 CHECK_FPU_FEATURE(dc, VIS1);
4407 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4408 break;
4409 case 0x078: /* VIS I fsrc2 */
4410 CHECK_FPU_FEATURE(dc, VIS1);
4411 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4412 gen_store_fpr_D(dc, rd, cpu_src1_64);
4413 break;
4414 case 0x079: /* VIS I fsrc2s */
4415 CHECK_FPU_FEATURE(dc, VIS1);
4416 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4417 gen_store_fpr_F(dc, rd, cpu_src1_32);
4418 break;
4419 case 0x07a: /* VIS I fornot1 */
4420 CHECK_FPU_FEATURE(dc, VIS1);
4421 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4422 break;
4423 case 0x07b: /* VIS I fornot1s */
4424 CHECK_FPU_FEATURE(dc, VIS1);
4425 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4426 break;
4427 case 0x07c: /* VIS I for */
4428 CHECK_FPU_FEATURE(dc, VIS1);
4429 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4430 break;
4431 case 0x07d: /* VIS I fors */
4432 CHECK_FPU_FEATURE(dc, VIS1);
4433 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4434 break;
4435 case 0x07e: /* VIS I fone */
4436 CHECK_FPU_FEATURE(dc, VIS1);
4437 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4438 tcg_gen_movi_i64(cpu_dst_64, -1);
4439 gen_store_fpr_D(dc, rd, cpu_dst_64);
4440 break;
4441 case 0x07f: /* VIS I fones */
4442 CHECK_FPU_FEATURE(dc, VIS1);
4443 cpu_dst_32 = gen_dest_fpr_F(dc);
4444 tcg_gen_movi_i32(cpu_dst_32, -1);
4445 gen_store_fpr_F(dc, rd, cpu_dst_32);
4446 break;
4447 case 0x080: /* VIS I shutdown */
4448 case 0x081: /* VIS II siam */
4449 // XXX: shutdown/siam not implemented
4450 goto illegal_insn;
4451 default:
4452 goto illegal_insn;
4453 }
4454 #else
4455 goto ncp_insn;
4456 #endif
4457 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4458 #ifdef TARGET_SPARC64
4459 goto illegal_insn;
4460 #else
4461 goto ncp_insn;
4462 #endif
4463 #ifdef TARGET_SPARC64
4464 } else if (xop == 0x39) { /* V9 return */
4465 TCGv_i32 r_const;
4467 save_state(dc);
4468 cpu_src1 = get_src1(dc, insn);
4469 cpu_tmp0 = get_temp_tl(dc);
4470 if (IS_IMM) { /* immediate */
4471 simm = GET_FIELDs(insn, 19, 31);
4472 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4473 } else { /* register */
4474 rs2 = GET_FIELD(insn, 27, 31);
4475 if (rs2) {
4476 cpu_src2 = gen_load_gpr(dc, rs2);
4477 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4478 } else {
4479 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4480 }
4481 }
4482 gen_helper_restore(cpu_env);
4483 gen_mov_pc_npc(dc);
4484 r_const = tcg_const_i32(3);
4485 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4486 tcg_temp_free_i32(r_const);
4487 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4488 dc->npc = DYNAMIC_PC;
4489 goto jmp_insn;
4490 #endif
4491 } else {
4492 cpu_src1 = get_src1(dc, insn);
4493 cpu_tmp0 = get_temp_tl(dc);
4494 if (IS_IMM) { /* immediate */
4495 simm = GET_FIELDs(insn, 19, 31);
4496 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4497 } else { /* register */
4498 rs2 = GET_FIELD(insn, 27, 31);
4499 if (rs2) {
4500 cpu_src2 = gen_load_gpr(dc, rs2);
4501 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4502 } else {
4503 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4504 }
4505 }
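/* cpu_tmp0 now holds the computed sum (rs1 + simm or rs1 + rs2); the
   cases below use it as the jump target (jmpl, rett) or as the value
   written to rd in the new window (save, restore). */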
4506 switch (xop) {
4507 case 0x38: /* jmpl */
4508 {
4509 TCGv t;
4510 TCGv_i32 r_const;
4512 t = gen_dest_gpr(dc, rd);
4513 tcg_gen_movi_tl(t, dc->pc);
4514 gen_store_gpr(dc, rd, t);
4515 gen_mov_pc_npc(dc);
4516 r_const = tcg_const_i32(3);
4517 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4518 tcg_temp_free_i32(r_const);
4519 gen_address_mask(dc, cpu_tmp0);
4520 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4521 dc->npc = DYNAMIC_PC;
4522 }
4523 goto jmp_insn;
4524 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4525 case 0x39: /* rett, V9 return */
4526 {
4527 TCGv_i32 r_const;
4529 if (!supervisor(dc))
4530 goto priv_insn;
4531 gen_mov_pc_npc(dc);
4532 r_const = tcg_const_i32(3);
4533 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4534 tcg_temp_free_i32(r_const);
4535 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4536 dc->npc = DYNAMIC_PC;
4537 gen_helper_rett(cpu_env);
4538 }
4539 goto jmp_insn;
4540 #endif
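/* flush can be a no-op under TCG: QEMU already invalidates translated
   blocks when code memory is written, so there is no separate
   instruction-cache state to synchronize. */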
4541 case 0x3b: /* flush */
4542 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4543 goto unimp_flush;
4544 /* nop */
4545 break;
4546 case 0x3c: /* save */
4547 save_state(dc);
4548 gen_helper_save(cpu_env);
4549 gen_store_gpr(dc, rd, cpu_tmp0);
4550 break;
4551 case 0x3d: /* restore */
4552 save_state(dc);
4553 gen_helper_restore(cpu_env);
4554 gen_store_gpr(dc, rd, cpu_tmp0);
4555 break;
4556 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4557 case 0x3e: /* V9 done/retry */
4558 {
4559 switch (rd) {
4560 case 0:
4561 if (!supervisor(dc))
4562 goto priv_insn;
4563 dc->npc = DYNAMIC_PC;
4564 dc->pc = DYNAMIC_PC;
4565 gen_helper_done(cpu_env);
4566 goto jmp_insn;
4567 case 1:
4568 if (!supervisor(dc))
4569 goto priv_insn;
4570 dc->npc = DYNAMIC_PC;
4571 dc->pc = DYNAMIC_PC;
4572 gen_helper_retry(cpu_env);
4573 goto jmp_insn;
4574 default:
4575 goto illegal_insn;
4576 }
4577 }
4578 break;
4579 #endif
4580 default:
4581 goto illegal_insn;
4582 }
4583 }
4584 break;
4585 }
4586 break;
4587 case 3: /* load/store instructions */
4588 {
4589 unsigned int xop = GET_FIELD(insn, 7, 12);
4590 /* ??? gen_address_mask prevents us from using a source
4591 register directly. Always generate a temporary. */
4592 TCGv cpu_addr = get_temp_tl(dc);
4594 tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4595 if (xop == 0x3c || xop == 0x3e) {
4596 /* V9 casa/casxa : no offset */
4597 } else if (IS_IMM) { /* immediate */
4598 simm = GET_FIELDs(insn, 19, 31);
4599 if (simm != 0) {
4600 tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4601 }
4602 } else { /* register */
4603 rs2 = GET_FIELD(insn, 27, 31);
4604 if (rs2 != 0) {
4605 tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4606 }
4607 }
4608 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4609 (xop > 0x17 && xop <= 0x1d ) ||
4610 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4611 TCGv cpu_val = gen_dest_gpr(dc, rd);
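/* Integer loads: the plain cases mask the address (32-bit mode on V9)
   before the access; all leave their result in cpu_val, which the
   common gen_store_gpr() below the switch writes back to rd unless
   the case jumps to skip_move. */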
4613 switch (xop) {
4614 case 0x0: /* ld, V9 lduw, load unsigned word */
4615 gen_address_mask(dc, cpu_addr);
4616 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4617 break;
4618 case 0x1: /* ldub, load unsigned byte */
4619 gen_address_mask(dc, cpu_addr);
4620 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4621 break;
4622 case 0x2: /* lduh, load unsigned halfword */
4623 gen_address_mask(dc, cpu_addr);
4624 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4625 break;
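/* ldd loads into an even/odd register pair: rd (which must be even)
   receives the word at the lower address via the common writeback
   path, rd + 1 the word at address + 4. */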
4626 case 0x3: /* ldd, load double word */
4627 if (rd & 1)
4628 goto illegal_insn;
4629 else {
4630 TCGv_i32 r_const;
4631 TCGv_i64 t64;
4633 save_state(dc);
4634 r_const = tcg_const_i32(7);
4635 /* XXX remove alignment check */
4636 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4637 tcg_temp_free_i32(r_const);
4638 gen_address_mask(dc, cpu_addr);
4639 t64 = tcg_temp_new_i64();
4640 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4641 tcg_gen_trunc_i64_tl(cpu_val, t64);
4642 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4643 gen_store_gpr(dc, rd + 1, cpu_val);
4644 tcg_gen_shri_i64(t64, t64, 32);
4645 tcg_gen_trunc_i64_tl(cpu_val, t64);
4646 tcg_temp_free_i64(t64);
4647 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4648 }
4649 break;
4650 case 0x9: /* ldsb, load signed byte */
4651 gen_address_mask(dc, cpu_addr);
4652 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4653 break;
4654 case 0xa: /* ldsh, load signed halfword */
4655 gen_address_mask(dc, cpu_addr);
4656 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4657 break;
4658 case 0xd: /* ldstub -- XXX: should be atomic */
4659 {
4660 TCGv r_const;
4662 gen_address_mask(dc, cpu_addr);
4663 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4664 r_const = tcg_const_tl(0xff);
4665 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4666 tcg_temp_free(r_const);
4667 }
4668 break;
4669 case 0x0f:
4670 /* swap, swap register with memory -- XXX: should also be atomic */
4671 {
4672 TCGv t0 = get_temp_tl(dc);
4673 CHECK_IU_FEATURE(dc, SWAP);
4674 cpu_src1 = gen_load_gpr(dc, rd);
4675 gen_address_mask(dc, cpu_addr);
4676 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4677 tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4678 tcg_gen_mov_tl(cpu_val, t0);
4679 }
4680 break;
4681 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
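/* Alternate-space (ASI) accesses: on SPARC32 these are privileged and
   the ASI must come from the instruction (no immediate form), hence
   the IS_IMM and supervisor checks; save_state() is needed because
   the ASI helpers may raise a fault. */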
4682 case 0x10: /* lda, V9 lduwa, load word alternate */
4683 #ifndef TARGET_SPARC64
4684 if (IS_IMM)
4685 goto illegal_insn;
4686 if (!supervisor(dc))
4687 goto priv_insn;
4688 #endif
4689 save_state(dc);
4690 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4691 break;
4692 case 0x11: /* lduba, load unsigned byte alternate */
4693 #ifndef TARGET_SPARC64
4694 if (IS_IMM)
4695 goto illegal_insn;
4696 if (!supervisor(dc))
4697 goto priv_insn;
4698 #endif
4699 save_state(dc);
4700 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4701 break;
4702 case 0x12: /* lduha, load unsigned halfword alternate */
4703 #ifndef TARGET_SPARC64
4704 if (IS_IMM)
4705 goto illegal_insn;
4706 if (!supervisor(dc))
4707 goto priv_insn;
4708 #endif
4709 save_state(dc);
4710 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4711 break;
4712 case 0x13: /* ldda, load double word alternate */
4713 #ifndef TARGET_SPARC64
4714 if (IS_IMM)
4715 goto illegal_insn;
4716 if (!supervisor(dc))
4717 goto priv_insn;
4718 #endif
4719 if (rd & 1)
4720 goto illegal_insn;
4721 save_state(dc);
4722 gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4723 goto skip_move;
4724 case 0x19: /* ldsba, load signed byte alternate */
4725 #ifndef TARGET_SPARC64
4726 if (IS_IMM)
4727 goto illegal_insn;
4728 if (!supervisor(dc))
4729 goto priv_insn;
4730 #endif
4731 save_state(dc);
4732 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4733 break;
4734 case 0x1a: /* ldsha, load signed halfword alternate */
4735 #ifndef TARGET_SPARC64
4736 if (IS_IMM)
4737 goto illegal_insn;
4738 if (!supervisor(dc))
4739 goto priv_insn;
4740 #endif
4741 save_state(dc);
4742 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4743 break;
4744 case 0x1d: /* ldstuba -- XXX: should be atomic */
4745 #ifndef TARGET_SPARC64
4746 if (IS_IMM)
4747 goto illegal_insn;
4748 if (!supervisor(dc))
4749 goto priv_insn;
4750 #endif
4751 save_state(dc);
4752 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4753 break;
4754 case 0x1f: /* swapa, swap reg with alternate memory --
4755 XXX: should also be atomic */
4756 CHECK_IU_FEATURE(dc, SWAP);
4757 #ifndef TARGET_SPARC64
4758 if (IS_IMM)
4759 goto illegal_insn;
4760 if (!supervisor(dc))
4761 goto priv_insn;
4762 #endif
4763 save_state(dc);
4764 cpu_src1 = gen_load_gpr(dc, rd);
4765 gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4766 break;
4768 #ifndef TARGET_SPARC64
4769 case 0x30: /* ldc */
4770 case 0x31: /* ldcsr */
4771 case 0x33: /* lddc */
4772 goto ncp_insn;
4773 #endif
4774 #endif
4775 #ifdef TARGET_SPARC64
4776 case 0x08: /* V9 ldsw */
4777 gen_address_mask(dc, cpu_addr);
4778 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4779 break;
4780 case 0x0b: /* V9 ldx */
4781 gen_address_mask(dc, cpu_addr);
4782 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4783 break;
4784 case 0x18: /* V9 ldswa */
4785 save_state(dc);
4786 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4787 break;
4788 case 0x1b: /* V9 ldxa */
4789 save_state(dc);
4790 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4791 break;
4792 case 0x2d: /* V9 prefetch, no effect */
4793 goto skip_move;
4794 case 0x30: /* V9 ldfa */
4795 if (gen_trap_ifnofpu(dc)) {
4796 goto jmp_insn;
4797 }
4798 save_state(dc);
4799 gen_ldf_asi(cpu_addr, insn, 4, rd);
4800 gen_update_fprs_dirty(rd);
4801 goto skip_move;
4802 case 0x33: /* V9 lddfa */
4803 if (gen_trap_ifnofpu(dc)) {
4804 goto jmp_insn;
4805 }
4806 save_state(dc);
4807 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4808 gen_update_fprs_dirty(DFPREG(rd));
4809 goto skip_move;
4810 case 0x3d: /* V9 prefetcha, no effect */
4811 goto skip_move;
4812 case 0x32: /* V9 ldqfa */
4813 CHECK_FPU_FEATURE(dc, FLOAT128);
4814 if (gen_trap_ifnofpu(dc)) {
4815 goto jmp_insn;
4816 }
4817 save_state(dc);
4818 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4819 gen_update_fprs_dirty(QFPREG(rd));
4820 goto skip_move;
4821 #endif
4822 default:
4823 goto illegal_insn;
4824 }
4825 gen_store_gpr(dc, rd, cpu_val);
4826 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4827 skip_move: ;
4828 #endif
4829 } else if (xop >= 0x20 && xop < 0x24) {
4830 TCGv t0;
4832 if (gen_trap_ifnofpu(dc)) {
4833 goto jmp_insn;
4834 }
4835 save_state(dc);
4836 switch (xop) {
4837 case 0x20: /* ldf, load fpreg */
4838 gen_address_mask(dc, cpu_addr);
4839 t0 = get_temp_tl(dc);
4840 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4841 cpu_dst_32 = gen_dest_fpr_F(dc);
4842 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4843 gen_store_fpr_F(dc, rd, cpu_dst_32);
4844 break;
4845 case 0x21: /* ldfsr, V9 ldxfsr */
4846 #ifdef TARGET_SPARC64
4847 gen_address_mask(dc, cpu_addr);
4848 if (rd == 1) {
4849 TCGv_i64 t64 = tcg_temp_new_i64();
4850 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4851 gen_helper_ldxfsr(cpu_env, t64);
4852 tcg_temp_free_i64(t64);
4853 break;
4854 }
4855 #endif
4856 cpu_dst_32 = get_temp_i32(dc);
4857 t0 = get_temp_tl(dc);
4858 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4859 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4860 gen_helper_ldfsr(cpu_env, cpu_dst_32);
4861 break;
4862 case 0x22: /* ldqf, load quad fpreg */
4863 {
4864 TCGv_i32 r_const;
4866 CHECK_FPU_FEATURE(dc, FLOAT128);
4867 r_const = tcg_const_i32(dc->mem_idx);
4868 gen_address_mask(dc, cpu_addr);
4869 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4870 tcg_temp_free_i32(r_const);
4871 gen_op_store_QT0_fpr(QFPREG(rd));
4872 gen_update_fprs_dirty(QFPREG(rd));
4873 }
4874 break;
4875 case 0x23: /* lddf, load double fpreg */
4876 gen_address_mask(dc, cpu_addr);
4877 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4878 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4879 gen_store_fpr_D(dc, rd, cpu_dst_64);
4880 break;
4881 default:
4882 goto illegal_insn;
4883 }
4884 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4885 xop == 0xe || xop == 0x1e) {
4886 TCGv cpu_val = gen_load_gpr(dc, rd);
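/* Integer stores mirror the loads above: the value is fetched from rd
   once here; the plain store cases mask the address before the write,
   while the alternate-space forms go through ASI helpers. */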
4888 switch (xop) {
4889 case 0x4: /* st, store word */
4890 gen_address_mask(dc, cpu_addr);
4891 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4892 break;
4893 case 0x5: /* stb, store byte */
4894 gen_address_mask(dc, cpu_addr);
4895 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4896 break;
4897 case 0x6: /* sth, store halfword */
4898 gen_address_mask(dc, cpu_addr);
4899 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4900 break;
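/* std stores an even/odd register pair as one 64-bit access: rd
   supplies the word at the lower address, rd + 1 the word at
   address + 4 (the concat places rd in the high half). */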
4901 case 0x7: /* std, store double word */
4902 if (rd & 1)
4903 goto illegal_insn;
4904 else {
4905 TCGv_i32 r_const;
4906 TCGv_i64 t64;
4907 TCGv lo;
4909 save_state(dc);
4910 gen_address_mask(dc, cpu_addr);
4911 r_const = tcg_const_i32(7);
4912 /* XXX remove alignment check */
4913 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4914 tcg_temp_free_i32(r_const);
4915 lo = gen_load_gpr(dc, rd + 1);
4917 t64 = tcg_temp_new_i64();
4918 tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4919 tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4920 tcg_temp_free_i64(t64);
4921 }
4922 break;
4923 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4924 case 0x14: /* sta, V9 stwa, store word alternate */
4925 #ifndef TARGET_SPARC64
4926 if (IS_IMM)
4927 goto illegal_insn;
4928 if (!supervisor(dc))
4929 goto priv_insn;
4930 #endif
4931 save_state(dc);
4932 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4933 dc->npc = DYNAMIC_PC;
4934 break;
4935 case 0x15: /* stba, store byte alternate */
4936 #ifndef TARGET_SPARC64
4937 if (IS_IMM)
4938 goto illegal_insn;
4939 if (!supervisor(dc))
4940 goto priv_insn;
4941 #endif
4942 save_state(dc);
4943 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4944 dc->npc = DYNAMIC_PC;
4945 break;
4946 case 0x16: /* stha, store halfword alternate */
4947 #ifndef TARGET_SPARC64
4948 if (IS_IMM)
4949 goto illegal_insn;
4950 if (!supervisor(dc))
4951 goto priv_insn;
4952 #endif
4953 save_state(dc);
4954 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4955 dc->npc = DYNAMIC_PC;
4956 break;
4957 case 0x17: /* stda, store double word alternate */
4958 #ifndef TARGET_SPARC64
4959 if (IS_IMM)
4960 goto illegal_insn;
4961 if (!supervisor(dc))
4962 goto priv_insn;
4963 #endif
4964 if (rd & 1)
4965 goto illegal_insn;
4966 else {
4967 save_state(dc);
4968 gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4969 }
4970 break;
4971 #endif
4972 #ifdef TARGET_SPARC64
4973 case 0x0e: /* V9 stx */
4974 gen_address_mask(dc, cpu_addr);
4975 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4976 break;
4977 case 0x1e: /* V9 stxa */
4978 save_state(dc);
4979 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4980 dc->npc = DYNAMIC_PC;
4981 break;
4982 #endif
4983 default:
4984 goto illegal_insn;
4985 }
4986 } else if (xop > 0x23 && xop < 0x28) {
4987 if (gen_trap_ifnofpu(dc)) {
4988 goto jmp_insn;
4989 }
4990 save_state(dc);
4991 switch (xop) {
4992 case 0x24: /* stf, store fpreg */
4993 {
4994 TCGv t = get_temp_tl(dc);
4995 gen_address_mask(dc, cpu_addr);
4996 cpu_src1_32 = gen_load_fpr_F(dc, rd);
4997 tcg_gen_ext_i32_tl(t, cpu_src1_32);
4998 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
4999 }
5000 break;
5001 case 0x25: /* stfsr, V9 stxfsr */
5002 {
5003 TCGv t = get_temp_tl(dc);
5005 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5006 #ifdef TARGET_SPARC64
5007 gen_address_mask(dc, cpu_addr);
5008 if (rd == 1) {
5009 tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5010 break;
5011 }
5012 #endif
5013 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5014 }
5015 break;
5016 case 0x26:
5017 #ifdef TARGET_SPARC64
5018 /* V9 stqf, store quad fpreg */
5019 {
5020 TCGv_i32 r_const;
5022 CHECK_FPU_FEATURE(dc, FLOAT128);
5023 gen_op_load_fpr_QT0(QFPREG(rd));
5024 r_const = tcg_const_i32(dc->mem_idx);
5025 gen_address_mask(dc, cpu_addr);
5026 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5027 tcg_temp_free_i32(r_const);
5028 }
5029 break;
5030 #else /* !TARGET_SPARC64 */
5031 /* stdfq, store floating point queue */
5032 #if defined(CONFIG_USER_ONLY)
5033 goto illegal_insn;
5034 #else
5035 if (!supervisor(dc))
5036 goto priv_insn;
5037 if (gen_trap_ifnofpu(dc)) {
5038 goto jmp_insn;
5039 }
5040 goto nfq_insn;
5041 #endif
5042 #endif
5043 case 0x27: /* stdf, store double fpreg */
5044 gen_address_mask(dc, cpu_addr);
5045 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5046 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5047 break;
5048 default:
5049 goto illegal_insn;
5050 }
5051 } else if (xop > 0x33 && xop < 0x3f) {
5052 save_state(dc);
5053 switch (xop) {
5054 #ifdef TARGET_SPARC64
5055 case 0x34: /* V9 stfa */
5056 if (gen_trap_ifnofpu(dc)) {
5057 goto jmp_insn;
5058 }
5059 gen_stf_asi(cpu_addr, insn, 4, rd);
5060 break;
5061 case 0x36: /* V9 stqfa */
5062 {
5063 TCGv_i32 r_const;
5065 CHECK_FPU_FEATURE(dc, FLOAT128);
5066 if (gen_trap_ifnofpu(dc)) {
5067 goto jmp_insn;
5068 }
5069 r_const = tcg_const_i32(7);
5070 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5071 tcg_temp_free_i32(r_const);
5072 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5073 }
5074 break;
5075 case 0x37: /* V9 stdfa */
5076 if (gen_trap_ifnofpu(dc)) {
5077 goto jmp_insn;
5078 }
5079 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5080 break;
5081 case 0x3e: /* V9 casxa */
5082 rs2 = GET_FIELD(insn, 27, 31);
5083 cpu_src2 = gen_load_gpr(dc, rs2);
5084 gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5085 break;
5086 #else
5087 case 0x34: /* stc */
5088 case 0x35: /* stcsr */
5089 case 0x36: /* stdcq */
5090 case 0x37: /* stdc */
5091 goto ncp_insn;
5092 #endif
5093 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5094 case 0x3c: /* V9 or LEON3 casa */
5095 #ifndef TARGET_SPARC64
5096 CHECK_IU_FEATURE(dc, CASA);
5097 if (IS_IMM) {
5098 goto illegal_insn;
5099 }
5100 /* LEON3 allows CASA from user space with ASI 0xa */
5101 if ((GET_FIELD(insn, 19, 26) != 0xa) && !supervisor(dc)) {
5102 goto priv_insn;
5103 }
5104 #endif
5105 rs2 = GET_FIELD(insn, 27, 31);
5106 cpu_src2 = gen_load_gpr(dc, rs2);
5107 gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5108 break;
5109 #endif
5110 default:
5111 goto illegal_insn;
5112 }
5113 } else {
5114 goto illegal_insn;
5115 }
5116 }
5117 break;
5118 }
5119 /* default case for non jump instructions */
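/* pc/npc model the SPARC delay-slot pair: falling through means
   pc <- npc and npc <- npc + 4, unless npc is dynamic or a
   conditional JUMP_PC value that must be resolved at runtime. */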
5120 if (dc->npc == DYNAMIC_PC) {
5121 dc->pc = DYNAMIC_PC;
5122 gen_op_next_insn();
5123 } else if (dc->npc == JUMP_PC) {
5124 /* we can do a static jump */
5125 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5126 dc->is_br = 1;
5127 } else {
5128 dc->pc = dc->npc;
5129 dc->npc = dc->npc + 4;
5130 }
5131 jmp_insn:
5132 goto egress;
5133 illegal_insn:
5134 {
5135 TCGv_i32 r_const;
5137 save_state(dc);
5138 r_const = tcg_const_i32(TT_ILL_INSN);
5139 gen_helper_raise_exception(cpu_env, r_const);
5140 tcg_temp_free_i32(r_const);
5141 dc->is_br = 1;
5142 }
5143 goto egress;
5144 unimp_flush:
5145 {
5146 TCGv_i32 r_const;
5148 save_state(dc);
5149 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5150 gen_helper_raise_exception(cpu_env, r_const);
5151 tcg_temp_free_i32(r_const);
5152 dc->is_br = 1;
5153 }
5154 goto egress;
5155 #if !defined(CONFIG_USER_ONLY)
5156 priv_insn:
5157 {
5158 TCGv_i32 r_const;
5160 save_state(dc);
5161 r_const = tcg_const_i32(TT_PRIV_INSN);
5162 gen_helper_raise_exception(cpu_env, r_const);
5163 tcg_temp_free_i32(r_const);
5164 dc->is_br = 1;
5165 }
5166 goto egress;
5167 #endif
5168 nfpu_insn:
5169 save_state(dc);
5170 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5171 dc->is_br = 1;
5172 goto egress;
5173 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5174 nfq_insn:
5175 save_state(dc);
5176 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5177 dc->is_br = 1;
5178 goto egress;
5179 #endif
5180 #ifndef TARGET_SPARC64
5181 ncp_insn:
5182 {
5183 TCGv_i32 r_const;
5185 save_state(dc);
5186 r_const = tcg_const_i32(TT_NCP_INSN);
5187 gen_helper_raise_exception(cpu_env, r_const);
5188 tcg_temp_free_i32(r_const);
5189 dc->is_br = 1;
5190 }
5191 goto egress;
5192 #endif
5193 egress:
5194 if (dc->n_t32 != 0) {
5195 int i;
5196 for (i = dc->n_t32 - 1; i >= 0; --i) {
5197 tcg_temp_free_i32(dc->t32[i]);
5198 }
5199 dc->n_t32 = 0;
5200 }
5201 if (dc->n_ttl != 0) {
5202 int i;
5203 for (i = dc->n_ttl - 1; i >= 0; --i) {
5204 tcg_temp_free(dc->ttl[i]);
5205 }
5206 dc->n_ttl = 0;
5207 }
5208 }
5210 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5211 {
5212 SPARCCPU *cpu = sparc_env_get_cpu(env);
5213 CPUState *cs = CPU(cpu);
5214 target_ulong pc_start, last_pc;
5215 DisasContext dc1, *dc = &dc1;
5216 int num_insns;
5217 int max_insns;
5218 unsigned int insn;
5220 memset(dc, 0, sizeof(DisasContext));
5221 dc->tb = tb;
5222 pc_start = tb->pc;
5223 dc->pc = pc_start;
5224 last_pc = dc->pc;
5225 dc->npc = (target_ulong) tb->cs_base;
5226 dc->cc_op = CC_OP_DYNAMIC;
5227 dc->mem_idx = cpu_mmu_index(env, false);
5228 dc->def = env->def;
5229 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5230 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5231 dc->singlestep = (cs->singlestep_enabled || singlestep);
5233 num_insns = 0;
5234 max_insns = tb->cflags & CF_COUNT_MASK;
5235 if (max_insns == 0) {
5236 max_insns = CF_COUNT_MASK;
5237 }
5238 if (max_insns > TCG_MAX_INSNS) {
5239 max_insns = TCG_MAX_INSNS;
5240 }
5242 gen_tb_start(tb);
5243 do {
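/* Record (pc, npc) for this insn. For a conditional delay slot the
   taken target is packed as jump_pc[0] | JUMP_PC: instructions are
   4-byte aligned, so the low bits are free for the tag, and the
   not-taken target is implicitly pc + 4 (see the assert below).
   restore_state_to_opc() decodes this on exception unwind. */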
5244 if (dc->npc & JUMP_PC) {
5245 assert(dc->jump_pc[1] == dc->pc + 4);
5246 tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5247 } else {
5248 tcg_gen_insn_start(dc->pc, dc->npc);
5249 }
5250 num_insns++;
5251 last_pc = dc->pc;
5253 if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5254 if (dc->pc != pc_start) {
5255 save_state(dc);
5256 }
5257 gen_helper_debug(cpu_env);
5258 tcg_gen_exit_tb(0);
5259 dc->is_br = 1;
5260 goto exit_gen_loop;
5261 }
5263 if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
5264 gen_io_start();
5265 }
5267 insn = cpu_ldl_code(env, dc->pc);
5269 disas_sparc_insn(dc, insn);
5271 if (dc->is_br)
5272 break;
5273 /* if the next PC is not last_pc + 4 (a branch was translated), stop now */
5274 if (dc->pc != (last_pc + 4))
5275 break;
5276 /* if we reach a page boundary, we stop generation so that the
5277 PC of a TT_TFAULT exception is always in the right page */
5278 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5279 break;
5280 /* in single-step mode, we generate only one instruction and
5281 then raise an exception */
5282 if (dc->singlestep) {
5283 break;
5284 }
5285 } while (!tcg_op_buf_full() &&
5286 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5287 num_insns < max_insns);
5289 exit_gen_loop:
5290 if (tb->cflags & CF_LAST_IO) {
5291 gen_io_end();
5292 }
5293 if (!dc->is_br) {
5294 if (dc->pc != DYNAMIC_PC &&
5295 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5296 /* static PC and NPC: we can use direct chaining */
5297 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5298 } else {
5299 if (dc->pc != DYNAMIC_PC) {
5300 tcg_gen_movi_tl(cpu_pc, dc->pc);
5301 }
5302 save_npc(dc);
5303 tcg_gen_exit_tb(0);
5304 }
5305 }
5306 gen_tb_end(tb, num_insns);
5308 tb->size = last_pc + 4 - pc_start;
5309 tb->icount = num_insns;
5311 #ifdef DEBUG_DISAS
5312 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5313 qemu_log("--------------\n");
5314 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5315 log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
5316 qemu_log("\n");
5317 }
5318 #endif
5319 }
5321 void gen_intermediate_code_init(CPUSPARCState *env)
5322 {
5323 unsigned int i;
5324 static int inited;
5325 static const char * const gregnames[8] = {
5326 NULL, // g0 not used
5327 "g1",
5328 "g2",
5329 "g3",
5330 "g4",
5331 "g5",
5332 "g6",
5333 "g7",
5335 static const char * const fregnames[32] = {
5336 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5337 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5338 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5339 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5342 /* init various static tables */
5343 if (!inited) {
5344 inited = 1;
5346 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5347 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5348 offsetof(CPUSPARCState, regwptr),
5349 "regwptr");
5350 #ifdef TARGET_SPARC64
5351 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5352 "xcc");
5353 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5354 "asi");
5355 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5356 "fprs");
5357 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5358 "gsr");
5359 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5360 offsetof(CPUSPARCState, tick_cmpr),
5361 "tick_cmpr");
5362 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5363 offsetof(CPUSPARCState, stick_cmpr),
5364 "stick_cmpr");
5365 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5366 offsetof(CPUSPARCState, hstick_cmpr),
5367 "hstick_cmpr");
5368 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5369 "hintp");
5370 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5371 "htba");
5372 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5373 "hver");
5374 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5375 offsetof(CPUSPARCState, ssr), "ssr");
5376 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5377 offsetof(CPUSPARCState, version), "ver");
5378 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5379 offsetof(CPUSPARCState, softint),
5380 "softint");
5381 #else
5382 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5383 "wim");
5384 #endif
5385 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5386 "cond");
5387 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5388 "cc_src");
5389 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5390 offsetof(CPUSPARCState, cc_src2),
5391 "cc_src2");
5392 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5393 "cc_dst");
5394 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5395 "cc_op");
5396 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5397 "psr");
5398 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5399 "fsr");
5400 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5401 "pc");
5402 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5403 "npc");
5404 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5405 #ifndef CONFIG_USER_ONLY
5406 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5407 "tbr");
5408 #endif
5409 for (i = 1; i < 8; i++) {
5410 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5411 offsetof(CPUSPARCState, gregs[i]),
5412 gregnames[i]);
5413 }
5414 for (i = 0; i < TARGET_DPREGS; i++) {
5415 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5416 offsetof(CPUSPARCState, fpr[i]),
5417 fregnames[i]);
5418 }
5419 }
5420 }
5422 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5423 target_ulong *data)
5424 {
5425 target_ulong pc = data[0];
5426 target_ulong npc = data[1];
5428 env->pc = pc;
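/* data[] was recorded by tcg_gen_insn_start() at translation time:
   data[0] is the pc, data[1] either the real npc or a branch target
   tagged with JUMP_PC in its low bits. */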
5429 if (npc == DYNAMIC_PC) {
5430 /* dynamic NPC: already stored */
5431 } else if (npc & JUMP_PC) {
5432 /* jump PC: use 'cond' and the jump targets of the translation */
5433 if (env->cond) {
5434 env->npc = npc & ~3;
5435 } else {
5436 env->npc = pc + 4;
5437 }
5438 } else {
5439 env->npc = npc;
5440 }
5441 }