/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas/disas.h"
#include "exec/helper-proto.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-gen.h"

#include "trace-tcg.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "exec/gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
} DisasContext;

typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
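
/* Worked example (added commentary, not in the original source):
   GET_FIELD numbers bits big-endian, so GET_FIELD(insn, 3, 6) expands to
   (insn >> (31 - 6)) & ((1 << 4) - 1), i.e. it extracts insn<28:25>, the
   four-bit cond field of a SPARC branch.  GET_FIELD_SP reads the same
   kind of range but with bit 0 as the least significant bit, matching
   the numbering used in the architecture manuals. */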
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
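
/* Worked example (added commentary): a 13-bit immediate decoded with
   GET_FIELDs(insn, 19, 31) passes through sign_extend(x, 13).  For
   x = 0x1000 (bit 12 set) this computes (0x1000 << 19) >> 19 with an
   arithmetic right shift, yielding -4096, the correct simm13 value. */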
#define IS_IMM (insn & (1<<13))

static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}

static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}

/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}

static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
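
/* Note (added commentary): each element of cpu_fpr[] is a 64-bit TCG
   global holding a pair of 32-bit float registers, with the
   even-numbered register in bits 63..32 and the odd one in bits 31..0.
   That is why the 64-bit-host load path above shifts right by 32 for an
   even register, and why stores deposit at bit offset 32 for even and
   offset 0 for odd destinations. */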
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        TCGv t = get_temp_tl(dc);
        if (reg == 0) {
            tcg_gen_movi_tl(t, 0);
        } else {
            tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
        return t;
    } else {
        return cpu_gregs[reg];
    }
}

static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        if (reg < 8) {
            tcg_gen_mov_tl(cpu_gregs[reg], v);
        } else {
            tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
    }
}

static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        return get_temp_tl(dc);
    } else {
        return cpu_gregs[reg];
    }
}
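
/* Note (added commentary): only the eight SPARC global registers live in
   cpu_gregs[] as TCG globals.  Windowed registers (reg >= 8, i.e. %o,
   %l and %i) are accessed indirectly through cpu_regwptr, which points
   at the current register window, so a window rotation only has to
   redirect that pointer instead of copying register contents around. */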
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
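
/* Note (added commentary): direct TB chaining via tcg_gen_goto_tb() and
   tcg_gen_exit_tb((uintptr_t)tb + tb_num) is only safe while both the
   branch target pc and its npc stay on the same guest page as this TB
   (and we are not single-stepping); otherwise the code exits with 0 so
   the execution loop performs a fresh TB lookup. */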
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
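
/* Note (added commentary): both helpers above rely on modular
   arithmetic.  For an unsigned 32-bit add r = a + b, a carry occurred
   iff r < a; for r = a - b, a borrow occurred iff a < b.  The setcond
   on the saved cc values therefore recovers the icc.C bit without ever
   materializing the PSR flags. */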
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(t0, cpu_y, 1);
    tcg_gen_andi_tl(t0, t0, 0x7fffffff);
    tcg_gen_or_tl(t0, t0, r_temp);
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
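
/* Note (added commentary): this models one step of the SPARC V8
   multiply-step instruction MULScc.  The multiplicand in src2 is added
   only when Y's low bit is set; Y then shifts right, taking src1's old
   bit 0, while the running sum in cpu_cc_src shifts right with
   (N xor V), the sign of the partial result, inserted at bit 31. */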
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
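
/* Note (added commentary): on 32-bit targets the mulu2/muls2 ops write
   the high half of the product straight into cpu_y; on 64-bit targets
   the shift-right-32 above does the same, so UMUL and SMUL leave the
   upper 32 bits of the 64-bit product in the Y register as the
   architecture requires. */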
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
   FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}

static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
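
/* Note (added commentary): the pc/npc pair models SPARC's delayed
   control transfers.  Every instruction retires by copying npc into pc
   and advancing npc by 4, unless a branch already redirected npc; the
   JUMP_PC state defers even the choice between the two candidate npc
   values until cpu_cond is known. */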
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
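
/* Note (added commentary): when dc->cc_op still records the operation
   that produced the flags, gen_compare avoids computing the PSR at all:
   after SUBcc the condition becomes a direct TCG comparison of the
   saved operands, and after a logic op N and Z can be read from cc_dst
   itself.  Since logic ops clear C and V, conditions such as ltu and vs
   map to TCG_COND_NEVER and geu/vc to TCG_COND_ALWAYS in logic_cond
   above. */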
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
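
/* Note (added commentary): the annul bit 'a' controls the delay slot.
   "ba,a" and "bn,a" skip the slot entirely (hence the extra +4 above),
   while an annulled conditional branch executes the slot only when the
   branch is taken, which gen_branch_a models by emitting the two
   possible continuations under cpu_cond. */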
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
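
/* Note (added commentary): for V9 alternate-space accesses the i bit
   (IS_IMM) selects where the ASI comes from: when set, the value of the
   %asi state register (cpu_asi) is read at runtime; when clear, the
   instruction's immediate asi<7:0> field is a translation-time
   constant. */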
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
                                TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(t64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val, t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, src);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv t;
    TCGv_i64 t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

    t = gen_dest_gpr(dc, rd + 1);
    tcg_gen_trunc_i64_tl(t, t64);
    gen_store_gpr(dc, rd + 1, t);

    tcg_gen_shri_i64(t64, t64, 32);
    tcg_gen_trunc_i64_tl(hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd, hi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
                               TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
#ifdef TARGET_SPARC64
    TCGv_i32 r_asi = gen_get_asi(insn, addr);
#else
    TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
#endif

    gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}

static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static TCGv get_src1(DisasContext *dc, unsigned int insn)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);
    return gen_load_gpr(dc, rs1);
}

static TCGv get_src2(DisasContext *dc, unsigned int insn)
{
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, simm);
        return t;
    } else {      /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        return gen_load_gpr(dc, rs2);
    }
}
2276 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2278 TCGv_i32 c32, zero, dst, s1, s2;
2280 /* We have two choices here: extend the 32 bit data and use movcond_i64,
2281 or fold the comparison down to 32 bits and use movcond_i32. Choose
2282 the later. */
2283 c32 = tcg_temp_new_i32();
2284 if (cmp->is_bool) {
2285 tcg_gen_trunc_i64_i32(c32, cmp->c1);
2286 } else {
2287 TCGv_i64 c64 = tcg_temp_new_i64();
2288 tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2289 tcg_gen_trunc_i64_i32(c32, c64);
2290 tcg_temp_free_i64(c64);
2293 s1 = gen_load_fpr_F(dc, rs);
2294 s2 = gen_load_fpr_F(dc, rd);
2295 dst = gen_dest_fpr_F(dc);
2296 zero = tcg_const_i32(0);
2298 tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2300 tcg_temp_free_i32(c32);
2301 tcg_temp_free_i32(zero);
2302 gen_store_fpr_F(dc, rd, dst);
2305 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2307 TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2308 tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2309 gen_load_fpr_D(dc, rs),
2310 gen_load_fpr_D(dc, rd));
2311 gen_store_fpr_D(dc, rd, dst);
2314 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2316 int qd = QFPREG(rd);
2317 int qs = QFPREG(rs);
2319 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2320 cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2321 tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2322 cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2324 gen_update_fprs_dirty(qd);
2327 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2329 TCGv_i32 r_tl = tcg_temp_new_i32();
2331 /* load env->tl into r_tl */
2332 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2334 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2335 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2337 /* calculate offset to current trap state from env->ts, reuse r_tl */
2338 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2339 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2341 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2343 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2344 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2345 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2346 tcg_temp_free_ptr(r_tl_tmp);
2349 tcg_temp_free_i32(r_tl);
2352 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2353 int width, bool cc, bool left)
2354 {
2355 TCGv lo1, lo2, t1, t2;
2356 uint64_t amask, tabl, tabr;
2357 int shift, imask, omask;
2359 if (cc) {
2360 tcg_gen_mov_tl(cpu_cc_src, s1);
2361 tcg_gen_mov_tl(cpu_cc_src2, s2);
2362 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2363 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2364 dc->cc_op = CC_OP_SUB;
2365 }
2367 /* Theory of operation: there are two tables, left and right (not to
2368 be confused with the left and right versions of the opcode). These
2369 are indexed by the low 3 bits of the inputs. To make things "easy",
2370 these tables are loaded into two constants, TABL and TABR below.
2371 The operation index = (input & imask) << shift calculates the index
2372 into the constant, while val = (table >> index) & omask calculates
2373 the value we're looking for. */
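/* Worked example for width 8, left: if the low three bits of s1 are 3,
   then index = 3 << 3 = 24 and (0x80c0e0f0f8fcfeffULL >> 24) & 0xff =
   0xf8, i.e. five of the eight lane bits set, as expected for a left
   edge starting at byte offset 3. */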
2374 switch (width) {
2375 case 8:
2376 imask = 0x7;
2377 shift = 3;
2378 omask = 0xff;
2379 if (left) {
2380 tabl = 0x80c0e0f0f8fcfeffULL;
2381 tabr = 0xff7f3f1f0f070301ULL;
2382 } else {
2383 tabl = 0x0103070f1f3f7fffULL;
2384 tabr = 0xfffefcf8f0e0c080ULL;
2385 }
2386 break;
2387 case 16:
2388 imask = 0x6;
2389 shift = 1;
2390 omask = 0xf;
2391 if (left) {
2392 tabl = 0x8cef;
2393 tabr = 0xf731;
2394 } else {
2395 tabl = 0x137f;
2396 tabr = 0xfec8;
2397 }
2398 break;
2399 case 32:
2400 imask = 0x4;
2401 shift = 0;
2402 omask = 0x3;
2403 if (left) {
2404 tabl = (2 << 2) | 3;
2405 tabr = (3 << 2) | 1;
2406 } else {
2407 tabl = (1 << 2) | 3;
2408 tabr = (3 << 2) | 2;
2409 }
2410 break;
2411 default:
2412 abort();
2413 }
2415 lo1 = tcg_temp_new();
2416 lo2 = tcg_temp_new();
2417 tcg_gen_andi_tl(lo1, s1, imask);
2418 tcg_gen_andi_tl(lo2, s2, imask);
2419 tcg_gen_shli_tl(lo1, lo1, shift);
2420 tcg_gen_shli_tl(lo2, lo2, shift);
2422 t1 = tcg_const_tl(tabl);
2423 t2 = tcg_const_tl(tabr);
2424 tcg_gen_shr_tl(lo1, t1, lo1);
2425 tcg_gen_shr_tl(lo2, t2, lo2);
2426 tcg_gen_andi_tl(dst, lo1, omask);
2427 tcg_gen_andi_tl(lo2, lo2, omask);
2429 amask = -8;
2430 if (AM_CHECK(dc)) {
2431 amask &= 0xffffffffULL;
2432 }
2433 tcg_gen_andi_tl(s1, s1, amask);
2434 tcg_gen_andi_tl(s2, s2, amask);
2436 /* We want to compute
2437 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2438 We've already done dst = lo1, so this reduces to
2439 dst &= (s1 == s2 ? -1 : lo2)
2440 Which we perform by
2441 lo2 |= -(s1 == s2)
2442 dst &= lo2
2443 */
2444 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2445 tcg_gen_neg_tl(t1, t1);
2446 tcg_gen_or_tl(lo2, lo2, t1);
2447 tcg_gen_and_tl(dst, dst, lo2);
2449 tcg_temp_free(lo1);
2450 tcg_temp_free(lo2);
2451 tcg_temp_free(t1);
2452 tcg_temp_free(t2);
2453 }
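/* gen_alignaddr implements alignaddr/alignaddrl: dst = (s1 + s2) & ~7,
   with the low three bits of the raw sum (negated for the "little"
   alignaddrl variant) deposited into GSR.align, where gen_faligndata
   below reads them back as the byte shift amount. */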
2455 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2456 {
2457 TCGv tmp = tcg_temp_new();
2459 tcg_gen_add_tl(tmp, s1, s2);
2460 tcg_gen_andi_tl(dst, tmp, -8);
2461 if (left) {
2462 tcg_gen_neg_tl(tmp, tmp);
2463 }
2464 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2466 tcg_temp_free(tmp);
2467 }
2469 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2470 {
2471 TCGv t1, t2, shift;
2473 t1 = tcg_temp_new();
2474 t2 = tcg_temp_new();
2475 shift = tcg_temp_new();
2477 tcg_gen_andi_tl(shift, gsr, 7);
2478 tcg_gen_shli_tl(shift, shift, 3);
2479 tcg_gen_shl_tl(t1, s1, shift);
2481 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2482 shift of (up to 63) followed by a constant shift of 1. */
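/* E.g. GSR.align == 0 gives shift == 0: t1 = s1, then shift becomes
   63 and t2 = (s2 >> 63) >> 1 == 0, so dst ends up as exactly s1. */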
2483 tcg_gen_xori_tl(shift, shift, 63);
2484 tcg_gen_shr_tl(t2, s2, shift);
2485 tcg_gen_shri_tl(t2, t2, 1);
2487 tcg_gen_or_tl(dst, t1, t2);
2489 tcg_temp_free(t1);
2490 tcg_temp_free(t2);
2491 tcg_temp_free(shift);
2492 }
2493 #endif
2495 #define CHECK_IU_FEATURE(dc, FEATURE) \
2496 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2497 goto illegal_insn;
2498 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2499 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2500 goto nfpu_insn;
2502 /* before an instruction, dc->pc must be static */
2503 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2505 unsigned int opc, rs1, rs2, rd;
2506 TCGv cpu_src1, cpu_src2;
2507 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2508 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2509 target_long simm;
2511 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2512 tcg_gen_debug_insn_start(dc->pc);
2515 opc = GET_FIELD(insn, 0, 1);
2516 rd = GET_FIELD(insn, 2, 6);
2518 switch (opc) {
2519 case 0: /* branches/sethi */
2521 unsigned int xop = GET_FIELD(insn, 7, 9);
2522 int32_t target;
2523 switch (xop) {
2524 #ifdef TARGET_SPARC64
2525 case 0x1: /* V9 BPcc */
2527 int cc;
2529 target = GET_FIELD_SP(insn, 0, 18);
2530 target = sign_extend(target, 19);
2531 target <<= 2;
2532 cc = GET_FIELD_SP(insn, 20, 21);
2533 if (cc == 0)
2534 do_branch(dc, target, insn, 0);
2535 else if (cc == 2)
2536 do_branch(dc, target, insn, 1);
2537 else
2538 goto illegal_insn;
2539 goto jmp_insn;
2541 case 0x3: /* V9 BPr */
2543 target = GET_FIELD_SP(insn, 0, 13) |
2544 (GET_FIELD_SP(insn, 20, 21) << 14);
2545 target = sign_extend(target, 16);
2546 target <<= 2;
2547 cpu_src1 = get_src1(dc, insn);
2548 do_branch_reg(dc, target, insn, cpu_src1);
2549 goto jmp_insn;
2551 case 0x5: /* V9 FBPcc */
2553 int cc = GET_FIELD_SP(insn, 20, 21);
2554 if (gen_trap_ifnofpu(dc)) {
2555 goto jmp_insn;
2557 target = GET_FIELD_SP(insn, 0, 18);
2558 target = sign_extend(target, 19);
2559 target <<= 2;
2560 do_fbranch(dc, target, insn, cc);
2561 goto jmp_insn;
2563 #else
2564 case 0x7: /* CBN+x */
2566 goto ncp_insn;
2568 #endif
2569 case 0x2: /* BN+x */
2571 target = GET_FIELD(insn, 10, 31);
2572 target = sign_extend(target, 22);
2573 target <<= 2;
2574 do_branch(dc, target, insn, 0);
2575 goto jmp_insn;
2577 case 0x6: /* FBN+x */
2579 if (gen_trap_ifnofpu(dc)) {
2580 goto jmp_insn;
2582 target = GET_FIELD(insn, 10, 31);
2583 target = sign_extend(target, 22);
2584 target <<= 2;
2585 do_fbranch(dc, target, insn, 0);
2586 goto jmp_insn;
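/* SETHI: rd = imm22 << 10. The encoding with rd == %g0 and imm22 == 0
   (0x01000000) is the canonical SPARC nop, handled below. */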
2588 case 0x4: /* SETHI */
2589 /* Special-case %g0 because that's the canonical nop. */
2590 if (rd) {
2591 uint32_t value = GET_FIELD(insn, 10, 31);
2592 TCGv t = gen_dest_gpr(dc, rd);
2593 tcg_gen_movi_tl(t, value << 10);
2594 gen_store_gpr(dc, rd, t);
2596 break;
2597 case 0x0: /* UNIMPL */
2598 default:
2599 goto illegal_insn;
2601 break;
2603 break;
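/* CALL: %o7 (r15) receives the address of the call instruction itself,
   and control transfers to PC + sign_ext(disp30) * 4; under the V9
   32-bit address mask (PSTATE.AM) the target is truncated to 32 bits. */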
2604 case 1: /*CALL*/
2606 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2607 TCGv o7 = gen_dest_gpr(dc, 15);
2609 tcg_gen_movi_tl(o7, dc->pc);
2610 gen_store_gpr(dc, 15, o7);
2611 target += dc->pc;
2612 gen_mov_pc_npc(dc);
2613 #ifdef TARGET_SPARC64
2614 if (unlikely(AM_CHECK(dc))) {
2615 target &= 0xffffffffULL;
2617 #endif
2618 dc->npc = target;
2620 goto jmp_insn;
2621 case 2: /* FPU & Logical Operations */
2623 unsigned int xop = GET_FIELD(insn, 7, 12);
2624 TCGv cpu_dst = get_temp_tl(dc);
2625 TCGv cpu_tmp0;
2627 if (xop == 0x3a) { /* generate trap */
2628 int cond = GET_FIELD(insn, 3, 6);
2629 TCGv_i32 trap;
2630 int l1 = -1, mask;
2632 if (cond == 0) {
2633 /* Trap never. */
2634 break;
2637 save_state(dc);
2639 if (cond != 8) {
2640 /* Conditional trap. */
2641 DisasCompare cmp;
2642 #ifdef TARGET_SPARC64
2643 /* V9 icc/xcc */
2644 int cc = GET_FIELD_SP(insn, 11, 12);
2645 if (cc == 0) {
2646 gen_compare(&cmp, 0, cond, dc);
2647 } else if (cc == 2) {
2648 gen_compare(&cmp, 1, cond, dc);
2649 } else {
2650 goto illegal_insn;
2652 #else
2653 gen_compare(&cmp, 0, cond, dc);
2654 #endif
2655 l1 = gen_new_label();
2656 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2657 cmp.c1, cmp.c2, l1);
2658 free_compare(&cmp);
2661 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2662 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
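/* Per the V8 manual, Tcc vectors through tt = TT_TRAP + (sw_trap# & 0x7f);
   the UA2005 hyperprivileged variant permits a wider software trap
   number range, hence the two masks above. */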
2664 /* Don't use the normal temporaries, as they may well have
2665 gone out of scope with the branch above. While we're
2666 doing that we might as well pre-truncate to 32-bit. */
2667 trap = tcg_temp_new_i32();
2669 rs1 = GET_FIELD_SP(insn, 14, 18);
2670 if (IS_IMM) {
2671 rs2 = GET_FIELD_SP(insn, 0, 6);
2672 if (rs1 == 0) {
2673 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2674 /* Signal that the trap value is fully constant. */
2675 mask = 0;
2676 } else {
2677 TCGv t1 = gen_load_gpr(dc, rs1);
2678 tcg_gen_trunc_tl_i32(trap, t1);
2679 tcg_gen_addi_i32(trap, trap, rs2);
2681 } else {
2682 TCGv t1, t2;
2683 rs2 = GET_FIELD_SP(insn, 0, 4);
2684 t1 = gen_load_gpr(dc, rs1);
2685 t2 = gen_load_gpr(dc, rs2);
2686 tcg_gen_add_tl(t1, t1, t2);
2687 tcg_gen_trunc_tl_i32(trap, t1);
2689 if (mask != 0) {
2690 tcg_gen_andi_i32(trap, trap, mask);
2691 tcg_gen_addi_i32(trap, trap, TT_TRAP);
2694 gen_helper_raise_exception(cpu_env, trap);
2695 tcg_temp_free_i32(trap);
2697 if (cond == 8) {
2698 /* An unconditional trap ends the TB. */
2699 dc->is_br = 1;
2700 goto jmp_insn;
2701 } else {
2702 /* A conditional trap falls through to the next insn. */
2703 gen_set_label(l1);
2704 break;
2706 } else if (xop == 0x28) {
2707 rs1 = GET_FIELD(insn, 13, 17);
2708 switch(rs1) {
2709 case 0: /* rdy */
2710 #ifndef TARGET_SPARC64
2711 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2712 manual, rdy on the microSPARC
2713 II */
2714 case 0x0f: /* stbar in the SPARCv8 manual,
2715 rdy on the microSPARC II */
2716 case 0x10 ... 0x1f: /* implementation-dependent in the
2717 SPARCv8 manual, rdy on the
2718 microSPARC II */
2719 /* Read Asr17 */
2720 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2721 TCGv t = gen_dest_gpr(dc, rd);
2722 /* Read Asr17 for a Leon3 monoprocessor */
2723 tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2724 gen_store_gpr(dc, rd, t);
2725 break;
2727 #endif
2728 gen_store_gpr(dc, rd, cpu_y);
2729 break;
2730 #ifdef TARGET_SPARC64
2731 case 0x2: /* V9 rdccr */
2732 update_psr(dc);
2733 gen_helper_rdccr(cpu_dst, cpu_env);
2734 gen_store_gpr(dc, rd, cpu_dst);
2735 break;
2736 case 0x3: /* V9 rdasi */
2737 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2738 gen_store_gpr(dc, rd, cpu_dst);
2739 break;
2740 case 0x4: /* V9 rdtick */
2742 TCGv_ptr r_tickptr;
2744 r_tickptr = tcg_temp_new_ptr();
2745 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2746 offsetof(CPUSPARCState, tick));
2747 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2748 tcg_temp_free_ptr(r_tickptr);
2749 gen_store_gpr(dc, rd, cpu_dst);
2751 break;
2752 case 0x5: /* V9 rdpc */
2754 TCGv t = gen_dest_gpr(dc, rd);
2755 if (unlikely(AM_CHECK(dc))) {
2756 tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2757 } else {
2758 tcg_gen_movi_tl(t, dc->pc);
2760 gen_store_gpr(dc, rd, t);
2762 break;
2763 case 0x6: /* V9 rdfprs */
2764 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2765 gen_store_gpr(dc, rd, cpu_dst);
2766 break;
2767 case 0xf: /* V9 membar */
2768 break; /* no effect */
2769 case 0x13: /* Graphics Status */
2770 if (gen_trap_ifnofpu(dc)) {
2771 goto jmp_insn;
2773 gen_store_gpr(dc, rd, cpu_gsr);
2774 break;
2775 case 0x16: /* Softint */
2776 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2777 gen_store_gpr(dc, rd, cpu_dst);
2778 break;
2779 case 0x17: /* Tick compare */
2780 gen_store_gpr(dc, rd, cpu_tick_cmpr);
2781 break;
2782 case 0x18: /* System tick */
2784 TCGv_ptr r_tickptr;
2786 r_tickptr = tcg_temp_new_ptr();
2787 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2788 offsetof(CPUSPARCState, stick));
2789 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2790 tcg_temp_free_ptr(r_tickptr);
2791 gen_store_gpr(dc, rd, cpu_dst);
2793 break;
2794 case 0x19: /* System tick compare */
2795 gen_store_gpr(dc, rd, cpu_stick_cmpr);
2796 break;
2797 case 0x10: /* Performance Control */
2798 case 0x11: /* Performance Instrumentation Counter */
2799 case 0x12: /* Dispatch Control */
2800 case 0x14: /* Softint set, WO */
2801 case 0x15: /* Softint clear, WO */
2802 #endif
2803 default:
2804 goto illegal_insn;
2806 #if !defined(CONFIG_USER_ONLY)
2807 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2808 #ifndef TARGET_SPARC64
2809 if (!supervisor(dc)) {
2810 goto priv_insn;
2812 update_psr(dc);
2813 gen_helper_rdpsr(cpu_dst, cpu_env);
2814 #else
2815 CHECK_IU_FEATURE(dc, HYPV);
2816 if (!hypervisor(dc))
2817 goto priv_insn;
2818 rs1 = GET_FIELD(insn, 13, 17);
2819 switch (rs1) {
2820 case 0: // hpstate
2821 // gen_op_rdhpstate();
2822 break;
2823 case 1: // htstate
2824 // gen_op_rdhtstate();
2825 break;
2826 case 3: // hintp
2827 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2828 break;
2829 case 5: // htba
2830 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2831 break;
2832 case 6: // hver
2833 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2834 break;
2835 case 31: // hstick_cmpr
2836 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2837 break;
2838 default:
2839 goto illegal_insn;
2841 #endif
2842 gen_store_gpr(dc, rd, cpu_dst);
2843 break;
2844 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2845 if (!supervisor(dc)) {
2846 goto priv_insn;
2848 cpu_tmp0 = get_temp_tl(dc);
2849 #ifdef TARGET_SPARC64
2850 rs1 = GET_FIELD(insn, 13, 17);
2851 switch (rs1) {
2852 case 0: // tpc
2854 TCGv_ptr r_tsptr;
2856 r_tsptr = tcg_temp_new_ptr();
2857 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2858 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2859 offsetof(trap_state, tpc));
2860 tcg_temp_free_ptr(r_tsptr);
2862 break;
2863 case 1: // tnpc
2865 TCGv_ptr r_tsptr;
2867 r_tsptr = tcg_temp_new_ptr();
2868 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2869 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2870 offsetof(trap_state, tnpc));
2871 tcg_temp_free_ptr(r_tsptr);
2873 break;
2874 case 2: // tstate
2876 TCGv_ptr r_tsptr;
2878 r_tsptr = tcg_temp_new_ptr();
2879 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2880 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2881 offsetof(trap_state, tstate));
2882 tcg_temp_free_ptr(r_tsptr);
2884 break;
2885 case 3: // tt
2887 TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2889 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2890 tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2891 offsetof(trap_state, tt));
2892 tcg_temp_free_ptr(r_tsptr);
2894 break;
2895 case 4: // tick
2897 TCGv_ptr r_tickptr;
2899 r_tickptr = tcg_temp_new_ptr();
2900 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2901 offsetof(CPUSPARCState, tick));
2902 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2903 tcg_temp_free_ptr(r_tickptr);
2905 break;
2906 case 5: // tba
2907 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2908 break;
2909 case 6: // pstate
2910 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2911 offsetof(CPUSPARCState, pstate));
2912 break;
2913 case 7: // tl
2914 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2915 offsetof(CPUSPARCState, tl));
2916 break;
2917 case 8: // pil
2918 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2919 offsetof(CPUSPARCState, psrpil));
2920 break;
2921 case 9: // cwp
2922 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2923 break;
2924 case 10: // cansave
2925 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2926 offsetof(CPUSPARCState, cansave));
2927 break;
2928 case 11: // canrestore
2929 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2930 offsetof(CPUSPARCState, canrestore));
2931 break;
2932 case 12: // cleanwin
2933 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2934 offsetof(CPUSPARCState, cleanwin));
2935 break;
2936 case 13: // otherwin
2937 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2938 offsetof(CPUSPARCState, otherwin));
2939 break;
2940 case 14: // wstate
2941 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2942 offsetof(CPUSPARCState, wstate));
2943 break;
2944 case 16: // UA2005 gl
2945 CHECK_IU_FEATURE(dc, GL);
2946 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2947 offsetof(CPUSPARCState, gl));
2948 break;
2949 case 26: // UA2005 strand status
2950 CHECK_IU_FEATURE(dc, HYPV);
2951 if (!hypervisor(dc))
2952 goto priv_insn;
2953 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2954 break;
2955 case 31: // ver
2956 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2957 break;
2958 case 15: // fq
2959 default:
2960 goto illegal_insn;
2962 #else
2963 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2964 #endif
2965 gen_store_gpr(dc, rd, cpu_tmp0);
2966 break;
2967 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2968 #ifdef TARGET_SPARC64
2969 save_state(dc);
2970 gen_helper_flushw(cpu_env);
2971 #else
2972 if (!supervisor(dc))
2973 goto priv_insn;
2974 gen_store_gpr(dc, rd, cpu_tbr);
2975 #endif
2976 break;
2977 #endif
2978 } else if (xop == 0x34) { /* FPU Operations */
2979 if (gen_trap_ifnofpu(dc)) {
2980 goto jmp_insn;
2982 gen_op_clear_ieee_excp_and_FTT();
2983 rs1 = GET_FIELD(insn, 13, 17);
2984 rs2 = GET_FIELD(insn, 27, 31);
2985 xop = GET_FIELD(insn, 18, 26);
2986 save_state(dc);
2987 switch (xop) {
2988 case 0x1: /* fmovs */
2989 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2990 gen_store_fpr_F(dc, rd, cpu_src1_32);
2991 break;
2992 case 0x5: /* fnegs */
2993 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2994 break;
2995 case 0x9: /* fabss */
2996 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2997 break;
2998 case 0x29: /* fsqrts */
2999 CHECK_FPU_FEATURE(dc, FSQRT);
3000 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3001 break;
3002 case 0x2a: /* fsqrtd */
3003 CHECK_FPU_FEATURE(dc, FSQRT);
3004 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3005 break;
3006 case 0x2b: /* fsqrtq */
3007 CHECK_FPU_FEATURE(dc, FLOAT128);
3008 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3009 break;
3010 case 0x41: /* fadds */
3011 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3012 break;
3013 case 0x42: /* faddd */
3014 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3015 break;
3016 case 0x43: /* faddq */
3017 CHECK_FPU_FEATURE(dc, FLOAT128);
3018 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3019 break;
3020 case 0x45: /* fsubs */
3021 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3022 break;
3023 case 0x46: /* fsubd */
3024 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3025 break;
3026 case 0x47: /* fsubq */
3027 CHECK_FPU_FEATURE(dc, FLOAT128);
3028 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3029 break;
3030 case 0x49: /* fmuls */
3031 CHECK_FPU_FEATURE(dc, FMUL);
3032 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3033 break;
3034 case 0x4a: /* fmuld */
3035 CHECK_FPU_FEATURE(dc, FMUL);
3036 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3037 break;
3038 case 0x4b: /* fmulq */
3039 CHECK_FPU_FEATURE(dc, FLOAT128);
3040 CHECK_FPU_FEATURE(dc, FMUL);
3041 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3042 break;
3043 case 0x4d: /* fdivs */
3044 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3045 break;
3046 case 0x4e: /* fdivd */
3047 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3048 break;
3049 case 0x4f: /* fdivq */
3050 CHECK_FPU_FEATURE(dc, FLOAT128);
3051 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3052 break;
3053 case 0x69: /* fsmuld */
3054 CHECK_FPU_FEATURE(dc, FSMULD);
3055 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3056 break;
3057 case 0x6e: /* fdmulq */
3058 CHECK_FPU_FEATURE(dc, FLOAT128);
3059 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3060 break;
3061 case 0xc4: /* fitos */
3062 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3063 break;
3064 case 0xc6: /* fdtos */
3065 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3066 break;
3067 case 0xc7: /* fqtos */
3068 CHECK_FPU_FEATURE(dc, FLOAT128);
3069 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3070 break;
3071 case 0xc8: /* fitod */
3072 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3073 break;
3074 case 0xc9: /* fstod */
3075 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3076 break;
3077 case 0xcb: /* fqtod */
3078 CHECK_FPU_FEATURE(dc, FLOAT128);
3079 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3080 break;
3081 case 0xcc: /* fitoq */
3082 CHECK_FPU_FEATURE(dc, FLOAT128);
3083 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3084 break;
3085 case 0xcd: /* fstoq */
3086 CHECK_FPU_FEATURE(dc, FLOAT128);
3087 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3088 break;
3089 case 0xce: /* fdtoq */
3090 CHECK_FPU_FEATURE(dc, FLOAT128);
3091 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3092 break;
3093 case 0xd1: /* fstoi */
3094 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3095 break;
3096 case 0xd2: /* fdtoi */
3097 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3098 break;
3099 case 0xd3: /* fqtoi */
3100 CHECK_FPU_FEATURE(dc, FLOAT128);
3101 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3102 break;
3103 #ifdef TARGET_SPARC64
3104 case 0x2: /* V9 fmovd */
3105 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3106 gen_store_fpr_D(dc, rd, cpu_src1_64);
3107 break;
3108 case 0x3: /* V9 fmovq */
3109 CHECK_FPU_FEATURE(dc, FLOAT128);
3110 gen_move_Q(rd, rs2);
3111 break;
3112 case 0x6: /* V9 fnegd */
3113 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3114 break;
3115 case 0x7: /* V9 fnegq */
3116 CHECK_FPU_FEATURE(dc, FLOAT128);
3117 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3118 break;
3119 case 0xa: /* V9 fabsd */
3120 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3121 break;
3122 case 0xb: /* V9 fabsq */
3123 CHECK_FPU_FEATURE(dc, FLOAT128);
3124 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3125 break;
3126 case 0x81: /* V9 fstox */
3127 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3128 break;
3129 case 0x82: /* V9 fdtox */
3130 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3131 break;
3132 case 0x83: /* V9 fqtox */
3133 CHECK_FPU_FEATURE(dc, FLOAT128);
3134 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3135 break;
3136 case 0x84: /* V9 fxtos */
3137 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3138 break;
3139 case 0x88: /* V9 fxtod */
3140 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3141 break;
3142 case 0x8c: /* V9 fxtoq */
3143 CHECK_FPU_FEATURE(dc, FLOAT128);
3144 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3145 break;
3146 #endif
3147 default:
3148 goto illegal_insn;
3150 } else if (xop == 0x35) { /* FPU conditional moves & compares */
3151 #ifdef TARGET_SPARC64
3152 int cond;
3153 #endif
3154 if (gen_trap_ifnofpu(dc)) {
3155 goto jmp_insn;
3157 gen_op_clear_ieee_excp_and_FTT();
3158 rs1 = GET_FIELD(insn, 13, 17);
3159 rs2 = GET_FIELD(insn, 27, 31);
3160 xop = GET_FIELD(insn, 18, 26);
3161 save_state(dc);
3163 #ifdef TARGET_SPARC64
3164 #define FMOVR(sz) \
3165 do { \
3166 DisasCompare cmp; \
3167 cond = GET_FIELD_SP(insn, 10, 12); \
3168 cpu_src1 = get_src1(dc, insn); \
3169 gen_compare_reg(&cmp, cond, cpu_src1); \
3170 gen_fmov##sz(dc, &cmp, rd, rs2); \
3171 free_compare(&cmp); \
3172 } while (0)
3174 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3175 FMOVR(s);
3176 break;
3177 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3178 FMOVR(d);
3179 break;
3180 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3181 CHECK_FPU_FEATURE(dc, FLOAT128);
3182 FMOVR(q);
3183 break;
3185 #undef FMOVR
3186 #endif
3187 switch (xop) {
3188 #ifdef TARGET_SPARC64
3189 #define FMOVCC(fcc, sz) \
3190 do { \
3191 DisasCompare cmp; \
3192 cond = GET_FIELD_SP(insn, 14, 17); \
3193 gen_fcompare(&cmp, fcc, cond); \
3194 gen_fmov##sz(dc, &cmp, rd, rs2); \
3195 free_compare(&cmp); \
3196 } while (0)
3198 case 0x001: /* V9 fmovscc %fcc0 */
3199 FMOVCC(0, s);
3200 break;
3201 case 0x002: /* V9 fmovdcc %fcc0 */
3202 FMOVCC(0, d);
3203 break;
3204 case 0x003: /* V9 fmovqcc %fcc0 */
3205 CHECK_FPU_FEATURE(dc, FLOAT128);
3206 FMOVCC(0, q);
3207 break;
3208 case 0x041: /* V9 fmovscc %fcc1 */
3209 FMOVCC(1, s);
3210 break;
3211 case 0x042: /* V9 fmovdcc %fcc1 */
3212 FMOVCC(1, d);
3213 break;
3214 case 0x043: /* V9 fmovqcc %fcc1 */
3215 CHECK_FPU_FEATURE(dc, FLOAT128);
3216 FMOVCC(1, q);
3217 break;
3218 case 0x081: /* V9 fmovscc %fcc2 */
3219 FMOVCC(2, s);
3220 break;
3221 case 0x082: /* V9 fmovdcc %fcc2 */
3222 FMOVCC(2, d);
3223 break;
3224 case 0x083: /* V9 fmovqcc %fcc2 */
3225 CHECK_FPU_FEATURE(dc, FLOAT128);
3226 FMOVCC(2, q);
3227 break;
3228 case 0x0c1: /* V9 fmovscc %fcc3 */
3229 FMOVCC(3, s);
3230 break;
3231 case 0x0c2: /* V9 fmovdcc %fcc3 */
3232 FMOVCC(3, d);
3233 break;
3234 case 0x0c3: /* V9 fmovqcc %fcc3 */
3235 CHECK_FPU_FEATURE(dc, FLOAT128);
3236 FMOVCC(3, q);
3237 break;
3238 #undef FMOVCC
3239 #define FMOVCC(xcc, sz) \
3240 do { \
3241 DisasCompare cmp; \
3242 cond = GET_FIELD_SP(insn, 14, 17); \
3243 gen_compare(&cmp, xcc, cond, dc); \
3244 gen_fmov##sz(dc, &cmp, rd, rs2); \
3245 free_compare(&cmp); \
3246 } while (0)
3248 case 0x101: /* V9 fmovscc %icc */
3249 FMOVCC(0, s);
3250 break;
3251 case 0x102: /* V9 fmovdcc %icc */
3252 FMOVCC(0, d);
3253 break;
3254 case 0x103: /* V9 fmovqcc %icc */
3255 CHECK_FPU_FEATURE(dc, FLOAT128);
3256 FMOVCC(0, q);
3257 break;
3258 case 0x181: /* V9 fmovscc %xcc */
3259 FMOVCC(1, s);
3260 break;
3261 case 0x182: /* V9 fmovdcc %xcc */
3262 FMOVCC(1, d);
3263 break;
3264 case 0x183: /* V9 fmovqcc %xcc */
3265 CHECK_FPU_FEATURE(dc, FLOAT128);
3266 FMOVCC(1, q);
3267 break;
3268 #undef FMOVCC
3269 #endif
3270 case 0x51: /* fcmps, V9 %fcc */
3271 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3272 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3273 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3274 break;
3275 case 0x52: /* fcmpd, V9 %fcc */
3276 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3277 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3278 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3279 break;
3280 case 0x53: /* fcmpq, V9 %fcc */
3281 CHECK_FPU_FEATURE(dc, FLOAT128);
3282 gen_op_load_fpr_QT0(QFPREG(rs1));
3283 gen_op_load_fpr_QT1(QFPREG(rs2));
3284 gen_op_fcmpq(rd & 3);
3285 break;
3286 case 0x55: /* fcmpes, V9 %fcc */
3287 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3288 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3289 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3290 break;
3291 case 0x56: /* fcmped, V9 %fcc */
3292 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3293 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3294 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3295 break;
3296 case 0x57: /* fcmpeq, V9 %fcc */
3297 CHECK_FPU_FEATURE(dc, FLOAT128);
3298 gen_op_load_fpr_QT0(QFPREG(rs1));
3299 gen_op_load_fpr_QT1(QFPREG(rs2));
3300 gen_op_fcmpeq(rd & 3);
3301 break;
3302 default:
3303 goto illegal_insn;
3305 } else if (xop == 0x2) {
3306 TCGv dst = gen_dest_gpr(dc, rd);
3307 rs1 = GET_FIELD(insn, 13, 17);
3308 if (rs1 == 0) {
3309 /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3310 if (IS_IMM) { /* immediate */
3311 simm = GET_FIELDs(insn, 19, 31);
3312 tcg_gen_movi_tl(dst, simm);
3313 gen_store_gpr(dc, rd, dst);
3314 } else { /* register */
3315 rs2 = GET_FIELD(insn, 27, 31);
3316 if (rs2 == 0) {
3317 tcg_gen_movi_tl(dst, 0);
3318 gen_store_gpr(dc, rd, dst);
3319 } else {
3320 cpu_src2 = gen_load_gpr(dc, rs2);
3321 gen_store_gpr(dc, rd, cpu_src2);
3324 } else {
3325 cpu_src1 = get_src1(dc, insn);
3326 if (IS_IMM) { /* immediate */
3327 simm = GET_FIELDs(insn, 19, 31);
3328 tcg_gen_ori_tl(dst, cpu_src1, simm);
3329 gen_store_gpr(dc, rd, dst);
3330 } else { /* register */
3331 rs2 = GET_FIELD(insn, 27, 31);
3332 if (rs2 == 0) {
3333 /* mov shortcut: or x, %g0, y -> mov x, y */
3334 gen_store_gpr(dc, rd, cpu_src1);
3335 } else {
3336 cpu_src2 = gen_load_gpr(dc, rs2);
3337 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3338 gen_store_gpr(dc, rd, dst);
3342 #ifdef TARGET_SPARC64
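/* In the V9 shift cases below, insn bit 12 selects the 64-bit form:
   sllx/srlx/srax use a 6-bit shift count, while the 32-bit forms mask
   the count to 5 bits and srl/sra first zero-/sign-extend the low
   32 bits of the source. */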
3343 } else if (xop == 0x25) { /* sll, V9 sllx */
3344 cpu_src1 = get_src1(dc, insn);
3345 if (IS_IMM) { /* immediate */
3346 simm = GET_FIELDs(insn, 20, 31);
3347 if (insn & (1 << 12)) {
3348 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3349 } else {
3350 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3352 } else { /* register */
3353 rs2 = GET_FIELD(insn, 27, 31);
3354 cpu_src2 = gen_load_gpr(dc, rs2);
3355 cpu_tmp0 = get_temp_tl(dc);
3356 if (insn & (1 << 12)) {
3357 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3358 } else {
3359 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3361 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3363 gen_store_gpr(dc, rd, cpu_dst);
3364 } else if (xop == 0x26) { /* srl, V9 srlx */
3365 cpu_src1 = get_src1(dc, insn);
3366 if (IS_IMM) { /* immediate */
3367 simm = GET_FIELDs(insn, 20, 31);
3368 if (insn & (1 << 12)) {
3369 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3370 } else {
3371 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3372 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3374 } else { /* register */
3375 rs2 = GET_FIELD(insn, 27, 31);
3376 cpu_src2 = gen_load_gpr(dc, rs2);
3377 cpu_tmp0 = get_temp_tl(dc);
3378 if (insn & (1 << 12)) {
3379 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3380 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3381 } else {
3382 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3383 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3384 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3387 gen_store_gpr(dc, rd, cpu_dst);
3388 } else if (xop == 0x27) { /* sra, V9 srax */
3389 cpu_src1 = get_src1(dc, insn);
3390 if (IS_IMM) { /* immediate */
3391 simm = GET_FIELDs(insn, 20, 31);
3392 if (insn & (1 << 12)) {
3393 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3394 } else {
3395 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3396 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3398 } else { /* register */
3399 rs2 = GET_FIELD(insn, 27, 31);
3400 cpu_src2 = gen_load_gpr(dc, rs2);
3401 cpu_tmp0 = get_temp_tl(dc);
3402 if (insn & (1 << 12)) {
3403 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3404 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3405 } else {
3406 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3407 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3408 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3411 gen_store_gpr(dc, rd, cpu_dst);
3412 #endif
3413 } else if (xop < 0x36) {
3414 if (xop < 0x20) {
3415 cpu_src1 = get_src1(dc, insn);
3416 cpu_src2 = get_src2(dc, insn);
3417 switch (xop & ~0x10) {
3418 case 0x0: /* add */
3419 if (xop & 0x10) {
3420 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3421 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3422 dc->cc_op = CC_OP_ADD;
3423 } else {
3424 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3426 break;
3427 case 0x1: /* and */
3428 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3429 if (xop & 0x10) {
3430 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3431 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3432 dc->cc_op = CC_OP_LOGIC;
3434 break;
3435 case 0x2: /* or */
3436 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3437 if (xop & 0x10) {
3438 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3439 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3440 dc->cc_op = CC_OP_LOGIC;
3442 break;
3443 case 0x3: /* xor */
3444 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3445 if (xop & 0x10) {
3446 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3447 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3448 dc->cc_op = CC_OP_LOGIC;
3450 break;
3451 case 0x4: /* sub */
3452 if (xop & 0x10) {
3453 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3454 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3455 dc->cc_op = CC_OP_SUB;
3456 } else {
3457 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3459 break;
3460 case 0x5: /* andn */
3461 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3462 if (xop & 0x10) {
3463 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3464 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3465 dc->cc_op = CC_OP_LOGIC;
3467 break;
3468 case 0x6: /* orn */
3469 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3470 if (xop & 0x10) {
3471 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3472 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3473 dc->cc_op = CC_OP_LOGIC;
3475 break;
3476 case 0x7: /* xorn */
3477 tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3478 if (xop & 0x10) {
3479 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3480 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3481 dc->cc_op = CC_OP_LOGIC;
3483 break;
3484 case 0x8: /* addx, V9 addc */
3485 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3486 (xop & 0x10));
3487 break;
3488 #ifdef TARGET_SPARC64
3489 case 0x9: /* V9 mulx */
3490 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3491 break;
3492 #endif
3493 case 0xa: /* umul */
3494 CHECK_IU_FEATURE(dc, MUL);
3495 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3496 if (xop & 0x10) {
3497 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3498 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3499 dc->cc_op = CC_OP_LOGIC;
3501 break;
3502 case 0xb: /* smul */
3503 CHECK_IU_FEATURE(dc, MUL);
3504 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3505 if (xop & 0x10) {
3506 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3507 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3508 dc->cc_op = CC_OP_LOGIC;
3510 break;
3511 case 0xc: /* subx, V9 subc */
3512 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3513 (xop & 0x10));
3514 break;
3515 #ifdef TARGET_SPARC64
3516 case 0xd: /* V9 udivx */
3517 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3518 break;
3519 #endif
3520 case 0xe: /* udiv */
3521 CHECK_IU_FEATURE(dc, DIV);
3522 if (xop & 0x10) {
3523 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3524 cpu_src2);
3525 dc->cc_op = CC_OP_DIV;
3526 } else {
3527 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3528 cpu_src2);
3530 break;
3531 case 0xf: /* sdiv */
3532 CHECK_IU_FEATURE(dc, DIV);
3533 if (xop & 0x10) {
3534 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3535 cpu_src2);
3536 dc->cc_op = CC_OP_DIV;
3537 } else {
3538 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3539 cpu_src2);
3541 break;
3542 default:
3543 goto illegal_insn;
3545 gen_store_gpr(dc, rd, cpu_dst);
3546 } else {
3547 cpu_src1 = get_src1(dc, insn);
3548 cpu_src2 = get_src2(dc, insn);
3549 switch (xop) {
3550 case 0x20: /* taddcc */
3551 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3552 gen_store_gpr(dc, rd, cpu_dst);
3553 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3554 dc->cc_op = CC_OP_TADD;
3555 break;
3556 case 0x21: /* tsubcc */
3557 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3558 gen_store_gpr(dc, rd, cpu_dst);
3559 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3560 dc->cc_op = CC_OP_TSUB;
3561 break;
3562 case 0x22: /* taddcctv */
3563 gen_helper_taddcctv(cpu_dst, cpu_env,
3564 cpu_src1, cpu_src2);
3565 gen_store_gpr(dc, rd, cpu_dst);
3566 dc->cc_op = CC_OP_TADDTV;
3567 break;
3568 case 0x23: /* tsubcctv */
3569 gen_helper_tsubcctv(cpu_dst, cpu_env,
3570 cpu_src1, cpu_src2);
3571 gen_store_gpr(dc, rd, cpu_dst);
3572 dc->cc_op = CC_OP_TSUBTV;
3573 break;
3574 case 0x24: /* mulscc */
3575 update_psr(dc);
3576 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3577 gen_store_gpr(dc, rd, cpu_dst);
3578 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3579 dc->cc_op = CC_OP_ADD;
3580 break;
3581 #ifndef TARGET_SPARC64
3582 case 0x25: /* sll */
3583 if (IS_IMM) { /* immediate */
3584 simm = GET_FIELDs(insn, 20, 31);
3585 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3586 } else { /* register */
3587 cpu_tmp0 = get_temp_tl(dc);
3588 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3589 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3591 gen_store_gpr(dc, rd, cpu_dst);
3592 break;
3593 case 0x26: /* srl */
3594 if (IS_IMM) { /* immediate */
3595 simm = GET_FIELDs(insn, 20, 31);
3596 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3597 } else { /* register */
3598 cpu_tmp0 = get_temp_tl(dc);
3599 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3600 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3602 gen_store_gpr(dc, rd, cpu_dst);
3603 break;
3604 case 0x27: /* sra */
3605 if (IS_IMM) { /* immediate */
3606 simm = GET_FIELDs(insn, 20, 31);
3607 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3608 } else { /* register */
3609 cpu_tmp0 = get_temp_tl(dc);
3610 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3611 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3613 gen_store_gpr(dc, rd, cpu_dst);
3614 break;
3615 #endif
3616 case 0x30:
3618 cpu_tmp0 = get_temp_tl(dc);
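/* WRY/WRASR write r[rs1] XOR reg_or_imm to the target register, which
   is why each writable case below starts by xoring cpu_src1 with
   cpu_src2 rather than doing a plain move. */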
3619 switch(rd) {
3620 case 0: /* wry */
3621 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3622 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3623 break;
3624 #ifndef TARGET_SPARC64
3625 case 0x01 ... 0x0f: /* undefined in the
3626 SPARCv8 manual, nop
3627 on the microSPARC
3628 II */
3629 case 0x10 ... 0x1f: /* implementation-dependent
3630 in the SPARCv8
3631 manual, nop on the
3632 microSPARC II */
3633 if ((rd == 0x13) && (dc->def->features &
3634 CPU_FEATURE_POWERDOWN)) {
3635 /* LEON3 power-down */
3636 save_state(dc);
3637 gen_helper_power_down(cpu_env);
3639 break;
3640 #else
3641 case 0x2: /* V9 wrccr */
3642 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3643 gen_helper_wrccr(cpu_env, cpu_tmp0);
3644 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3645 dc->cc_op = CC_OP_FLAGS;
3646 break;
3647 case 0x3: /* V9 wrasi */
3648 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3649 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3650 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3651 break;
3652 case 0x6: /* V9 wrfprs */
3653 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3654 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3655 save_state(dc);
3656 gen_op_next_insn();
3657 tcg_gen_exit_tb(0);
3658 dc->is_br = 1;
3659 break;
3660 case 0xf: /* V9 sir, nop if user */
3661 #if !defined(CONFIG_USER_ONLY)
3662 if (supervisor(dc)) {
3663 ; // XXX
3665 #endif
3666 break;
3667 case 0x13: /* Graphics Status */
3668 if (gen_trap_ifnofpu(dc)) {
3669 goto jmp_insn;
3671 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3672 break;
3673 case 0x14: /* Softint set */
3674 if (!supervisor(dc))
3675 goto illegal_insn;
3676 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3677 gen_helper_set_softint(cpu_env, cpu_tmp0);
3678 break;
3679 case 0x15: /* Softint clear */
3680 if (!supervisor(dc))
3681 goto illegal_insn;
3682 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3683 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3684 break;
3685 case 0x16: /* Softint write */
3686 if (!supervisor(dc))
3687 goto illegal_insn;
3688 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3689 gen_helper_write_softint(cpu_env, cpu_tmp0);
3690 break;
3691 case 0x17: /* Tick compare */
3692 #if !defined(CONFIG_USER_ONLY)
3693 if (!supervisor(dc))
3694 goto illegal_insn;
3695 #endif
3697 TCGv_ptr r_tickptr;
3699 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3700 cpu_src2);
3701 r_tickptr = tcg_temp_new_ptr();
3702 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3703 offsetof(CPUSPARCState, tick));
3704 gen_helper_tick_set_limit(r_tickptr,
3705 cpu_tick_cmpr);
3706 tcg_temp_free_ptr(r_tickptr);
3708 break;
3709 case 0x18: /* System tick */
3710 #if !defined(CONFIG_USER_ONLY)
3711 if (!supervisor(dc))
3712 goto illegal_insn;
3713 #endif
3715 TCGv_ptr r_tickptr;
3717 tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3718 cpu_src2);
3719 r_tickptr = tcg_temp_new_ptr();
3720 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3721 offsetof(CPUSPARCState, stick));
3722 gen_helper_tick_set_count(r_tickptr,
3723 cpu_tmp0);
3724 tcg_temp_free_ptr(r_tickptr);
3726 break;
3727 case 0x19: /* System tick compare */
3728 #if !defined(CONFIG_USER_ONLY)
3729 if (!supervisor(dc))
3730 goto illegal_insn;
3731 #endif
3733 TCGv_ptr r_tickptr;
3735 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3736 cpu_src2);
3737 r_tickptr = tcg_temp_new_ptr();
3738 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3739 offsetof(CPUSPARCState, stick));
3740 gen_helper_tick_set_limit(r_tickptr,
3741 cpu_stick_cmpr);
3742 tcg_temp_free_ptr(r_tickptr);
3744 break;
3746 case 0x10: /* Performance Control */
3747 case 0x11: /* Performance Instrumentation
3748 Counter */
3749 case 0x12: /* Dispatch Control */
3750 #endif
3751 default:
3752 goto illegal_insn;
3755 break;
3756 #if !defined(CONFIG_USER_ONLY)
3757 case 0x31: /* wrpsr, V9 saved, restored */
3759 if (!supervisor(dc))
3760 goto priv_insn;
3761 #ifdef TARGET_SPARC64
3762 switch (rd) {
3763 case 0:
3764 gen_helper_saved(cpu_env);
3765 break;
3766 case 1:
3767 gen_helper_restored(cpu_env);
3768 break;
3769 case 2: /* UA2005 allclean */
3770 case 3: /* UA2005 otherw */
3771 case 4: /* UA2005 normalw */
3772 case 5: /* UA2005 invalw */
3773 // XXX
3774 default:
3775 goto illegal_insn;
3777 #else
3778 cpu_tmp0 = get_temp_tl(dc);
3779 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3780 gen_helper_wrpsr(cpu_env, cpu_tmp0);
3781 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3782 dc->cc_op = CC_OP_FLAGS;
3783 save_state(dc);
3784 gen_op_next_insn();
3785 tcg_gen_exit_tb(0);
3786 dc->is_br = 1;
3787 #endif
3789 break;
3790 case 0x32: /* wrwim, V9 wrpr */
3792 if (!supervisor(dc))
3793 goto priv_insn;
3794 cpu_tmp0 = get_temp_tl(dc);
3795 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3796 #ifdef TARGET_SPARC64
3797 switch (rd) {
3798 case 0: // tpc
3800 TCGv_ptr r_tsptr;
3802 r_tsptr = tcg_temp_new_ptr();
3803 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3804 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3805 offsetof(trap_state, tpc));
3806 tcg_temp_free_ptr(r_tsptr);
3808 break;
3809 case 1: // tnpc
3811 TCGv_ptr r_tsptr;
3813 r_tsptr = tcg_temp_new_ptr();
3814 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3815 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3816 offsetof(trap_state, tnpc));
3817 tcg_temp_free_ptr(r_tsptr);
3819 break;
3820 case 2: // tstate
3822 TCGv_ptr r_tsptr;
3824 r_tsptr = tcg_temp_new_ptr();
3825 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3826 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3827 offsetof(trap_state,
3828 tstate));
3829 tcg_temp_free_ptr(r_tsptr);
3831 break;
3832 case 3: // tt
3834 TCGv_ptr r_tsptr;
3836 r_tsptr = tcg_temp_new_ptr();
3837 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3838 tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3839 offsetof(trap_state, tt));
3840 tcg_temp_free_ptr(r_tsptr);
3842 break;
3843 case 4: // tick
3845 TCGv_ptr r_tickptr;
3847 r_tickptr = tcg_temp_new_ptr();
3848 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3849 offsetof(CPUSPARCState, tick));
3850 gen_helper_tick_set_count(r_tickptr,
3851 cpu_tmp0);
3852 tcg_temp_free_ptr(r_tickptr);
3854 break;
3855 case 5: // tba
3856 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3857 break;
3858 case 6: // pstate
3859 save_state(dc);
3860 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3861 dc->npc = DYNAMIC_PC;
3862 break;
3863 case 7: // tl
3864 save_state(dc);
3865 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3866 offsetof(CPUSPARCState, tl));
3867 dc->npc = DYNAMIC_PC;
3868 break;
3869 case 8: // pil
3870 gen_helper_wrpil(cpu_env, cpu_tmp0);
3871 break;
3872 case 9: // cwp
3873 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3874 break;
3875 case 10: // cansave
3876 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3877 offsetof(CPUSPARCState,
3878 cansave));
3879 break;
3880 case 11: // canrestore
3881 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3882 offsetof(CPUSPARCState,
3883 canrestore));
3884 break;
3885 case 12: // cleanwin
3886 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3887 offsetof(CPUSPARCState,
3888 cleanwin));
3889 break;
3890 case 13: // otherwin
3891 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3892 offsetof(CPUSPARCState,
3893 otherwin));
3894 break;
3895 case 14: // wstate
3896 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3897 offsetof(CPUSPARCState,
3898 wstate));
3899 break;
3900 case 16: // UA2005 gl
3901 CHECK_IU_FEATURE(dc, GL);
3902 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3903 offsetof(CPUSPARCState, gl));
3904 break;
3905 case 26: // UA2005 strand status
3906 CHECK_IU_FEATURE(dc, HYPV);
3907 if (!hypervisor(dc))
3908 goto priv_insn;
3909 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3910 break;
3911 default:
3912 goto illegal_insn;
3914 #else
3915 tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3916 if (dc->def->nwindows != 32) {
3917 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3918 (1 << dc->def->nwindows) - 1);
3920 #endif
3922 break;
3923 case 0x33: /* wrtbr, UA2005 wrhpr */
3925 #ifndef TARGET_SPARC64
3926 if (!supervisor(dc))
3927 goto priv_insn;
3928 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3929 #else
3930 CHECK_IU_FEATURE(dc, HYPV);
3931 if (!hypervisor(dc))
3932 goto priv_insn;
3933 cpu_tmp0 = get_temp_tl(dc);
3934 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3935 switch (rd) {
3936 case 0: // hpstate
3937 // XXX gen_op_wrhpstate();
3938 save_state(dc);
3939 gen_op_next_insn();
3940 tcg_gen_exit_tb(0);
3941 dc->is_br = 1;
3942 break;
3943 case 1: // htstate
3944 // XXX gen_op_wrhtstate();
3945 break;
3946 case 3: // hintp
3947 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3948 break;
3949 case 5: // htba
3950 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3951 break;
3952 case 31: // hstick_cmpr
3954 TCGv_ptr r_tickptr;
3956 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3957 r_tickptr = tcg_temp_new_ptr();
3958 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3959 offsetof(CPUSPARCState, hstick));
3960 gen_helper_tick_set_limit(r_tickptr,
3961 cpu_hstick_cmpr);
3962 tcg_temp_free_ptr(r_tickptr);
3964 break;
3965 case 6: // hver readonly
3966 default:
3967 goto illegal_insn;
3969 #endif
3971 break;
3972 #endif
3973 #ifdef TARGET_SPARC64
3974 case 0x2c: /* V9 movcc */
3976 int cc = GET_FIELD_SP(insn, 11, 12);
3977 int cond = GET_FIELD_SP(insn, 14, 17);
3978 DisasCompare cmp;
3979 TCGv dst;
3981 if (insn & (1 << 18)) {
3982 if (cc == 0) {
3983 gen_compare(&cmp, 0, cond, dc);
3984 } else if (cc == 2) {
3985 gen_compare(&cmp, 1, cond, dc);
3986 } else {
3987 goto illegal_insn;
3989 } else {
3990 gen_fcompare(&cmp, cc, cond);
3993 /* The get_src2 above loaded the normal 13-bit
3994 immediate field, not the 11-bit field we have
3995 in movcc. But it did handle the reg case. */
3996 if (IS_IMM) {
3997 simm = GET_FIELD_SPs(insn, 0, 10);
3998 tcg_gen_movi_tl(cpu_src2, simm);
4001 dst = gen_load_gpr(dc, rd);
4002 tcg_gen_movcond_tl(cmp.cond, dst,
4003 cmp.c1, cmp.c2,
4004 cpu_src2, dst);
4005 free_compare(&cmp);
4006 gen_store_gpr(dc, rd, dst);
4007 break;
4009 case 0x2d: /* V9 sdivx */
4010 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4011 gen_store_gpr(dc, rd, cpu_dst);
4012 break;
4013 case 0x2e: /* V9 popc */
4014 gen_helper_popc(cpu_dst, cpu_src2);
4015 gen_store_gpr(dc, rd, cpu_dst);
4016 break;
4017 case 0x2f: /* V9 movr */
4019 int cond = GET_FIELD_SP(insn, 10, 12);
4020 DisasCompare cmp;
4021 TCGv dst;
4023 gen_compare_reg(&cmp, cond, cpu_src1);
4025 /* The get_src2 above loaded the normal 13-bit
4026 immediate field, not the 10-bit field we have
4027 in movr. But it did handle the reg case. */
4028 if (IS_IMM) {
4029 simm = GET_FIELD_SPs(insn, 0, 9);
4030 tcg_gen_movi_tl(cpu_src2, simm);
4033 dst = gen_load_gpr(dc, rd);
4034 tcg_gen_movcond_tl(cmp.cond, dst,
4035 cmp.c1, cmp.c2,
4036 cpu_src2, dst);
4037 free_compare(&cmp);
4038 gen_store_gpr(dc, rd, dst);
4039 break;
4041 #endif
4042 default:
4043 goto illegal_insn;
4046 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4047 #ifdef TARGET_SPARC64
4048 int opf = GET_FIELD_SP(insn, 5, 13);
4049 rs1 = GET_FIELD(insn, 13, 17);
4050 rs2 = GET_FIELD(insn, 27, 31);
4051 if (gen_trap_ifnofpu(dc)) {
4052 goto jmp_insn;
4055 switch (opf) {
4056 case 0x000: /* VIS I edge8cc */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 cpu_src1 = gen_load_gpr(dc, rs1);
4059 cpu_src2 = gen_load_gpr(dc, rs2);
4060 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4061 gen_store_gpr(dc, rd, cpu_dst);
4062 break;
4063 case 0x001: /* VIS II edge8n */
4064 CHECK_FPU_FEATURE(dc, VIS2);
4065 cpu_src1 = gen_load_gpr(dc, rs1);
4066 cpu_src2 = gen_load_gpr(dc, rs2);
4067 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4068 gen_store_gpr(dc, rd, cpu_dst);
4069 break;
4070 case 0x002: /* VIS I edge8lcc */
4071 CHECK_FPU_FEATURE(dc, VIS1);
4072 cpu_src1 = gen_load_gpr(dc, rs1);
4073 cpu_src2 = gen_load_gpr(dc, rs2);
4074 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4075 gen_store_gpr(dc, rd, cpu_dst);
4076 break;
4077 case 0x003: /* VIS II edge8ln */
4078 CHECK_FPU_FEATURE(dc, VIS2);
4079 cpu_src1 = gen_load_gpr(dc, rs1);
4080 cpu_src2 = gen_load_gpr(dc, rs2);
4081 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4082 gen_store_gpr(dc, rd, cpu_dst);
4083 break;
4084 case 0x004: /* VIS I edge16cc */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 cpu_src1 = gen_load_gpr(dc, rs1);
4087 cpu_src2 = gen_load_gpr(dc, rs2);
4088 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4089 gen_store_gpr(dc, rd, cpu_dst);
4090 break;
4091 case 0x005: /* VIS II edge16n */
4092 CHECK_FPU_FEATURE(dc, VIS2);
4093 cpu_src1 = gen_load_gpr(dc, rs1);
4094 cpu_src2 = gen_load_gpr(dc, rs2);
4095 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4096 gen_store_gpr(dc, rd, cpu_dst);
4097 break;
4098 case 0x006: /* VIS I edge16lcc */
4099 CHECK_FPU_FEATURE(dc, VIS1);
4100 cpu_src1 = gen_load_gpr(dc, rs1);
4101 cpu_src2 = gen_load_gpr(dc, rs2);
4102 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4103 gen_store_gpr(dc, rd, cpu_dst);
4104 break;
4105 case 0x007: /* VIS II edge16ln */
4106 CHECK_FPU_FEATURE(dc, VIS2);
4107 cpu_src1 = gen_load_gpr(dc, rs1);
4108 cpu_src2 = gen_load_gpr(dc, rs2);
4109 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4110 gen_store_gpr(dc, rd, cpu_dst);
4111 break;
4112 case 0x008: /* VIS I edge32cc */
4113 CHECK_FPU_FEATURE(dc, VIS1);
4114 cpu_src1 = gen_load_gpr(dc, rs1);
4115 cpu_src2 = gen_load_gpr(dc, rs2);
4116 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4117 gen_store_gpr(dc, rd, cpu_dst);
4118 break;
4119 case 0x009: /* VIS II edge32n */
4120 CHECK_FPU_FEATURE(dc, VIS2);
4121 cpu_src1 = gen_load_gpr(dc, rs1);
4122 cpu_src2 = gen_load_gpr(dc, rs2);
4123 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4124 gen_store_gpr(dc, rd, cpu_dst);
4125 break;
4126 case 0x00a: /* VIS I edge32lcc */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 cpu_src1 = gen_load_gpr(dc, rs1);
4129 cpu_src2 = gen_load_gpr(dc, rs2);
4130 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4131 gen_store_gpr(dc, rd, cpu_dst);
4132 break;
4133 case 0x00b: /* VIS II edge32ln */
4134 CHECK_FPU_FEATURE(dc, VIS2);
4135 cpu_src1 = gen_load_gpr(dc, rs1);
4136 cpu_src2 = gen_load_gpr(dc, rs2);
4137 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4138 gen_store_gpr(dc, rd, cpu_dst);
4139 break;
4140 case 0x010: /* VIS I array8 */
4141 CHECK_FPU_FEATURE(dc, VIS1);
4142 cpu_src1 = gen_load_gpr(dc, rs1);
4143 cpu_src2 = gen_load_gpr(dc, rs2);
4144 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4145 gen_store_gpr(dc, rd, cpu_dst);
4146 break;
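/* array16 and array32 below reuse the array8 helper, scaling the
   resulting blocked address by the element size (<< 1 and << 2). */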
4147 case 0x012: /* VIS I array16 */
4148 CHECK_FPU_FEATURE(dc, VIS1);
4149 cpu_src1 = gen_load_gpr(dc, rs1);
4150 cpu_src2 = gen_load_gpr(dc, rs2);
4151 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4152 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4153 gen_store_gpr(dc, rd, cpu_dst);
4154 break;
4155 case 0x014: /* VIS I array32 */
4156 CHECK_FPU_FEATURE(dc, VIS1);
4157 cpu_src1 = gen_load_gpr(dc, rs1);
4158 cpu_src2 = gen_load_gpr(dc, rs2);
4159 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4160 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4161 gen_store_gpr(dc, rd, cpu_dst);
4162 break;
4163 case 0x018: /* VIS I alignaddr */
4164 CHECK_FPU_FEATURE(dc, VIS1);
4165 cpu_src1 = gen_load_gpr(dc, rs1);
4166 cpu_src2 = gen_load_gpr(dc, rs2);
4167 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4168 gen_store_gpr(dc, rd, cpu_dst);
4169 break;
4170 case 0x01a: /* VIS I alignaddrl */
4171 CHECK_FPU_FEATURE(dc, VIS1);
4172 cpu_src1 = gen_load_gpr(dc, rs1);
4173 cpu_src2 = gen_load_gpr(dc, rs2);
4174 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4175 gen_store_gpr(dc, rd, cpu_dst);
4176 break;
4177 case 0x019: /* VIS II bmask */
4178 CHECK_FPU_FEATURE(dc, VIS2);
4179 cpu_src1 = gen_load_gpr(dc, rs1);
4180 cpu_src2 = gen_load_gpr(dc, rs2);
4181 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4182 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4183 gen_store_gpr(dc, rd, cpu_dst);
4184 break;
4185 case 0x020: /* VIS I fcmple16 */
4186 CHECK_FPU_FEATURE(dc, VIS1);
4187 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4188 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4189 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4190 gen_store_gpr(dc, rd, cpu_dst);
4191 break;
4192 case 0x022: /* VIS I fcmpne16 */
4193 CHECK_FPU_FEATURE(dc, VIS1);
4194 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4195 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4196 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4197 gen_store_gpr(dc, rd, cpu_dst);
4198 break;
4199 case 0x024: /* VIS I fcmple32 */
4200 CHECK_FPU_FEATURE(dc, VIS1);
4201 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4202 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4203 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4204 gen_store_gpr(dc, rd, cpu_dst);
4205 break;
4206 case 0x026: /* VIS I fcmpne32 */
4207 CHECK_FPU_FEATURE(dc, VIS1);
4208 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4209 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4210 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4211 gen_store_gpr(dc, rd, cpu_dst);
4212 break;
4213 case 0x028: /* VIS I fcmpgt16 */
4214 CHECK_FPU_FEATURE(dc, VIS1);
4215 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4216 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4217 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4218 gen_store_gpr(dc, rd, cpu_dst);
4219 break;
4220 case 0x02a: /* VIS I fcmpeq16 */
4221 CHECK_FPU_FEATURE(dc, VIS1);
4222 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4223 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4224 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4225 gen_store_gpr(dc, rd, cpu_dst);
4226 break;
4227 case 0x02c: /* VIS I fcmpgt32 */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4230 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4231 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4232 gen_store_gpr(dc, rd, cpu_dst);
4233 break;
4234 case 0x02e: /* VIS I fcmpeq32 */
4235 CHECK_FPU_FEATURE(dc, VIS1);
4236 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4237 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4238 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4239 gen_store_gpr(dc, rd, cpu_dst);
4240 break;
4241 case 0x031: /* VIS I fmul8x16 */
4242 CHECK_FPU_FEATURE(dc, VIS1);
4243 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4244 break;
4245 case 0x033: /* VIS I fmul8x16au */
4246 CHECK_FPU_FEATURE(dc, VIS1);
4247 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4248 break;
4249 case 0x035: /* VIS I fmul8x16al */
4250 CHECK_FPU_FEATURE(dc, VIS1);
4251 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4252 break;
4253 case 0x036: /* VIS I fmul8sux16 */
4254 CHECK_FPU_FEATURE(dc, VIS1);
4255 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4256 break;
4257 case 0x037: /* VIS I fmul8ulx16 */
4258 CHECK_FPU_FEATURE(dc, VIS1);
4259 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4260 break;
4261 case 0x038: /* VIS I fmuld8sux16 */
4262 CHECK_FPU_FEATURE(dc, VIS1);
4263 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4264 break;
4265 case 0x039: /* VIS I fmuld8ulx16 */
4266 CHECK_FPU_FEATURE(dc, VIS1);
4267 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4268 break;
4269 case 0x03a: /* VIS I fpack32 */
4270 CHECK_FPU_FEATURE(dc, VIS1);
4271 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4272 break;
4273 case 0x03b: /* VIS I fpack16 */
4274 CHECK_FPU_FEATURE(dc, VIS1);
4275 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4276 cpu_dst_32 = gen_dest_fpr_F(dc);
4277 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4278 gen_store_fpr_F(dc, rd, cpu_dst_32);
4279 break;
4280 case 0x03d: /* VIS I fpackfix */
4281 CHECK_FPU_FEATURE(dc, VIS1);
4282 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4283 cpu_dst_32 = gen_dest_fpr_F(dc);
4284 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4285 gen_store_fpr_F(dc, rd, cpu_dst_32);
4286 break;
4287 case 0x03e: /* VIS I pdist */
4288 CHECK_FPU_FEATURE(dc, VIS1);
4289 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4290 break;
4291 case 0x048: /* VIS I faligndata */
4292 CHECK_FPU_FEATURE(dc, VIS1);
4293 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4294 break;
4295 case 0x04b: /* VIS I fpmerge */
4296 CHECK_FPU_FEATURE(dc, VIS1);
4297 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4298 break;
4299 case 0x04c: /* VIS II bshuffle */
4300 CHECK_FPU_FEATURE(dc, VIS2);
4301 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4302 break;
4303 case 0x04d: /* VIS I fexpand */
4304 CHECK_FPU_FEATURE(dc, VIS1);
4305 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4306 break;
4307 case 0x050: /* VIS I fpadd16 */
4308 CHECK_FPU_FEATURE(dc, VIS1);
4309 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4310 break;
4311 case 0x051: /* VIS I fpadd16s */
4312 CHECK_FPU_FEATURE(dc, VIS1);
4313 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4314 break;
4315 case 0x052: /* VIS I fpadd32 */
4316 CHECK_FPU_FEATURE(dc, VIS1);
4317 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4318 break;
4319 case 0x053: /* VIS I fpadd32s */
4320 CHECK_FPU_FEATURE(dc, VIS1);
4321 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4322 break;
4323 case 0x054: /* VIS I fpsub16 */
4324 CHECK_FPU_FEATURE(dc, VIS1);
4325 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4326 break;
4327 case 0x055: /* VIS I fpsub16s */
4328 CHECK_FPU_FEATURE(dc, VIS1);
4329 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4330 break;
4331 case 0x056: /* VIS I fpsub32 */
4332 CHECK_FPU_FEATURE(dc, VIS1);
4333 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4334 break;
4335 case 0x057: /* VIS I fpsub32s */
4336 CHECK_FPU_FEATURE(dc, VIS1);
4337 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4338 break;
4339 case 0x060: /* VIS I fzero */
4340 CHECK_FPU_FEATURE(dc, VIS1);
4341 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4342 tcg_gen_movi_i64(cpu_dst_64, 0);
4343 gen_store_fpr_D(dc, rd, cpu_dst_64);
4344 break;
4345 case 0x061: /* VIS I fzeros */
4346 CHECK_FPU_FEATURE(dc, VIS1);
4347 cpu_dst_32 = gen_dest_fpr_F(dc);
4348 tcg_gen_movi_i32(cpu_dst_32, 0);
4349 gen_store_fpr_F(dc, rd, cpu_dst_32);
4350 break;
4351 case 0x062: /* VIS I fnor */
4352 CHECK_FPU_FEATURE(dc, VIS1);
4353 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4354 break;
4355 case 0x063: /* VIS I fnors */
4356 CHECK_FPU_FEATURE(dc, VIS1);
4357 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4358 break;
4359 case 0x064: /* VIS I fandnot2 */
4360 CHECK_FPU_FEATURE(dc, VIS1);
4361 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4362 break;
4363 case 0x065: /* VIS I fandnot2s */
4364 CHECK_FPU_FEATURE(dc, VIS1);
4365 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4366 break;
4367 case 0x066: /* VIS I fnot2 */
4368 CHECK_FPU_FEATURE(dc, VIS1);
4369 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4370 break;
4371 case 0x067: /* VIS I fnot2s */
4372 CHECK_FPU_FEATURE(dc, VIS1);
4373 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4374 break;
4375 case 0x068: /* VIS I fandnot1 */
4376 CHECK_FPU_FEATURE(dc, VIS1);
4377 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4378 break;
4379 case 0x069: /* VIS I fandnot1s */
4380 CHECK_FPU_FEATURE(dc, VIS1);
4381 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4382 break;
4383 case 0x06a: /* VIS I fnot1 */
4384 CHECK_FPU_FEATURE(dc, VIS1);
4385 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4386 break;
4387 case 0x06b: /* VIS I fnot1s */
4388 CHECK_FPU_FEATURE(dc, VIS1);
4389 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4390 break;
4391 case 0x06c: /* VIS I fxor */
4392 CHECK_FPU_FEATURE(dc, VIS1);
4393 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4394 break;
4395 case 0x06d: /* VIS I fxors */
4396 CHECK_FPU_FEATURE(dc, VIS1);
4397 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4398 break;
4399 case 0x06e: /* VIS I fnand */
4400 CHECK_FPU_FEATURE(dc, VIS1);
4401 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4402 break;
4403 case 0x06f: /* VIS I fnands */
4404 CHECK_FPU_FEATURE(dc, VIS1);
4405 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4406 break;
4407 case 0x070: /* VIS I fand */
4408 CHECK_FPU_FEATURE(dc, VIS1);
4409 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4410 break;
4411 case 0x071: /* VIS I fands */
4412 CHECK_FPU_FEATURE(dc, VIS1);
4413 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4414 break;
4415 case 0x072: /* VIS I fxnor */
4416 CHECK_FPU_FEATURE(dc, VIS1);
4417 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4418 break;
4419 case 0x073: /* VIS I fxnors */
4420 CHECK_FPU_FEATURE(dc, VIS1);
4421 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4422 break;
4423 case 0x074: /* VIS I fsrc1 */
4424 CHECK_FPU_FEATURE(dc, VIS1);
4425 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4426 gen_store_fpr_D(dc, rd, cpu_src1_64);
4427 break;
4428 case 0x075: /* VIS I fsrc1s */
4429 CHECK_FPU_FEATURE(dc, VIS1);
4430 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4431 gen_store_fpr_F(dc, rd, cpu_src1_32);
4432 break;
4433 case 0x076: /* VIS I fornot2 */
4434 CHECK_FPU_FEATURE(dc, VIS1);
4435 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4436 break;
4437 case 0x077: /* VIS I fornot2s */
4438 CHECK_FPU_FEATURE(dc, VIS1);
4439 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4440 break;
4441 case 0x078: /* VIS I fsrc2 */
4442 CHECK_FPU_FEATURE(dc, VIS1);
4443 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4444 gen_store_fpr_D(dc, rd, cpu_src1_64);
4445 break;
4446 case 0x079: /* VIS I fsrc2s */
4447 CHECK_FPU_FEATURE(dc, VIS1);
4448 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4449 gen_store_fpr_F(dc, rd, cpu_src1_32);
4450 break;
4451 case 0x07a: /* VIS I fornot1 */
4452 CHECK_FPU_FEATURE(dc, VIS1);
4453 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4454 break;
4455 case 0x07b: /* VIS I fornot1s */
4456 CHECK_FPU_FEATURE(dc, VIS1);
4457 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4458 break;
4459 case 0x07c: /* VIS I for */
4460 CHECK_FPU_FEATURE(dc, VIS1);
4461 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4462 break;
4463 case 0x07d: /* VIS I fors */
4464 CHECK_FPU_FEATURE(dc, VIS1);
4465 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4466 break;
4467 case 0x07e: /* VIS I fone */
4468 CHECK_FPU_FEATURE(dc, VIS1);
4469 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4470 tcg_gen_movi_i64(cpu_dst_64, -1);
4471 gen_store_fpr_D(dc, rd, cpu_dst_64);
4472 break;
4473 case 0x07f: /* VIS I fones */
4474 CHECK_FPU_FEATURE(dc, VIS1);
4475 cpu_dst_32 = gen_dest_fpr_F(dc);
4476 tcg_gen_movi_i32(cpu_dst_32, -1);
4477 gen_store_fpr_F(dc, rd, cpu_dst_32);
4478 break;
4479 case 0x080: /* VIS I shutdown */
4480 case 0x081: /* VIS II siam */
4481 // XXX
4482 goto illegal_insn;
4483 default:
4484 goto illegal_insn;
4485 }
4486 #else
4487 goto ncp_insn;
4488 #endif
4489 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4490 #ifdef TARGET_SPARC64
4491 goto illegal_insn;
4492 #else
4493 goto ncp_insn;
4494 #endif
4495 #ifdef TARGET_SPARC64
4496 } else if (xop == 0x39) { /* V9 return */
4497 TCGv_i32 r_const;
4499 save_state(dc);
4500 cpu_src1 = get_src1(dc, insn);
4501 cpu_tmp0 = get_temp_tl(dc);
4502 if (IS_IMM) { /* immediate */
4503 simm = GET_FIELDs(insn, 19, 31);
4504 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4505 } else { /* register */
4506 rs2 = GET_FIELD(insn, 27, 31);
4507 if (rs2) {
4508 cpu_src2 = gen_load_gpr(dc, rs2);
4509 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4510 } else {
4511 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4512 }
4513 }
4514 gen_helper_restore(cpu_env);
4515 gen_mov_pc_npc(dc);
4516 r_const = tcg_const_i32(3);
4517 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4518 tcg_temp_free_i32(r_const);
4519 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4520 dc->npc = DYNAMIC_PC;
4521 goto jmp_insn;
4522 #endif
4523 } else {
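/* Remaining xop values (jmpl, rett, flush, save, restore, done/retry)
   share the effective-address computation rs1 + (simm13 or rs2) into
   cpu_tmp0, mirroring the V9 return case above. */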
4524 cpu_src1 = get_src1(dc, insn);
4525 cpu_tmp0 = get_temp_tl(dc);
4526 if (IS_IMM) { /* immediate */
4527 simm = GET_FIELDs(insn, 19, 31);
4528 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4529 } else { /* register */
4530 rs2 = GET_FIELD(insn, 27, 31);
4531 if (rs2) {
4532 cpu_src2 = gen_load_gpr(dc, rs2);
4533 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4534 } else {
4535 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4536 }
4537 }
4538 switch (xop) {
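/* jmpl: rd receives the address of the jmpl instruction itself
   (dc->pc) and control transfers to the computed target; the constant
   3 passed to the helper is the low-bit alignment mask being checked. */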
4539 case 0x38: /* jmpl */
4540 {
4541 TCGv t;
4542 TCGv_i32 r_const;
4544 t = gen_dest_gpr(dc, rd);
4545 tcg_gen_movi_tl(t, dc->pc);
4546 gen_store_gpr(dc, rd, t);
4547 gen_mov_pc_npc(dc);
4548 r_const = tcg_const_i32(3);
4549 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4550 tcg_temp_free_i32(r_const);
4551 gen_address_mask(dc, cpu_tmp0);
4552 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4553 dc->npc = DYNAMIC_PC;
4554 }
4555 goto jmp_insn;
4556 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4557 case 0x39: /* rett, V9 return */
4558 {
4559 TCGv_i32 r_const;
4561 if (!supervisor(dc))
4562 goto priv_insn;
4563 gen_mov_pc_npc(dc);
4564 r_const = tcg_const_i32(3);
4565 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4566 tcg_temp_free_i32(r_const);
4567 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4568 dc->npc = DYNAMIC_PC;
4569 gen_helper_rett(cpu_env);
4570 }
4571 goto jmp_insn;
4572 #endif
4573 case 0x3b: /* flush */
4574 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4575 goto unimp_flush;
4576 /* nop: QEMU invalidates translated code when the underlying memory is written, so there is no icache state to flush here */
4577 break;
4578 case 0x3c: /* save */
4579 save_state(dc);
4580 gen_helper_save(cpu_env);
4581 gen_store_gpr(dc, rd, cpu_tmp0);
4582 break;
4583 case 0x3d: /* restore */
4584 save_state(dc);
4585 gen_helper_restore(cpu_env);
4586 gen_store_gpr(dc, rd, cpu_tmp0);
4587 break;
4588 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4589 case 0x3e: /* V9 done/retry */
4590 {
4591 switch (rd) {
4592 case 0:
4593 if (!supervisor(dc))
4594 goto priv_insn;
4595 dc->npc = DYNAMIC_PC;
4596 dc->pc = DYNAMIC_PC;
4597 gen_helper_done(cpu_env);
4598 goto jmp_insn;
4599 case 1:
4600 if (!supervisor(dc))
4601 goto priv_insn;
4602 dc->npc = DYNAMIC_PC;
4603 dc->pc = DYNAMIC_PC;
4604 gen_helper_retry(cpu_env);
4605 goto jmp_insn;
4606 default:
4607 goto illegal_insn;
4608 }
4609 }
4610 break;
4611 #endif
4612 default:
4613 goto illegal_insn;
4614 }
4615 }
4616 break;
4617 }
4618 break;
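/* Format 3 memory instructions: xop comes from GET_FIELD(insn, 7, 12)
   (insn bits 24:19 in native order); the effective address is rs1 plus
   either a sign-extended simm13 or rs2, built into a temporary below. */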
4619 case 3: /* load/store instructions */
4620 {
4621 unsigned int xop = GET_FIELD(insn, 7, 12);
4622 /* ??? gen_address_mask prevents us from using a source
4623 register directly. Always generate a temporary. */
4624 TCGv cpu_addr = get_temp_tl(dc);
4626 tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4627 if (xop == 0x3c || xop == 0x3e) {
4628 /* V9 casa/casxa : no offset */
4629 } else if (IS_IMM) { /* immediate */
4630 simm = GET_FIELDs(insn, 19, 31);
4631 if (simm != 0) {
4632 tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4633 }
4634 } else { /* register */
4635 rs2 = GET_FIELD(insn, 27, 31);
4636 if (rs2 != 0) {
4637 tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4638 }
4639 }
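/* Dispatch on xop: the first group covers loads whose result lands in
   cpu_val, then 0x20-0x23 FP loads, the integer stores, 0x24-0x27 FP
   stores, and finally 0x34-0x3e ASI FP stores and compare-and-swap. */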
4640 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4641 (xop > 0x17 && xop <= 0x1d ) ||
4642 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4643 TCGv cpu_val = gen_dest_gpr(dc, rd);
4645 switch (xop) {
4646 case 0x0: /* ld, V9 lduw, load unsigned word */
4647 gen_address_mask(dc, cpu_addr);
4648 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4649 break;
4650 case 0x1: /* ldub, load unsigned byte */
4651 gen_address_mask(dc, cpu_addr);
4652 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4653 break;
4654 case 0x2: /* lduh, load unsigned halfword */
4655 gen_address_mask(dc, cpu_addr);
4656 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4657 break;
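/* ldd fills an even/odd register pair from one 64-bit load: the low
   word goes to rd + 1 and the high word to rd, hence the odd-rd check. */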
4658 case 0x3: /* ldd, load double word */
4659 if (rd & 1)
4660 goto illegal_insn;
4661 else {
4662 TCGv_i32 r_const;
4663 TCGv_i64 t64;
4665 save_state(dc);
4666 r_const = tcg_const_i32(7);
4667 /* XXX remove alignment check */
4668 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4669 tcg_temp_free_i32(r_const);
4670 gen_address_mask(dc, cpu_addr);
4671 t64 = tcg_temp_new_i64();
4672 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4673 tcg_gen_trunc_i64_tl(cpu_val, t64);
4674 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4675 gen_store_gpr(dc, rd + 1, cpu_val);
4676 tcg_gen_shri_i64(t64, t64, 32);
4677 tcg_gen_trunc_i64_tl(cpu_val, t64);
4678 tcg_temp_free_i64(t64);
4679 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4680 }
4681 break;
4682 case 0x9: /* ldsb, load signed byte */
4683 gen_address_mask(dc, cpu_addr);
4684 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4685 break;
4686 case 0xa: /* ldsh, load signed halfword */
4687 gen_address_mask(dc, cpu_addr);
4688 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4689 break;
4690 case 0xd: /* ldstub -- XXX: should be atomic */
4691 {
4692 TCGv r_const;
4694 gen_address_mask(dc, cpu_addr);
4695 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4696 r_const = tcg_const_tl(0xff);
4697 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4698 tcg_temp_free(r_const);
4699 }
4700 break;
4701 case 0x0f:
4702 /* swap, swap register with memory. XXX: should also be atomic */
4703 {
4704 TCGv t0 = get_temp_tl(dc);
4705 CHECK_IU_FEATURE(dc, SWAP);
4706 cpu_src1 = gen_load_gpr(dc, rd);
4707 gen_address_mask(dc, cpu_addr);
4708 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4709 tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4710 tcg_gen_mov_tl(cpu_val, t0);
4711 }
4712 break;
4713 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4714 case 0x10: /* lda, V9 lduwa, load word alternate */
4715 #ifndef TARGET_SPARC64
4716 if (IS_IMM)
4717 goto illegal_insn;
4718 if (!supervisor(dc))
4719 goto priv_insn;
4720 #endif
4721 save_state(dc);
4722 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4723 break;
4724 case 0x11: /* lduba, load unsigned byte alternate */
4725 #ifndef TARGET_SPARC64
4726 if (IS_IMM)
4727 goto illegal_insn;
4728 if (!supervisor(dc))
4729 goto priv_insn;
4730 #endif
4731 save_state(dc);
4732 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4733 break;
4734 case 0x12: /* lduha, load unsigned halfword alternate */
4735 #ifndef TARGET_SPARC64
4736 if (IS_IMM)
4737 goto illegal_insn;
4738 if (!supervisor(dc))
4739 goto priv_insn;
4740 #endif
4741 save_state(dc);
4742 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4743 break;
4744 case 0x13: /* ldda, load double word alternate */
4745 #ifndef TARGET_SPARC64
4746 if (IS_IMM)
4747 goto illegal_insn;
4748 if (!supervisor(dc))
4749 goto priv_insn;
4750 #endif
4751 if (rd & 1)
4752 goto illegal_insn;
4753 save_state(dc);
4754 gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4755 goto skip_move;
4756 case 0x19: /* ldsba, load signed byte alternate */
4757 #ifndef TARGET_SPARC64
4758 if (IS_IMM)
4759 goto illegal_insn;
4760 if (!supervisor(dc))
4761 goto priv_insn;
4762 #endif
4763 save_state(dc);
4764 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4765 break;
4766 case 0x1a: /* ldsha, load signed halfword alternate */
4767 #ifndef TARGET_SPARC64
4768 if (IS_IMM)
4769 goto illegal_insn;
4770 if (!supervisor(dc))
4771 goto priv_insn;
4772 #endif
4773 save_state(dc);
4774 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4775 break;
4776 case 0x1d: /* ldstuba -- XXX: should be atomic */
4777 #ifndef TARGET_SPARC64
4778 if (IS_IMM)
4779 goto illegal_insn;
4780 if (!supervisor(dc))
4781 goto priv_insn;
4782 #endif
4783 save_state(dc);
4784 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4785 break;
4786 case 0x1f: /* swapa, swap reg with alt. memory. XXX: should
4787 also be atomic */
4788 CHECK_IU_FEATURE(dc, SWAP);
4789 #ifndef TARGET_SPARC64
4790 if (IS_IMM)
4791 goto illegal_insn;
4792 if (!supervisor(dc))
4793 goto priv_insn;
4794 #endif
4795 save_state(dc);
4796 cpu_src1 = gen_load_gpr(dc, rd);
4797 gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4798 break;
4800 #ifndef TARGET_SPARC64
4801 case 0x30: /* ldc */
4802 case 0x31: /* ldcsr */
4803 case 0x33: /* lddc */
4804 goto ncp_insn;
4805 #endif
4806 #endif
4807 #ifdef TARGET_SPARC64
4808 case 0x08: /* V9 ldsw */
4809 gen_address_mask(dc, cpu_addr);
4810 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4811 break;
4812 case 0x0b: /* V9 ldx */
4813 gen_address_mask(dc, cpu_addr);
4814 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4815 break;
4816 case 0x18: /* V9 ldswa */
4817 save_state(dc);
4818 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4819 break;
4820 case 0x1b: /* V9 ldxa */
4821 save_state(dc);
4822 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4823 break;
4824 case 0x2d: /* V9 prefetch, no effect */
4825 goto skip_move;
4826 case 0x30: /* V9 ldfa */
4827 if (gen_trap_ifnofpu(dc)) {
4828 goto jmp_insn;
4829 }
4830 save_state(dc);
4831 gen_ldf_asi(cpu_addr, insn, 4, rd);
4832 gen_update_fprs_dirty(rd);
4833 goto skip_move;
4834 case 0x33: /* V9 lddfa */
4835 if (gen_trap_ifnofpu(dc)) {
4836 goto jmp_insn;
4837 }
4838 save_state(dc);
4839 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4840 gen_update_fprs_dirty(DFPREG(rd));
4841 goto skip_move;
4842 case 0x3d: /* V9 prefetcha, no effect */
4843 goto skip_move;
4844 case 0x32: /* V9 ldqfa */
4845 CHECK_FPU_FEATURE(dc, FLOAT128);
4846 if (gen_trap_ifnofpu(dc)) {
4847 goto jmp_insn;
4848 }
4849 save_state(dc);
4850 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4851 gen_update_fprs_dirty(QFPREG(rd));
4852 goto skip_move;
4853 #endif
4854 default:
4855 goto illegal_insn;
4856 }
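/* Common writeback for the load cases above; ASI double/FP loads jump
   to skip_move because their helpers wrote the destination directly. */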
4857 gen_store_gpr(dc, rd, cpu_val);
4858 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4859 skip_move: ;
4860 #endif
4861 } else if (xop >= 0x20 && xop < 0x24) {
4862 TCGv t0;
4864 if (gen_trap_ifnofpu(dc)) {
4865 goto jmp_insn;
4866 }
4867 save_state(dc);
4868 switch (xop) {
4869 case 0x20: /* ldf, load fpreg */
4870 gen_address_mask(dc, cpu_addr);
4871 t0 = get_temp_tl(dc);
4872 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4873 cpu_dst_32 = gen_dest_fpr_F(dc);
4874 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4875 gen_store_fpr_F(dc, rd, cpu_dst_32);
4876 break;
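/* ldfsr loads the 32-bit FSR; on V9, rd == 1 encodes ldxfsr, which
   loads all 64 FSR bits through a dedicated helper. */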
4877 case 0x21: /* ldfsr, V9 ldxfsr */
4878 #ifdef TARGET_SPARC64
4879 gen_address_mask(dc, cpu_addr);
4880 if (rd == 1) {
4881 TCGv_i64 t64 = tcg_temp_new_i64();
4882 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4883 gen_helper_ldxfsr(cpu_env, t64);
4884 tcg_temp_free_i64(t64);
4885 break;
4886 }
4887 #endif
4888 cpu_dst_32 = get_temp_i32(dc);
4889 t0 = get_temp_tl(dc);
4890 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4891 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4892 gen_helper_ldfsr(cpu_env, cpu_dst_32);
4893 break;
4894 case 0x22: /* ldqf, load quad fpreg */
4895 {
4896 TCGv_i32 r_const;
4898 CHECK_FPU_FEATURE(dc, FLOAT128);
4899 r_const = tcg_const_i32(dc->mem_idx);
4900 gen_address_mask(dc, cpu_addr);
4901 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4902 tcg_temp_free_i32(r_const);
4903 gen_op_store_QT0_fpr(QFPREG(rd));
4904 gen_update_fprs_dirty(QFPREG(rd));
4905 }
4906 break;
4907 case 0x23: /* lddf, load double fpreg */
4908 gen_address_mask(dc, cpu_addr);
4909 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4910 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4911 gen_store_fpr_D(dc, rd, cpu_dst_64);
4912 break;
4913 default:
4914 goto illegal_insn;
4915 }
4916 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4917 xop == 0xe || xop == 0x1e) {
4918 TCGv cpu_val = gen_load_gpr(dc, rd);
4920 switch (xop) {
4921 case 0x4: /* st, store word */
4922 gen_address_mask(dc, cpu_addr);
4923 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4924 break;
4925 case 0x5: /* stb, store byte */
4926 gen_address_mask(dc, cpu_addr);
4927 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4928 break;
4929 case 0x6: /* sth, store halfword */
4930 gen_address_mask(dc, cpu_addr);
4931 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4932 break;
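/* std mirrors ldd: rd + 1 supplies the low word and rd the high word,
   concatenated into a single 64-bit store. */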
4933 case 0x7: /* std, store double word */
4934 if (rd & 1)
4935 goto illegal_insn;
4936 else {
4937 TCGv_i32 r_const;
4938 TCGv_i64 t64;
4939 TCGv lo;
4941 save_state(dc);
4942 gen_address_mask(dc, cpu_addr);
4943 r_const = tcg_const_i32(7);
4944 /* XXX remove alignment check */
4945 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4946 tcg_temp_free_i32(r_const);
4947 lo = gen_load_gpr(dc, rd + 1);
4949 t64 = tcg_temp_new_i64();
4950 tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4951 tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4952 tcg_temp_free_i64(t64);
4953 }
4954 break;
4955 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4956 case 0x14: /* sta, V9 stwa, store word alternate */
4957 #ifndef TARGET_SPARC64
4958 if (IS_IMM)
4959 goto illegal_insn;
4960 if (!supervisor(dc))
4961 goto priv_insn;
4962 #endif
4963 save_state(dc);
4964 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4965 dc->npc = DYNAMIC_PC;
4966 break;
4967 case 0x15: /* stba, store byte alternate */
4968 #ifndef TARGET_SPARC64
4969 if (IS_IMM)
4970 goto illegal_insn;
4971 if (!supervisor(dc))
4972 goto priv_insn;
4973 #endif
4974 save_state(dc);
4975 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4976 dc->npc = DYNAMIC_PC;
4977 break;
4978 case 0x16: /* stha, store halfword alternate */
4979 #ifndef TARGET_SPARC64
4980 if (IS_IMM)
4981 goto illegal_insn;
4982 if (!supervisor(dc))
4983 goto priv_insn;
4984 #endif
4985 save_state(dc);
4986 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4987 dc->npc = DYNAMIC_PC;
4988 break;
4989 case 0x17: /* stda, store double word alternate */
4990 #ifndef TARGET_SPARC64
4991 if (IS_IMM)
4992 goto illegal_insn;
4993 if (!supervisor(dc))
4994 goto priv_insn;
4995 #endif
4996 if (rd & 1)
4997 goto illegal_insn;
4998 else {
4999 save_state(dc);
5000 gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5001 }
5002 break;
5003 #endif
5004 #ifdef TARGET_SPARC64
5005 case 0x0e: /* V9 stx */
5006 gen_address_mask(dc, cpu_addr);
5007 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5008 break;
5009 case 0x1e: /* V9 stxa */
5010 save_state(dc);
5011 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5012 dc->npc = DYNAMIC_PC;
5013 break;
5014 #endif
5015 default:
5016 goto illegal_insn;
5017 }
5018 } else if (xop > 0x23 && xop < 0x28) {
5019 if (gen_trap_ifnofpu(dc)) {
5020 goto jmp_insn;
5021 }
5022 save_state(dc);
5023 switch (xop) {
5024 case 0x24: /* stf, store fpreg */
5025 {
5026 TCGv t = get_temp_tl(dc);
5027 gen_address_mask(dc, cpu_addr);
5028 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5029 tcg_gen_ext_i32_tl(t, cpu_src1_32);
5030 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5031 }
5032 break;
5033 case 0x25: /* stfsr, V9 stxfsr */
5034 {
5035 TCGv t = get_temp_tl(dc);
5037 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5038 #ifdef TARGET_SPARC64
5039 gen_address_mask(dc, cpu_addr);
5040 if (rd == 1) {
5041 tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5042 break;
5043 }
5044 #endif
5045 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5046 }
5047 break;
5048 case 0x26:
5049 #ifdef TARGET_SPARC64
5050 /* V9 stqf, store quad fpreg */
5051 {
5052 TCGv_i32 r_const;
5054 CHECK_FPU_FEATURE(dc, FLOAT128);
5055 gen_op_load_fpr_QT0(QFPREG(rd));
5056 r_const = tcg_const_i32(dc->mem_idx);
5057 gen_address_mask(dc, cpu_addr);
5058 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5059 tcg_temp_free_i32(r_const);
5060 }
5061 break;
5062 #else /* !TARGET_SPARC64 */
5063 /* stdfq, store floating point queue */
5064 #if defined(CONFIG_USER_ONLY)
5065 goto illegal_insn;
5066 #else
5067 if (!supervisor(dc))
5068 goto priv_insn;
5069 if (gen_trap_ifnofpu(dc)) {
5070 goto jmp_insn;
5071 }
5072 goto nfq_insn;
5073 #endif
5074 #endif
5075 case 0x27: /* stdf, store double fpreg */
5076 gen_address_mask(dc, cpu_addr);
5077 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5078 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5079 break;
5080 default:
5081 goto illegal_insn;
5082 }
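/* xop 0x34-0x3e: V9 ASI forms -- FP stores via gen_stf_asi plus the
   compare-and-swap instructions, which consume rs2 as the compare value. */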
5083 } else if (xop > 0x33 && xop < 0x3f) {
5084 save_state(dc);
5085 switch (xop) {
5086 #ifdef TARGET_SPARC64
5087 case 0x34: /* V9 stfa */
5088 if (gen_trap_ifnofpu(dc)) {
5089 goto jmp_insn;
5090 }
5091 gen_stf_asi(cpu_addr, insn, 4, rd);
5092 break;
5093 case 0x36: /* V9 stqfa */
5094 {
5095 TCGv_i32 r_const;
5097 CHECK_FPU_FEATURE(dc, FLOAT128);
5098 if (gen_trap_ifnofpu(dc)) {
5099 goto jmp_insn;
5100 }
5101 r_const = tcg_const_i32(7);
5102 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5103 tcg_temp_free_i32(r_const);
5104 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5105 }
5106 break;
5107 case 0x37: /* V9 stdfa */
5108 if (gen_trap_ifnofpu(dc)) {
5109 goto jmp_insn;
5110 }
5111 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5112 break;
5113 case 0x3e: /* V9 casxa */
5114 rs2 = GET_FIELD(insn, 27, 31);
5115 cpu_src2 = gen_load_gpr(dc, rs2);
5116 gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5117 break;
5118 #else
5119 case 0x34: /* stc */
5120 case 0x35: /* stcsr */
5121 case 0x36: /* stdcq */
5122 case 0x37: /* stdc */
5123 goto ncp_insn;
5124 #endif
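/* casa exists both as a V9 instruction and as a LEON3 extension on
   32-bit SPARC, where it is privileged and register-form only. */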
5125 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5126 case 0x3c: /* V9 or LEON3 casa */
5127 #ifndef TARGET_SPARC64
5128 CHECK_IU_FEATURE(dc, CASA);
5129 if (IS_IMM) {
5130 goto illegal_insn;
5131 }
5132 if (!supervisor(dc)) {
5133 goto priv_insn;
5134 }
5135 #endif
5136 rs2 = GET_FIELD(insn, 27, 31);
5137 cpu_src2 = gen_load_gpr(dc, rs2);
5138 gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5139 break;
5140 #endif
5141 default:
5142 goto illegal_insn;
5143 }
5144 } else {
5145 goto illegal_insn;
5146 }
5147 }
5148 break;
5149 }
5150 /* default case for non jump instructions */
5151 if (dc->npc == DYNAMIC_PC) {
5152 dc->pc = DYNAMIC_PC;
5153 gen_op_next_insn();
5154 } else if (dc->npc == JUMP_PC) {
5155 /* we can do a static jump */
5156 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5157 dc->is_br = 1;
5158 } else {
5159 dc->pc = dc->npc;
5160 dc->npc = dc->npc + 4;
5161 }
5162 jmp_insn:
5163 goto egress;
5164 illegal_insn:
5165 {
5166 TCGv_i32 r_const;
5168 save_state(dc);
5169 r_const = tcg_const_i32(TT_ILL_INSN);
5170 gen_helper_raise_exception(cpu_env, r_const);
5171 tcg_temp_free_i32(r_const);
5172 dc->is_br = 1;
5173 }
5174 goto egress;
5175 unimp_flush:
5176 {
5177 TCGv_i32 r_const;
5179 save_state(dc);
5180 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5181 gen_helper_raise_exception(cpu_env, r_const);
5182 tcg_temp_free_i32(r_const);
5183 dc->is_br = 1;
5184 }
5185 goto egress;
5186 #if !defined(CONFIG_USER_ONLY)
5187 priv_insn:
5188 {
5189 TCGv_i32 r_const;
5191 save_state(dc);
5192 r_const = tcg_const_i32(TT_PRIV_INSN);
5193 gen_helper_raise_exception(cpu_env, r_const);
5194 tcg_temp_free_i32(r_const);
5195 dc->is_br = 1;
5196 }
5197 goto egress;
5198 #endif
5199 nfpu_insn:
5200 save_state(dc);
5201 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5202 dc->is_br = 1;
5203 goto egress;
5204 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5205 nfq_insn:
5206 save_state(dc);
5207 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5208 dc->is_br = 1;
5209 goto egress;
5210 #endif
5211 #ifndef TARGET_SPARC64
5212 ncp_insn:
5213 {
5214 TCGv_i32 r_const;
5216 save_state(dc);
5217 r_const = tcg_const_i32(TT_NCP_INSN);
5218 gen_helper_raise_exception(cpu_env, r_const);
5219 tcg_temp_free_i32(r_const);
5220 dc->is_br = 1;
5221 }
5222 goto egress;
5223 #endif
5224 egress:
5225 if (dc->n_t32 != 0) {
5226 int i;
5227 for (i = dc->n_t32 - 1; i >= 0; --i) {
5228 tcg_temp_free_i32(dc->t32[i]);
5229 }
5230 dc->n_t32 = 0;
5231 }
5232 if (dc->n_ttl != 0) {
5233 int i;
5234 for (i = dc->n_ttl - 1; i >= 0; --i) {
5235 tcg_temp_free(dc->ttl[i]);
5236 }
5237 dc->n_ttl = 0;
5238 }
5239 }
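/* Translation loop: instructions are decoded sequentially until a
   branch is emitted (dc->is_br), the PC stops advancing linearly, a
   page boundary or single-step is hit, or the op/insn budget runs out. */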
5241 static inline void gen_intermediate_code_internal(SPARCCPU *cpu,
5242 TranslationBlock *tb,
5243 bool spc)
5244 {
5245 CPUState *cs = CPU(cpu);
5246 CPUSPARCState *env = &cpu->env;
5247 target_ulong pc_start, last_pc;
5248 uint16_t *gen_opc_end;
5249 DisasContext dc1, *dc = &dc1;
5250 CPUBreakpoint *bp;
5251 int j, lj = -1;
5252 int num_insns;
5253 int max_insns;
5254 unsigned int insn;
5256 memset(dc, 0, sizeof(DisasContext));
5257 dc->tb = tb;
5258 pc_start = tb->pc;
5259 dc->pc = pc_start;
5260 last_pc = dc->pc;
5261 dc->npc = (target_ulong) tb->cs_base;
5262 dc->cc_op = CC_OP_DYNAMIC;
5263 dc->mem_idx = cpu_mmu_index(env);
5264 dc->def = env->def;
5265 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5266 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5267 dc->singlestep = (cs->singlestep_enabled || singlestep);
5268 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
5270 num_insns = 0;
5271 max_insns = tb->cflags & CF_COUNT_MASK;
5272 if (max_insns == 0)
5273 max_insns = CF_COUNT_MASK;
5274 gen_tb_start(tb);
5275 do {
5276 if (unlikely(!QTAILQ_EMPTY(&cs->breakpoints))) {
5277 QTAILQ_FOREACH(bp, &cs->breakpoints, entry) {
5278 if (bp->pc == dc->pc) {
5279 if (dc->pc != pc_start)
5280 save_state(dc);
5281 gen_helper_debug(cpu_env);
5282 tcg_gen_exit_tb(0);
5283 dc->is_br = 1;
5284 goto exit_gen_loop;
5285 }
5286 }
5287 }
5288 if (spc) {
5289 qemu_log("Search PC...\n");
5290 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5291 if (lj < j) {
5292 lj++;
5293 while (lj < j)
5294 tcg_ctx.gen_opc_instr_start[lj++] = 0;
5295 tcg_ctx.gen_opc_pc[lj] = dc->pc;
5296 gen_opc_npc[lj] = dc->npc;
5297 tcg_ctx.gen_opc_instr_start[lj] = 1;
5298 tcg_ctx.gen_opc_icount[lj] = num_insns;
5299 }
5300 }
5301 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5302 gen_io_start();
5303 last_pc = dc->pc;
5304 insn = cpu_ldl_code(env, dc->pc);
5306 disas_sparc_insn(dc, insn);
5307 num_insns++;
5309 if (dc->is_br)
5310 break;
5311 /* if the next PC is different, we abort now */
5312 if (dc->pc != (last_pc + 4))
5313 break;
5314 /* if we reach a page boundary, we stop generation so that the
5315 PC of a TT_TFAULT exception is always in the right page */
5316 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5317 break;
5318 /* if single step mode, we generate only one instruction and
5319 generate an exception */
5320 if (dc->singlestep) {
5321 break;
5322 }
5323 } while ((tcg_ctx.gen_opc_ptr < gen_opc_end) &&
5324 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5325 num_insns < max_insns);
5327 exit_gen_loop:
5328 if (tb->cflags & CF_LAST_IO) {
5329 gen_io_end();
5330 }
5331 if (!dc->is_br) {
5332 if (dc->pc != DYNAMIC_PC &&
5333 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5334 /* static PC and NPC: we can use direct chaining */
5335 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5336 } else {
5337 if (dc->pc != DYNAMIC_PC) {
5338 tcg_gen_movi_tl(cpu_pc, dc->pc);
5339 }
5340 save_npc(dc);
5341 tcg_gen_exit_tb(0);
5342 }
5343 }
5344 gen_tb_end(tb, num_insns);
5345 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
5346 if (spc) {
5347 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5348 lj++;
5349 while (lj <= j)
5350 tcg_ctx.gen_opc_instr_start[lj++] = 0;
5351 #if 0
5352 log_page_dump();
5353 #endif
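/* The jump targets are recorded so that restore_state_to_opc() can
   resolve a JUMP_PC npc when unwinding this TB after an exception. */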
5354 gen_opc_jump_pc[0] = dc->jump_pc[0];
5355 gen_opc_jump_pc[1] = dc->jump_pc[1];
5356 } else {
5357 tb->size = last_pc + 4 - pc_start;
5358 tb->icount = num_insns;
5359 }
5360 #ifdef DEBUG_DISAS
5361 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5362 qemu_log("--------------\n");
5363 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5364 log_target_disas(env, pc_start, last_pc + 4 - pc_start, 0);
5365 qemu_log("\n");
5366 }
5367 #endif
5368 }
5370 void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
5371 {
5372 gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, false);
5373 }
5375 void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
5376 {
5377 gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, true);
5378 }
5380 void gen_intermediate_code_init(CPUSPARCState *env)
5381 {
5382 unsigned int i;
5383 static int inited;
5384 static const char * const gregnames[8] = {
5385 NULL, // g0 not used
5386 "g1",
5387 "g2",
5388 "g3",
5389 "g4",
5390 "g5",
5391 "g6",
5392 "g7",
5394 static const char * const fregnames[32] = {
5395 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5396 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5397 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5398 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5401 /* init various static tables */
5402 if (!inited) {
5403 inited = 1;
5405 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5406 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5407 offsetof(CPUSPARCState, regwptr),
5408 "regwptr");
5409 #ifdef TARGET_SPARC64
5410 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5411 "xcc");
5412 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5413 "asi");
5414 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5415 "fprs");
5416 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5417 "gsr");
5418 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5419 offsetof(CPUSPARCState, tick_cmpr),
5420 "tick_cmpr");
5421 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5422 offsetof(CPUSPARCState, stick_cmpr),
5423 "stick_cmpr");
5424 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5425 offsetof(CPUSPARCState, hstick_cmpr),
5426 "hstick_cmpr");
5427 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5428 "hintp");
5429 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5430 "htba");
5431 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5432 "hver");
5433 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5434 offsetof(CPUSPARCState, ssr), "ssr");
5435 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5436 offsetof(CPUSPARCState, version), "ver");
5437 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5438 offsetof(CPUSPARCState, softint),
5439 "softint");
5440 #else
5441 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5442 "wim");
5443 #endif
5444 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5445 "cond");
5446 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5447 "cc_src");
5448 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5449 offsetof(CPUSPARCState, cc_src2),
5450 "cc_src2");
5451 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5452 "cc_dst");
5453 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5454 "cc_op");
5455 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5456 "psr");
5457 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5458 "fsr");
5459 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5460 "pc");
5461 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5462 "npc");
5463 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5464 #ifndef CONFIG_USER_ONLY
5465 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5466 "tbr");
5467 #endif
5468 for (i = 1; i < 8; i++) {
5469 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5470 offsetof(CPUSPARCState, gregs[i]),
5471 gregnames[i]);
5472 }
5473 for (i = 0; i < TARGET_DPREGS; i++) {
5474 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5475 offsetof(CPUSPARCState, fpr[i]),
5476 fregnames[i]);
5477 }
5478 }
5479 }
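/* The npc values 1 and 2 tested below correspond to DYNAMIC_PC and
   JUMP_PC: 1 means npc was already stored to the CPU state, 2 means it
   must be resolved from the recorded jump targets via the saved cond. */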
5481 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5482 {
5483 target_ulong npc;
5484 env->pc = tcg_ctx.gen_opc_pc[pc_pos];
5485 npc = gen_opc_npc[pc_pos];
5486 if (npc == 1) {
5487 /* dynamic NPC: already stored */
5488 } else if (npc == 2) {
5489 /* jump PC: use 'cond' and the jump targets of the translation */
5490 if (env->cond) {
5491 env->npc = gen_opc_jump_pc[0];
5492 } else {
5493 env->npc = gen_opc_jump_pc[1];
5494 }
5495 } else {
5496 env->npc = npc;