/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas/disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "exec/gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
} DisasContext;

typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;

// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
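
/* Worked example (added for illustration): with X = 0xABCD1234,
   GET_FIELD(X, 0, 3) selects the four most-significant bits,
   (X >> 28) & 0xf == 0xA, whereas GET_FIELD_SP(X, 0, 3) uses the
   manual's bit numbering and selects the four least-significant bits,
   X & 0xf == 0x4.  */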
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
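
/* Example (added for illustration): in the SPARC64 encoding the low bit
   of the 5-bit register field acts as the high bit of the double-register
   number, so DFPREG(1) == 32 and DFPREG(3) == 34; on 32-bit SPARC the
   low bit is simply masked off.  */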
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
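
/* Example (added for illustration): sign_extend(0x0fff, 13) == 4095 and
   sign_extend(0x1000, 13) == -4096, matching the signed 13-bit
   immediates decoded below.  */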
#define IS_IMM (insn & (1<<13))

static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}

static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}

/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}
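
/* Note (added): each TCGv_i64 element of cpu_fpr[] packs two
   single-precision registers; per the indexing above, the even register
   occupies the high 32 bits and the odd register the low 32 bits, so
   %f0 is the high half of cpu_fpr[0] and %f1 the low half.  */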
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        TCGv t = get_temp_tl(dc);
        if (reg == 0) {
            tcg_gen_movi_tl(t, 0);
        } else {
            tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
        return t;
    } else {
        return cpu_gregs[reg];
    }
}

static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        if (reg < 8) {
            tcg_gen_mov_tl(cpu_gregs[reg], v);
        } else {
            tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
    }
}

static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        return get_temp_tl(dc);
    } else {
        return cpu_gregs[reg];
    }
}
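
/* Note (added): only the globals %g0-%g7 live in TCG globals
   (cpu_gregs); windowed registers are reached indirectly through
   cpu_regwptr.  The three helpers above also give %g0 its architectural
   behaviour: reads yield 0 and writes land in a discarded temporary.  */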
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
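
/* Note (added): exiting with (uintptr_t)tb + tb_num returns the TB
   pointer with the jump-slot index encoded in the low bits, which lets
   the execution loop patch this TB's direct jump to its successor
   (TB chaining); exiting with 0 skips chaining.  */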
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
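
/* Note (added): both helpers above recover the 32-bit carry/borrow flag
   arithmetically instead of reading the PSR: after an add, carry-out
   occurred iff (uint32_t)dst < (uint32_t)src, and after a subtract,
   borrow occurred iff (uint32_t)src1 < (uint32_t)src2.  */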
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(t0, cpu_y, 1);
    tcg_gen_andi_tl(t0, t0, 0x7fffffff);
    tcg_gen_or_tl(t0, t0, r_temp);
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
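
/* Note (added): MULScc performs one step of the SPARC V8 iterative
   multiply; software issues a sequence of these steps (roughly one per
   multiplier bit, plus a final fix-up add) to build a full 32x32
   product, with the Y register shifting in one multiplier bit per step.
   The N ^ V term above reproduces the arithmetic right shift of the
   running sum.  */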
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}

static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}

static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7:  /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}

static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}

#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
                                TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 t64 = tcg_temp_new_i64();

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(t64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val, t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, src);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, t64);
    tcg_temp_free_i64(t64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv t;
    TCGv_i64 t64;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    t64 = tcg_temp_new_i64();
    gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);

    t = gen_dest_gpr(dc, rd + 1);
    tcg_gen_trunc_i64_tl(t, t64);
    gen_store_gpr(dc, rd + 1, t);

    tcg_gen_shri_i64(t64, t64, 32);
    tcg_gen_trunc_i64_tl(hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd, hi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(t64);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
                               TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
#ifdef TARGET_SPARC64
    TCGv_i32 r_asi = gen_get_asi(insn, addr);
#else
    TCGv_i32 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
#endif

    gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}

static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
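
/* Note (added): LDSTUB loads a byte and then writes 0xff back to the
   same location (the classic SPARC spin-lock primitive); the helper
   above models it as a separate load and store rather than as a single
   atomic operation.  */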
#endif

static TCGv get_src1(DisasContext *dc, unsigned int insn)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);
    return gen_load_gpr(dc, rs1);
}

static TCGv get_src2(DisasContext *dc, unsigned int insn)
{
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, simm);
        return t;
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        return gen_load_gpr(dc, rs2);
    }
}
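
/* Note (added): bit 13 of the instruction (IS_IMM) selects the second
   operand: when set, GET_FIELDs(insn, 19, 31) yields the sign-extended
   13-bit immediate (e.g. a field of 0x1fff decodes to -1); otherwise
   the low five bits name rs2.  */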
#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the latter.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_trunc_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_trunc_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}

static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}

static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2349 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2350 int width, bool cc, bool left)
2351 {
2352 TCGv lo1, lo2, t1, t2;
2353 uint64_t amask, tabl, tabr;
2354 int shift, imask, omask;
2356 if (cc) {
2357 tcg_gen_mov_tl(cpu_cc_src, s1);
2358 tcg_gen_mov_tl(cpu_cc_src2, s2);
2359 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2360 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2361 dc->cc_op = CC_OP_SUB;
2362 }
2364 /* Theory of operation: there are two tables, left and right (not to
2365 be confused with the left and right versions of the opcode). These
2366 are indexed by the low 3 bits of the inputs. To make things "easy",
2367 these tables are loaded into two constants, TABL and TABR below.
2368 The operation index = (input & imask) << shift calculates the index
2369 into the constant, while val = (table >> index) & omask calculates
2370 the value we're looking for. */
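/* Worked example: edge8, left == 1, with s1 % 8 == 2 and s2 % 8 == 5:
   lo1 = (tabl >> (2 << 3)) & 0xff = 0xfc (byte lanes 2..7 of the left
   operand) and lo2 = (tabr >> (5 << 3)) & 0xff = 0x3f (lanes 0..5 of the
   right operand); when both addresses fall in the same aligned 8-byte
   word the result is lo1 & lo2 = 0x3c, otherwise just lo1. */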
2371 switch (width) {
2372 case 8:
2373 imask = 0x7;
2374 shift = 3;
2375 omask = 0xff;
2376 if (left) {
2377 tabl = 0x80c0e0f0f8fcfeffULL;
2378 tabr = 0xff7f3f1f0f070301ULL;
2379 } else {
2380 tabl = 0x0103070f1f3f7fffULL;
2381 tabr = 0xfffefcf8f0e0c080ULL;
2382 }
2383 break;
2384 case 16:
2385 imask = 0x6;
2386 shift = 1;
2387 omask = 0xf;
2388 if (left) {
2389 tabl = 0x8cef;
2390 tabr = 0xf731;
2391 } else {
2392 tabl = 0x137f;
2393 tabr = 0xfec8;
2394 }
2395 break;
2396 case 32:
2397 imask = 0x4;
2398 shift = 0;
2399 omask = 0x3;
2400 if (left) {
2401 tabl = (2 << 2) | 3;
2402 tabr = (3 << 2) | 1;
2403 } else {
2404 tabl = (1 << 2) | 3;
2405 tabr = (3 << 2) | 2;
2406 }
2407 break;
2408 default:
2409 abort();
2410 }
2412 lo1 = tcg_temp_new();
2413 lo2 = tcg_temp_new();
2414 tcg_gen_andi_tl(lo1, s1, imask);
2415 tcg_gen_andi_tl(lo2, s2, imask);
2416 tcg_gen_shli_tl(lo1, lo1, shift);
2417 tcg_gen_shli_tl(lo2, lo2, shift);
2419 t1 = tcg_const_tl(tabl);
2420 t2 = tcg_const_tl(tabr);
2421 tcg_gen_shr_tl(lo1, t1, lo1);
2422 tcg_gen_shr_tl(lo2, t2, lo2);
2423 tcg_gen_andi_tl(dst, lo1, omask);
2424 tcg_gen_andi_tl(lo2, lo2, omask);
2426 amask = -8;
2427 if (AM_CHECK(dc)) {
2428 amask &= 0xffffffffULL;
2429 }
2430 tcg_gen_andi_tl(s1, s1, amask);
2431 tcg_gen_andi_tl(s2, s2, amask);
2433 /* We want to compute
2434 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2435 We've already done dst = lo1, so this reduces to
2436 dst &= (s1 == s2 ? -1 : lo2)
2437 Which we perform by
2438 lo2 |= -(s1 == s2)
2439 dst &= lo2
2440 */
2441 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2442 tcg_gen_neg_tl(t1, t1);
2443 tcg_gen_or_tl(lo2, lo2, t1);
2444 tcg_gen_and_tl(dst, dst, lo2);
2446 tcg_temp_free(lo1);
2447 tcg_temp_free(lo2);
2448 tcg_temp_free(t1);
2449 tcg_temp_free(t2);
2450 }
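/* For reference, a plain-C model of the edge8 case above (an illustrative
   sketch only; it is not used by the translator and omits the 32-bit
   address masking applied under AM_CHECK): */
#if 0
static uint8_t edge8_ref(uint64_t s1, uint64_t s2, bool left)
{
    /* These shifts reproduce the rows of TABL and TABR above. */
    uint8_t lo1 = left ? 0xff << (s1 & 7) : 0xff >> (s1 & 7);
    uint8_t lo2 = left ? 0xff >> (7 - (s2 & 7)) : 0xff << (7 - (s2 & 7));
    /* Apply the right-hand mask only when both addresses point into
       the same aligned 8-byte word. */
    return ((s1 & ~7ULL) == (s2 & ~7ULL)) ? (lo1 & lo2) : lo1;
}
#endif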
2452 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2453 {
2454 TCGv tmp = tcg_temp_new();
2456 tcg_gen_add_tl(tmp, s1, s2);
2457 tcg_gen_andi_tl(dst, tmp, -8);
2458 if (left) {
2459 tcg_gen_neg_tl(tmp, tmp);
2460 }
2461 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2463 tcg_temp_free(tmp);
2464 }
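/* So alignaddr computes dst = (s1 + s2) & ~7 and stores the low three
   bits of the sum in GSR.align (negated for the alignaddrl variant) for
   use by a following faligndata. */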
2466 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2467 {
2468 TCGv t1, t2, shift;
2470 t1 = tcg_temp_new();
2471 t2 = tcg_temp_new();
2472 shift = tcg_temp_new();
2474 tcg_gen_andi_tl(shift, gsr, 7);
2475 tcg_gen_shli_tl(shift, shift, 3);
2476 tcg_gen_shl_tl(t1, s1, shift);
2478 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2479 shift of (up to 63) followed by a constant shift of 1. */
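/* e.g. GSR.align == 0 gives shift == 0: then shift ^ 63 == 63 and
   t2 = (s2 >> 63) >> 1 == s2 >> 64 == 0, as faligndata requires. */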
2480 tcg_gen_xori_tl(shift, shift, 63);
2481 tcg_gen_shr_tl(t2, s2, shift);
2482 tcg_gen_shri_tl(t2, t2, 1);
2484 tcg_gen_or_tl(dst, t1, t2);
2486 tcg_temp_free(t1);
2487 tcg_temp_free(t2);
2488 tcg_temp_free(shift);
2489 }
2490 #endif
2492 #define CHECK_IU_FEATURE(dc, FEATURE) \
2493 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2494 goto illegal_insn;
2495 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2496 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2497 goto nfpu_insn;
2499 /* before an instruction, dc->pc must be static */
2500 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2502 unsigned int opc, rs1, rs2, rd;
2503 TCGv cpu_src1, cpu_src2;
2504 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2505 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2506 target_long simm;
2508 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2509 tcg_gen_debug_insn_start(dc->pc);
2512 opc = GET_FIELD(insn, 0, 1);
2513 rd = GET_FIELD(insn, 2, 6);
2515 switch (opc) {
2516 case 0: /* branches/sethi */
2518 unsigned int xop = GET_FIELD(insn, 7, 9);
2519 int32_t target;
2520 switch (xop) {
2521 #ifdef TARGET_SPARC64
2522 case 0x1: /* V9 BPcc */
2524 int cc;
2526 target = GET_FIELD_SP(insn, 0, 18);
2527 target = sign_extend(target, 19);
2528 target <<= 2;
2529 cc = GET_FIELD_SP(insn, 20, 21);
2530 if (cc == 0)
2531 do_branch(dc, target, insn, 0);
2532 else if (cc == 2)
2533 do_branch(dc, target, insn, 1);
2534 else
2535 goto illegal_insn;
2536 goto jmp_insn;
2538 case 0x3: /* V9 BPr */
2540 target = GET_FIELD_SP(insn, 0, 13) |
2541 (GET_FIELD_SP(insn, 20, 21) << 14);
2542 target = sign_extend(target, 16);
2543 target <<= 2;
2544 cpu_src1 = get_src1(dc, insn);
2545 do_branch_reg(dc, target, insn, cpu_src1);
2546 goto jmp_insn;
2548 case 0x5: /* V9 FBPcc */
2550 int cc = GET_FIELD_SP(insn, 20, 21);
2551 if (gen_trap_ifnofpu(dc)) {
2552 goto jmp_insn;
2553 }
2554 target = GET_FIELD_SP(insn, 0, 18);
2555 target = sign_extend(target, 19);
2556 target <<= 2;
2557 do_fbranch(dc, target, insn, cc);
2558 goto jmp_insn;
2560 #else
2561 case 0x7: /* CBN+x */
2563 goto ncp_insn;
2565 #endif
2566 case 0x2: /* BN+x */
2568 target = GET_FIELD(insn, 10, 31);
2569 target = sign_extend(target, 22);
2570 target <<= 2;
2571 do_branch(dc, target, insn, 0);
2572 goto jmp_insn;
2574 case 0x6: /* FBN+x */
2576 if (gen_trap_ifnofpu(dc)) {
2577 goto jmp_insn;
2578 }
2579 target = GET_FIELD(insn, 10, 31);
2580 target = sign_extend(target, 22);
2581 target <<= 2;
2582 do_fbranch(dc, target, insn, 0);
2583 goto jmp_insn;
2585 case 0x4: /* SETHI */
2586 /* Special-case %g0 because that's the canonical nop. */
2587 if (rd) {
2588 uint32_t value = GET_FIELD(insn, 10, 31);
2589 TCGv t = gen_dest_gpr(dc, rd);
2590 tcg_gen_movi_tl(t, value << 10);
2591 gen_store_gpr(dc, rd, t);
2592 }
2593 break;
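/* e.g. sethi %hi(0x12345400), %g1 writes imm22 << 10 == 0x12345400
   to %g1, while sethi 0, %g0 is the canonical nop encoding. */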
2594 case 0x0: /* UNIMPL */
2595 default:
2596 goto illegal_insn;
2598 break;
2600 break;
2601 case 1: /*CALL*/
2603 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2604 TCGv o7 = gen_dest_gpr(dc, 15);
2606 tcg_gen_movi_tl(o7, dc->pc);
2607 gen_store_gpr(dc, 15, o7);
2608 target += dc->pc;
2609 gen_mov_pc_npc(dc);
2610 #ifdef TARGET_SPARC64
2611 if (unlikely(AM_CHECK(dc))) {
2612 target &= 0xffffffffULL;
2613 }
2614 #endif
2615 dc->npc = target;
2617 goto jmp_insn;
2618 case 2: /* FPU & Logical Operations */
2620 unsigned int xop = GET_FIELD(insn, 7, 12);
2621 TCGv cpu_dst = get_temp_tl(dc);
2622 TCGv cpu_tmp0;
2624 if (xop == 0x3a) { /* generate trap */
2625 int cond = GET_FIELD(insn, 3, 6);
2626 TCGv_i32 trap;
2627 int l1 = -1, mask;
2629 if (cond == 0) {
2630 /* Trap never. */
2631 break;
2634 save_state(dc);
2636 if (cond != 8) {
2637 /* Conditional trap. */
2638 DisasCompare cmp;
2639 #ifdef TARGET_SPARC64
2640 /* V9 icc/xcc */
2641 int cc = GET_FIELD_SP(insn, 11, 12);
2642 if (cc == 0) {
2643 gen_compare(&cmp, 0, cond, dc);
2644 } else if (cc == 2) {
2645 gen_compare(&cmp, 1, cond, dc);
2646 } else {
2647 goto illegal_insn;
2649 #else
2650 gen_compare(&cmp, 0, cond, dc);
2651 #endif
2652 l1 = gen_new_label();
2653 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2654 cmp.c1, cmp.c2, l1);
2655 free_compare(&cmp);
2658 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2659 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2661 /* Don't use the normal temporaries, as they may well have
2662 gone out of scope with the branch above. While we're
2663 doing that we might as well pre-truncate to 32-bit. */
2664 trap = tcg_temp_new_i32();
2666 rs1 = GET_FIELD_SP(insn, 14, 18);
2667 if (IS_IMM) {
2668 rs2 = GET_FIELD_SP(insn, 0, 6);
2669 if (rs1 == 0) {
2670 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2671 /* Signal that the trap value is fully constant. */
2672 mask = 0;
2673 } else {
2674 TCGv t1 = gen_load_gpr(dc, rs1);
2675 tcg_gen_trunc_tl_i32(trap, t1);
2676 tcg_gen_addi_i32(trap, trap, rs2);
2678 } else {
2679 TCGv t1, t2;
2680 rs2 = GET_FIELD_SP(insn, 0, 4);
2681 t1 = gen_load_gpr(dc, rs1);
2682 t2 = gen_load_gpr(dc, rs2);
2683 tcg_gen_add_tl(t1, t1, t2);
2684 tcg_gen_trunc_tl_i32(trap, t1);
2686 if (mask != 0) {
2687 tcg_gen_andi_i32(trap, trap, mask);
2688 tcg_gen_addi_i32(trap, trap, TT_TRAP);
2691 gen_helper_raise_exception(cpu_env, trap);
2692 tcg_temp_free_i32(trap);
2694 if (cond == 8) {
2695 /* An unconditional trap ends the TB. */
2696 dc->is_br = 1;
2697 goto jmp_insn;
2698 } else {
2699 /* A conditional trap falls through to the next insn. */
2700 gen_set_label(l1);
2701 break;
2703 } else if (xop == 0x28) {
2704 rs1 = GET_FIELD(insn, 13, 17);
2705 switch(rs1) {
2706 case 0: /* rdy */
2707 #ifndef TARGET_SPARC64
2708 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2709 manual, rdy on the microSPARC
2710 II */
2711 case 0x0f: /* stbar in the SPARCv8 manual,
2712 rdy on the microSPARC II */
2713 case 0x10 ... 0x1f: /* implementation-dependent in the
2714 SPARCv8 manual, rdy on the
2715 microSPARC II */
2716 /* Read Asr17 */
2717 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2718 TCGv t = gen_dest_gpr(dc, rd);
2719 /* Read Asr17 for a Leon3 monoprocessor */
2720 tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2721 gen_store_gpr(dc, rd, t);
2722 break;
2724 #endif
2725 gen_store_gpr(dc, rd, cpu_y);
2726 break;
2727 #ifdef TARGET_SPARC64
2728 case 0x2: /* V9 rdccr */
2729 update_psr(dc);
2730 gen_helper_rdccr(cpu_dst, cpu_env);
2731 gen_store_gpr(dc, rd, cpu_dst);
2732 break;
2733 case 0x3: /* V9 rdasi */
2734 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2735 gen_store_gpr(dc, rd, cpu_dst);
2736 break;
2737 case 0x4: /* V9 rdtick */
2739 TCGv_ptr r_tickptr;
2741 r_tickptr = tcg_temp_new_ptr();
2742 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2743 offsetof(CPUSPARCState, tick));
2744 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2745 tcg_temp_free_ptr(r_tickptr);
2746 gen_store_gpr(dc, rd, cpu_dst);
2748 break;
2749 case 0x5: /* V9 rdpc */
2751 TCGv t = gen_dest_gpr(dc, rd);
2752 if (unlikely(AM_CHECK(dc))) {
2753 tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2754 } else {
2755 tcg_gen_movi_tl(t, dc->pc);
2757 gen_store_gpr(dc, rd, t);
2759 break;
2760 case 0x6: /* V9 rdfprs */
2761 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2762 gen_store_gpr(dc, rd, cpu_dst);
2763 break;
2764 case 0xf: /* V9 membar */
2765 break; /* no effect */
2766 case 0x13: /* Graphics Status */
2767 if (gen_trap_ifnofpu(dc)) {
2768 goto jmp_insn;
2769 }
2770 gen_store_gpr(dc, rd, cpu_gsr);
2771 break;
2772 case 0x16: /* Softint */
2773 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2774 gen_store_gpr(dc, rd, cpu_dst);
2775 break;
2776 case 0x17: /* Tick compare */
2777 gen_store_gpr(dc, rd, cpu_tick_cmpr);
2778 break;
2779 case 0x18: /* System tick */
2781 TCGv_ptr r_tickptr;
2783 r_tickptr = tcg_temp_new_ptr();
2784 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2785 offsetof(CPUSPARCState, stick));
2786 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2787 tcg_temp_free_ptr(r_tickptr);
2788 gen_store_gpr(dc, rd, cpu_dst);
2790 break;
2791 case 0x19: /* System tick compare */
2792 gen_store_gpr(dc, rd, cpu_stick_cmpr);
2793 break;
2794 case 0x10: /* Performance Control */
2795 case 0x11: /* Performance Instrumentation Counter */
2796 case 0x12: /* Dispatch Control */
2797 case 0x14: /* Softint set, WO */
2798 case 0x15: /* Softint clear, WO */
2799 #endif
2800 default:
2801 goto illegal_insn;
2803 #if !defined(CONFIG_USER_ONLY)
2804 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2805 #ifndef TARGET_SPARC64
2806 if (!supervisor(dc)) {
2807 goto priv_insn;
2809 update_psr(dc);
2810 gen_helper_rdpsr(cpu_dst, cpu_env);
2811 #else
2812 CHECK_IU_FEATURE(dc, HYPV);
2813 if (!hypervisor(dc))
2814 goto priv_insn;
2815 rs1 = GET_FIELD(insn, 13, 17);
2816 switch (rs1) {
2817 case 0: // hpstate
2818 // gen_op_rdhpstate();
2819 break;
2820 case 1: // htstate
2821 // gen_op_rdhtstate();
2822 break;
2823 case 3: // hintp
2824 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2825 break;
2826 case 5: // htba
2827 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2828 break;
2829 case 6: // hver
2830 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2831 break;
2832 case 31: // hstick_cmpr
2833 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2834 break;
2835 default:
2836 goto illegal_insn;
2838 #endif
2839 gen_store_gpr(dc, rd, cpu_dst);
2840 break;
2841 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2842 if (!supervisor(dc)) {
2843 goto priv_insn;
2845 cpu_tmp0 = get_temp_tl(dc);
2846 #ifdef TARGET_SPARC64
2847 rs1 = GET_FIELD(insn, 13, 17);
2848 switch (rs1) {
2849 case 0: // tpc
2851 TCGv_ptr r_tsptr;
2853 r_tsptr = tcg_temp_new_ptr();
2854 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2855 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2856 offsetof(trap_state, tpc));
2857 tcg_temp_free_ptr(r_tsptr);
2859 break;
2860 case 1: // tnpc
2862 TCGv_ptr r_tsptr;
2864 r_tsptr = tcg_temp_new_ptr();
2865 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2866 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2867 offsetof(trap_state, tnpc));
2868 tcg_temp_free_ptr(r_tsptr);
2870 break;
2871 case 2: // tstate
2873 TCGv_ptr r_tsptr;
2875 r_tsptr = tcg_temp_new_ptr();
2876 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2877 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2878 offsetof(trap_state, tstate));
2879 tcg_temp_free_ptr(r_tsptr);
2881 break;
2882 case 3: // tt
2884 TCGv_ptr r_tsptr = tcg_temp_new_ptr();
2886 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2887 tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
2888 offsetof(trap_state, tt));
2889 tcg_temp_free_ptr(r_tsptr);
2891 break;
2892 case 4: // tick
2894 TCGv_ptr r_tickptr;
2896 r_tickptr = tcg_temp_new_ptr();
2897 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2898 offsetof(CPUSPARCState, tick));
2899 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2900 tcg_temp_free_ptr(r_tickptr);
2902 break;
2903 case 5: // tba
2904 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2905 break;
2906 case 6: // pstate
2907 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2908 offsetof(CPUSPARCState, pstate));
2909 break;
2910 case 7: // tl
2911 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2912 offsetof(CPUSPARCState, tl));
2913 break;
2914 case 8: // pil
2915 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2916 offsetof(CPUSPARCState, psrpil));
2917 break;
2918 case 9: // cwp
2919 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2920 break;
2921 case 10: // cansave
2922 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2923 offsetof(CPUSPARCState, cansave));
2924 break;
2925 case 11: // canrestore
2926 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2927 offsetof(CPUSPARCState, canrestore));
2928 break;
2929 case 12: // cleanwin
2930 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2931 offsetof(CPUSPARCState, cleanwin));
2932 break;
2933 case 13: // otherwin
2934 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2935 offsetof(CPUSPARCState, otherwin));
2936 break;
2937 case 14: // wstate
2938 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2939 offsetof(CPUSPARCState, wstate));
2940 break;
2941 case 16: // UA2005 gl
2942 CHECK_IU_FEATURE(dc, GL);
2943 tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
2944 offsetof(CPUSPARCState, gl));
2945 break;
2946 case 26: // UA2005 strand status
2947 CHECK_IU_FEATURE(dc, HYPV);
2948 if (!hypervisor(dc))
2949 goto priv_insn;
2950 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2951 break;
2952 case 31: // ver
2953 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2954 break;
2955 case 15: // fq
2956 default:
2957 goto illegal_insn;
2959 #else
2960 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2961 #endif
2962 gen_store_gpr(dc, rd, cpu_tmp0);
2963 break;
2964 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2965 #ifdef TARGET_SPARC64
2966 save_state(dc);
2967 gen_helper_flushw(cpu_env);
2968 #else
2969 if (!supervisor(dc))
2970 goto priv_insn;
2971 gen_store_gpr(dc, rd, cpu_tbr);
2972 #endif
2973 break;
2974 #endif
2975 } else if (xop == 0x34) { /* FPU Operations */
2976 if (gen_trap_ifnofpu(dc)) {
2977 goto jmp_insn;
2978 }
2979 gen_op_clear_ieee_excp_and_FTT();
2980 rs1 = GET_FIELD(insn, 13, 17);
2981 rs2 = GET_FIELD(insn, 27, 31);
2982 xop = GET_FIELD(insn, 18, 26);
2983 save_state(dc);
2984 switch (xop) {
2985 case 0x1: /* fmovs */
2986 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2987 gen_store_fpr_F(dc, rd, cpu_src1_32);
2988 break;
2989 case 0x5: /* fnegs */
2990 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2991 break;
2992 case 0x9: /* fabss */
2993 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2994 break;
2995 case 0x29: /* fsqrts */
2996 CHECK_FPU_FEATURE(dc, FSQRT);
2997 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2998 break;
2999 case 0x2a: /* fsqrtd */
3000 CHECK_FPU_FEATURE(dc, FSQRT);
3001 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3002 break;
3003 case 0x2b: /* fsqrtq */
3004 CHECK_FPU_FEATURE(dc, FLOAT128);
3005 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3006 break;
3007 case 0x41: /* fadds */
3008 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3009 break;
3010 case 0x42: /* faddd */
3011 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3012 break;
3013 case 0x43: /* faddq */
3014 CHECK_FPU_FEATURE(dc, FLOAT128);
3015 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3016 break;
3017 case 0x45: /* fsubs */
3018 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3019 break;
3020 case 0x46: /* fsubd */
3021 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3022 break;
3023 case 0x47: /* fsubq */
3024 CHECK_FPU_FEATURE(dc, FLOAT128);
3025 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3026 break;
3027 case 0x49: /* fmuls */
3028 CHECK_FPU_FEATURE(dc, FMUL);
3029 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3030 break;
3031 case 0x4a: /* fmuld */
3032 CHECK_FPU_FEATURE(dc, FMUL);
3033 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3034 break;
3035 case 0x4b: /* fmulq */
3036 CHECK_FPU_FEATURE(dc, FLOAT128);
3037 CHECK_FPU_FEATURE(dc, FMUL);
3038 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3039 break;
3040 case 0x4d: /* fdivs */
3041 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3042 break;
3043 case 0x4e: /* fdivd */
3044 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3045 break;
3046 case 0x4f: /* fdivq */
3047 CHECK_FPU_FEATURE(dc, FLOAT128);
3048 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3049 break;
3050 case 0x69: /* fsmuld */
3051 CHECK_FPU_FEATURE(dc, FSMULD);
3052 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3053 break;
3054 case 0x6e: /* fdmulq */
3055 CHECK_FPU_FEATURE(dc, FLOAT128);
3056 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3057 break;
3058 case 0xc4: /* fitos */
3059 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3060 break;
3061 case 0xc6: /* fdtos */
3062 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3063 break;
3064 case 0xc7: /* fqtos */
3065 CHECK_FPU_FEATURE(dc, FLOAT128);
3066 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3067 break;
3068 case 0xc8: /* fitod */
3069 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3070 break;
3071 case 0xc9: /* fstod */
3072 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3073 break;
3074 case 0xcb: /* fqtod */
3075 CHECK_FPU_FEATURE(dc, FLOAT128);
3076 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3077 break;
3078 case 0xcc: /* fitoq */
3079 CHECK_FPU_FEATURE(dc, FLOAT128);
3080 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3081 break;
3082 case 0xcd: /* fstoq */
3083 CHECK_FPU_FEATURE(dc, FLOAT128);
3084 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3085 break;
3086 case 0xce: /* fdtoq */
3087 CHECK_FPU_FEATURE(dc, FLOAT128);
3088 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3089 break;
3090 case 0xd1: /* fstoi */
3091 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3092 break;
3093 case 0xd2: /* fdtoi */
3094 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3095 break;
3096 case 0xd3: /* fqtoi */
3097 CHECK_FPU_FEATURE(dc, FLOAT128);
3098 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3099 break;
3100 #ifdef TARGET_SPARC64
3101 case 0x2: /* V9 fmovd */
3102 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3103 gen_store_fpr_D(dc, rd, cpu_src1_64);
3104 break;
3105 case 0x3: /* V9 fmovq */
3106 CHECK_FPU_FEATURE(dc, FLOAT128);
3107 gen_move_Q(rd, rs2);
3108 break;
3109 case 0x6: /* V9 fnegd */
3110 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3111 break;
3112 case 0x7: /* V9 fnegq */
3113 CHECK_FPU_FEATURE(dc, FLOAT128);
3114 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3115 break;
3116 case 0xa: /* V9 fabsd */
3117 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3118 break;
3119 case 0xb: /* V9 fabsq */
3120 CHECK_FPU_FEATURE(dc, FLOAT128);
3121 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3122 break;
3123 case 0x81: /* V9 fstox */
3124 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3125 break;
3126 case 0x82: /* V9 fdtox */
3127 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3128 break;
3129 case 0x83: /* V9 fqtox */
3130 CHECK_FPU_FEATURE(dc, FLOAT128);
3131 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3132 break;
3133 case 0x84: /* V9 fxtos */
3134 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3135 break;
3136 case 0x88: /* V9 fxtod */
3137 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3138 break;
3139 case 0x8c: /* V9 fxtoq */
3140 CHECK_FPU_FEATURE(dc, FLOAT128);
3141 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3142 break;
3143 #endif
3144 default:
3145 goto illegal_insn;
3147 } else if (xop == 0x35) { /* FPU Operations */
3148 #ifdef TARGET_SPARC64
3149 int cond;
3150 #endif
3151 if (gen_trap_ifnofpu(dc)) {
3152 goto jmp_insn;
3153 }
3154 gen_op_clear_ieee_excp_and_FTT();
3155 rs1 = GET_FIELD(insn, 13, 17);
3156 rs2 = GET_FIELD(insn, 27, 31);
3157 xop = GET_FIELD(insn, 18, 26);
3158 save_state(dc);
3160 #ifdef TARGET_SPARC64
3161 #define FMOVR(sz) \
3162 do { \
3163 DisasCompare cmp; \
3164 cond = GET_FIELD_SP(insn, 10, 12); \
3165 cpu_src1 = get_src1(dc, insn); \
3166 gen_compare_reg(&cmp, cond, cpu_src1); \
3167 gen_fmov##sz(dc, &cmp, rd, rs2); \
3168 free_compare(&cmp); \
3169 } while (0)
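/* FMOVR conditionally moves an FP register on a test of integer register
   rs1; rcond encodes Z, LEZ, LZ, NZ, GZ or GEZ, evaluated here by
   gen_compare_reg(). */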
3171 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3172 FMOVR(s);
3173 break;
3174 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3175 FMOVR(d);
3176 break;
3177 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3178 CHECK_FPU_FEATURE(dc, FLOAT128);
3179 FMOVR(q);
3180 break;
3182 #undef FMOVR
3183 #endif
3184 switch (xop) {
3185 #ifdef TARGET_SPARC64
3186 #define FMOVCC(fcc, sz) \
3187 do { \
3188 DisasCompare cmp; \
3189 cond = GET_FIELD_SP(insn, 14, 17); \
3190 gen_fcompare(&cmp, fcc, cond); \
3191 gen_fmov##sz(dc, &cmp, rd, rs2); \
3192 free_compare(&cmp); \
3193 } while (0)
3195 case 0x001: /* V9 fmovscc %fcc0 */
3196 FMOVCC(0, s);
3197 break;
3198 case 0x002: /* V9 fmovdcc %fcc0 */
3199 FMOVCC(0, d);
3200 break;
3201 case 0x003: /* V9 fmovqcc %fcc0 */
3202 CHECK_FPU_FEATURE(dc, FLOAT128);
3203 FMOVCC(0, q);
3204 break;
3205 case 0x041: /* V9 fmovscc %fcc1 */
3206 FMOVCC(1, s);
3207 break;
3208 case 0x042: /* V9 fmovdcc %fcc1 */
3209 FMOVCC(1, d);
3210 break;
3211 case 0x043: /* V9 fmovqcc %fcc1 */
3212 CHECK_FPU_FEATURE(dc, FLOAT128);
3213 FMOVCC(1, q);
3214 break;
3215 case 0x081: /* V9 fmovscc %fcc2 */
3216 FMOVCC(2, s);
3217 break;
3218 case 0x082: /* V9 fmovdcc %fcc2 */
3219 FMOVCC(2, d);
3220 break;
3221 case 0x083: /* V9 fmovqcc %fcc2 */
3222 CHECK_FPU_FEATURE(dc, FLOAT128);
3223 FMOVCC(2, q);
3224 break;
3225 case 0x0c1: /* V9 fmovscc %fcc3 */
3226 FMOVCC(3, s);
3227 break;
3228 case 0x0c2: /* V9 fmovdcc %fcc3 */
3229 FMOVCC(3, d);
3230 break;
3231 case 0x0c3: /* V9 fmovqcc %fcc3 */
3232 CHECK_FPU_FEATURE(dc, FLOAT128);
3233 FMOVCC(3, q);
3234 break;
3235 #undef FMOVCC
3236 #define FMOVCC(xcc, sz) \
3237 do { \
3238 DisasCompare cmp; \
3239 cond = GET_FIELD_SP(insn, 14, 17); \
3240 gen_compare(&cmp, xcc, cond, dc); \
3241 gen_fmov##sz(dc, &cmp, rd, rs2); \
3242 free_compare(&cmp); \
3243 } while (0)
3245 case 0x101: /* V9 fmovscc %icc */
3246 FMOVCC(0, s);
3247 break;
3248 case 0x102: /* V9 fmovdcc %icc */
3249 FMOVCC(0, d);
3250 break;
3251 case 0x103: /* V9 fmovqcc %icc */
3252 CHECK_FPU_FEATURE(dc, FLOAT128);
3253 FMOVCC(0, q);
3254 break;
3255 case 0x181: /* V9 fmovscc %xcc */
3256 FMOVCC(1, s);
3257 break;
3258 case 0x182: /* V9 fmovdcc %xcc */
3259 FMOVCC(1, d);
3260 break;
3261 case 0x183: /* V9 fmovqcc %xcc */
3262 CHECK_FPU_FEATURE(dc, FLOAT128);
3263 FMOVCC(1, q);
3264 break;
3265 #undef FMOVCC
3266 #endif
3267 case 0x51: /* fcmps, V9 %fcc */
3268 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3269 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3270 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3271 break;
3272 case 0x52: /* fcmpd, V9 %fcc */
3273 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3274 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3275 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3276 break;
3277 case 0x53: /* fcmpq, V9 %fcc */
3278 CHECK_FPU_FEATURE(dc, FLOAT128);
3279 gen_op_load_fpr_QT0(QFPREG(rs1));
3280 gen_op_load_fpr_QT1(QFPREG(rs2));
3281 gen_op_fcmpq(rd & 3);
3282 break;
3283 case 0x55: /* fcmpes, V9 %fcc */
3284 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3285 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3286 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3287 break;
3288 case 0x56: /* fcmped, V9 %fcc */
3289 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3290 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3291 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3292 break;
3293 case 0x57: /* fcmpeq, V9 %fcc */
3294 CHECK_FPU_FEATURE(dc, FLOAT128);
3295 gen_op_load_fpr_QT0(QFPREG(rs1));
3296 gen_op_load_fpr_QT1(QFPREG(rs2));
3297 gen_op_fcmpeq(rd & 3);
3298 break;
3299 default:
3300 goto illegal_insn;
3302 } else if (xop == 0x2) {
3303 TCGv dst = gen_dest_gpr(dc, rd);
3304 rs1 = GET_FIELD(insn, 13, 17);
3305 if (rs1 == 0) {
3306 /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3307 if (IS_IMM) { /* immediate */
3308 simm = GET_FIELDs(insn, 19, 31);
3309 tcg_gen_movi_tl(dst, simm);
3310 gen_store_gpr(dc, rd, dst);
3311 } else { /* register */
3312 rs2 = GET_FIELD(insn, 27, 31);
3313 if (rs2 == 0) {
3314 tcg_gen_movi_tl(dst, 0);
3315 gen_store_gpr(dc, rd, dst);
3316 } else {
3317 cpu_src2 = gen_load_gpr(dc, rs2);
3318 gen_store_gpr(dc, rd, cpu_src2);
3321 } else {
3322 cpu_src1 = get_src1(dc, insn);
3323 if (IS_IMM) { /* immediate */
3324 simm = GET_FIELDs(insn, 19, 31);
3325 tcg_gen_ori_tl(dst, cpu_src1, simm);
3326 gen_store_gpr(dc, rd, dst);
3327 } else { /* register */
3328 rs2 = GET_FIELD(insn, 27, 31);
3329 if (rs2 == 0) {
3330 /* mov shortcut: or x, %g0, y -> mov x, y */
3331 gen_store_gpr(dc, rd, cpu_src1);
3332 } else {
3333 cpu_src2 = gen_load_gpr(dc, rs2);
3334 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3335 gen_store_gpr(dc, rd, dst);
3339 #ifdef TARGET_SPARC64
3340 } else if (xop == 0x25) { /* sll, V9 sllx */
3341 cpu_src1 = get_src1(dc, insn);
3342 if (IS_IMM) { /* immediate */
3343 simm = GET_FIELDs(insn, 20, 31);
3344 if (insn & (1 << 12)) {
3345 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3346 } else {
3347 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3349 } else { /* register */
3350 rs2 = GET_FIELD(insn, 27, 31);
3351 cpu_src2 = gen_load_gpr(dc, rs2);
3352 cpu_tmp0 = get_temp_tl(dc);
3353 if (insn & (1 << 12)) {
3354 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3355 } else {
3356 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3358 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3360 gen_store_gpr(dc, rd, cpu_dst);
3361 } else if (xop == 0x26) { /* srl, V9 srlx */
3362 cpu_src1 = get_src1(dc, insn);
3363 if (IS_IMM) { /* immediate */
3364 simm = GET_FIELDs(insn, 20, 31);
3365 if (insn & (1 << 12)) {
3366 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3367 } else {
3368 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3369 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3371 } else { /* register */
3372 rs2 = GET_FIELD(insn, 27, 31);
3373 cpu_src2 = gen_load_gpr(dc, rs2);
3374 cpu_tmp0 = get_temp_tl(dc);
3375 if (insn & (1 << 12)) {
3376 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3377 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3378 } else {
3379 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3380 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3381 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3384 gen_store_gpr(dc, rd, cpu_dst);
3385 } else if (xop == 0x27) { /* sra, V9 srax */
3386 cpu_src1 = get_src1(dc, insn);
3387 if (IS_IMM) { /* immediate */
3388 simm = GET_FIELDs(insn, 20, 31);
3389 if (insn & (1 << 12)) {
3390 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3391 } else {
3392 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3393 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3395 } else { /* register */
3396 rs2 = GET_FIELD(insn, 27, 31);
3397 cpu_src2 = gen_load_gpr(dc, rs2);
3398 cpu_tmp0 = get_temp_tl(dc);
3399 if (insn & (1 << 12)) {
3400 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3401 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3402 } else {
3403 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3404 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3405 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3408 gen_store_gpr(dc, rd, cpu_dst);
3409 #endif
3410 } else if (xop < 0x36) {
3411 if (xop < 0x20) {
3412 cpu_src1 = get_src1(dc, insn);
3413 cpu_src2 = get_src2(dc, insn);
3414 switch (xop & ~0x10) {
3415 case 0x0: /* add */
3416 if (xop & 0x10) {
3417 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3418 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3419 dc->cc_op = CC_OP_ADD;
3420 } else {
3421 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3423 break;
3424 case 0x1: /* and */
3425 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3426 if (xop & 0x10) {
3427 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3428 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3429 dc->cc_op = CC_OP_LOGIC;
3431 break;
3432 case 0x2: /* or */
3433 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3434 if (xop & 0x10) {
3435 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3436 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3437 dc->cc_op = CC_OP_LOGIC;
3439 break;
3440 case 0x3: /* xor */
3441 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3442 if (xop & 0x10) {
3443 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3444 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3445 dc->cc_op = CC_OP_LOGIC;
3447 break;
3448 case 0x4: /* sub */
3449 if (xop & 0x10) {
3450 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3451 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3452 dc->cc_op = CC_OP_SUB;
3453 } else {
3454 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3456 break;
3457 case 0x5: /* andn */
3458 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3459 if (xop & 0x10) {
3460 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3461 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3462 dc->cc_op = CC_OP_LOGIC;
3464 break;
3465 case 0x6: /* orn */
3466 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3467 if (xop & 0x10) {
3468 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3469 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3470 dc->cc_op = CC_OP_LOGIC;
3472 break;
3473 case 0x7: /* xorn */
3474 tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3475 if (xop & 0x10) {
3476 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3477 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3478 dc->cc_op = CC_OP_LOGIC;
3480 break;
3481 case 0x8: /* addx, V9 addc */
3482 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3483 (xop & 0x10));
3484 break;
3485 #ifdef TARGET_SPARC64
3486 case 0x9: /* V9 mulx */
3487 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3488 break;
3489 #endif
3490 case 0xa: /* umul */
3491 CHECK_IU_FEATURE(dc, MUL);
3492 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3493 if (xop & 0x10) {
3494 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3495 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3496 dc->cc_op = CC_OP_LOGIC;
3498 break;
3499 case 0xb: /* smul */
3500 CHECK_IU_FEATURE(dc, MUL);
3501 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3502 if (xop & 0x10) {
3503 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3504 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3505 dc->cc_op = CC_OP_LOGIC;
3507 break;
3508 case 0xc: /* subx, V9 subc */
3509 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3510 (xop & 0x10));
3511 break;
3512 #ifdef TARGET_SPARC64
3513 case 0xd: /* V9 udivx */
3514 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3515 break;
3516 #endif
3517 case 0xe: /* udiv */
3518 CHECK_IU_FEATURE(dc, DIV);
3519 if (xop & 0x10) {
3520 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3521 cpu_src2);
3522 dc->cc_op = CC_OP_DIV;
3523 } else {
3524 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3525 cpu_src2);
3527 break;
3528 case 0xf: /* sdiv */
3529 CHECK_IU_FEATURE(dc, DIV);
3530 if (xop & 0x10) {
3531 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3532 cpu_src2);
3533 dc->cc_op = CC_OP_DIV;
3534 } else {
3535 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3536 cpu_src2);
3538 break;
3539 default:
3540 goto illegal_insn;
3542 gen_store_gpr(dc, rd, cpu_dst);
3543 } else {
3544 cpu_src1 = get_src1(dc, insn);
3545 cpu_src2 = get_src2(dc, insn);
3546 switch (xop) {
3547 case 0x20: /* taddcc */
3548 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3549 gen_store_gpr(dc, rd, cpu_dst);
3550 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3551 dc->cc_op = CC_OP_TADD;
3552 break;
3553 case 0x21: /* tsubcc */
3554 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3555 gen_store_gpr(dc, rd, cpu_dst);
3556 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3557 dc->cc_op = CC_OP_TSUB;
3558 break;
3559 case 0x22: /* taddcctv */
3560 gen_helper_taddcctv(cpu_dst, cpu_env,
3561 cpu_src1, cpu_src2);
3562 gen_store_gpr(dc, rd, cpu_dst);
3563 dc->cc_op = CC_OP_TADDTV;
3564 break;
3565 case 0x23: /* tsubcctv */
3566 gen_helper_tsubcctv(cpu_dst, cpu_env,
3567 cpu_src1, cpu_src2);
3568 gen_store_gpr(dc, rd, cpu_dst);
3569 dc->cc_op = CC_OP_TSUBTV;
3570 break;
3571 case 0x24: /* mulscc */
3572 update_psr(dc);
3573 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3574 gen_store_gpr(dc, rd, cpu_dst);
3575 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3576 dc->cc_op = CC_OP_ADD;
3577 break;
3578 #ifndef TARGET_SPARC64
3579 case 0x25: /* sll */
3580 if (IS_IMM) { /* immediate */
3581 simm = GET_FIELDs(insn, 20, 31);
3582 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3583 } else { /* register */
3584 cpu_tmp0 = get_temp_tl(dc);
3585 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3586 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3588 gen_store_gpr(dc, rd, cpu_dst);
3589 break;
3590 case 0x26: /* srl */
3591 if (IS_IMM) { /* immediate */
3592 simm = GET_FIELDs(insn, 20, 31);
3593 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3594 } else { /* register */
3595 cpu_tmp0 = get_temp_tl(dc);
3596 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3597 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3599 gen_store_gpr(dc, rd, cpu_dst);
3600 break;
3601 case 0x27: /* sra */
3602 if (IS_IMM) { /* immediate */
3603 simm = GET_FIELDs(insn, 20, 31);
3604 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3605 } else { /* register */
3606 cpu_tmp0 = get_temp_tl(dc);
3607 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3608 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3610 gen_store_gpr(dc, rd, cpu_dst);
3611 break;
3612 #endif
3613 case 0x30:
3615 cpu_tmp0 = get_temp_tl(dc);
3616 switch(rd) {
3617 case 0: /* wry */
3618 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3619 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3620 break;
3621 #ifndef TARGET_SPARC64
3622 case 0x01 ... 0x0f: /* undefined in the
3623 SPARCv8 manual, nop
3624 on the microSPARC
3625 II */
3626 case 0x10 ... 0x1f: /* implementation-dependent
3627 in the SPARCv8
3628 manual, nop on the
3629 microSPARC II */
3630 if ((rd == 0x13) && (dc->def->features &
3631 CPU_FEATURE_POWERDOWN)) {
3632 /* LEON3 power-down */
3633 save_state(dc);
3634 gen_helper_power_down(cpu_env);
3636 break;
3637 #else
3638 case 0x2: /* V9 wrccr */
3639 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3640 gen_helper_wrccr(cpu_env, cpu_tmp0);
3641 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3642 dc->cc_op = CC_OP_FLAGS;
3643 break;
3644 case 0x3: /* V9 wrasi */
3645 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3646 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
3647 tcg_gen_trunc_tl_i32(cpu_asi, cpu_tmp0);
3648 break;
3649 case 0x6: /* V9 wrfprs */
3650 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3651 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
3652 save_state(dc);
3653 gen_op_next_insn();
3654 tcg_gen_exit_tb(0);
3655 dc->is_br = 1;
3656 break;
3657 case 0xf: /* V9 sir, nop if user */
3658 #if !defined(CONFIG_USER_ONLY)
3659 if (supervisor(dc)) {
3660 ; // XXX
3661 }
3662 #endif
3663 break;
3664 case 0x13: /* Graphics Status */
3665 if (gen_trap_ifnofpu(dc)) {
3666 goto jmp_insn;
3667 }
3668 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3669 break;
3670 case 0x14: /* Softint set */
3671 if (!supervisor(dc))
3672 goto illegal_insn;
3673 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3674 gen_helper_set_softint(cpu_env, cpu_tmp0);
3675 break;
3676 case 0x15: /* Softint clear */
3677 if (!supervisor(dc))
3678 goto illegal_insn;
3679 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3680 gen_helper_clear_softint(cpu_env, cpu_tmp0);
3681 break;
3682 case 0x16: /* Softint write */
3683 if (!supervisor(dc))
3684 goto illegal_insn;
3685 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3686 gen_helper_write_softint(cpu_env, cpu_tmp0);
3687 break;
3688 case 0x17: /* Tick compare */
3689 #if !defined(CONFIG_USER_ONLY)
3690 if (!supervisor(dc))
3691 goto illegal_insn;
3692 #endif
3694 TCGv_ptr r_tickptr;
3696 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3697 cpu_src2);
3698 r_tickptr = tcg_temp_new_ptr();
3699 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3700 offsetof(CPUSPARCState, tick));
3701 gen_helper_tick_set_limit(r_tickptr,
3702 cpu_tick_cmpr);
3703 tcg_temp_free_ptr(r_tickptr);
3705 break;
3706 case 0x18: /* System tick */
3707 #if !defined(CONFIG_USER_ONLY)
3708 if (!supervisor(dc))
3709 goto illegal_insn;
3710 #endif
3712 TCGv_ptr r_tickptr;
3714 tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3715 cpu_src2);
3716 r_tickptr = tcg_temp_new_ptr();
3717 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3718 offsetof(CPUSPARCState, stick));
3719 gen_helper_tick_set_count(r_tickptr,
3720 cpu_tmp0);
3721 tcg_temp_free_ptr(r_tickptr);
3723 break;
3724 case 0x19: /* System tick compare */
3725 #if !defined(CONFIG_USER_ONLY)
3726 if (!supervisor(dc))
3727 goto illegal_insn;
3728 #endif
3730 TCGv_ptr r_tickptr;
3732 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3733 cpu_src2);
3734 r_tickptr = tcg_temp_new_ptr();
3735 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3736 offsetof(CPUSPARCState, stick));
3737 gen_helper_tick_set_limit(r_tickptr,
3738 cpu_stick_cmpr);
3739 tcg_temp_free_ptr(r_tickptr);
3741 break;
3743 case 0x10: /* Performance Control */
3744 case 0x11: /* Performance Instrumentation
3745 Counter */
3746 case 0x12: /* Dispatch Control */
3747 #endif
3748 default:
3749 goto illegal_insn;
3752 break;
3753 #if !defined(CONFIG_USER_ONLY)
3754 case 0x31: /* wrpsr, V9 saved, restored */
3756 if (!supervisor(dc))
3757 goto priv_insn;
3758 #ifdef TARGET_SPARC64
3759 switch (rd) {
3760 case 0:
3761 gen_helper_saved(cpu_env);
3762 break;
3763 case 1:
3764 gen_helper_restored(cpu_env);
3765 break;
3766 case 2: /* UA2005 allclean */
3767 case 3: /* UA2005 otherw */
3768 case 4: /* UA2005 normalw */
3769 case 5: /* UA2005 invalw */
3770 // XXX
3771 default:
3772 goto illegal_insn;
3774 #else
3775 cpu_tmp0 = get_temp_tl(dc);
3776 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3777 gen_helper_wrpsr(cpu_env, cpu_tmp0);
3778 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3779 dc->cc_op = CC_OP_FLAGS;
3780 save_state(dc);
3781 gen_op_next_insn();
3782 tcg_gen_exit_tb(0);
3783 dc->is_br = 1;
3784 #endif
3786 break;
3787 case 0x32: /* wrwim, V9 wrpr */
3789 if (!supervisor(dc))
3790 goto priv_insn;
3791 cpu_tmp0 = get_temp_tl(dc);
3792 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3793 #ifdef TARGET_SPARC64
3794 switch (rd) {
3795 case 0: // tpc
3797 TCGv_ptr r_tsptr;
3799 r_tsptr = tcg_temp_new_ptr();
3800 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3801 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3802 offsetof(trap_state, tpc));
3803 tcg_temp_free_ptr(r_tsptr);
3805 break;
3806 case 1: // tnpc
3808 TCGv_ptr r_tsptr;
3810 r_tsptr = tcg_temp_new_ptr();
3811 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3812 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3813 offsetof(trap_state, tnpc));
3814 tcg_temp_free_ptr(r_tsptr);
3816 break;
3817 case 2: // tstate
3819 TCGv_ptr r_tsptr;
3821 r_tsptr = tcg_temp_new_ptr();
3822 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3823 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3824 offsetof(trap_state,
3825 tstate));
3826 tcg_temp_free_ptr(r_tsptr);
3828 break;
3829 case 3: // tt
3831 TCGv_ptr r_tsptr;
3833 r_tsptr = tcg_temp_new_ptr();
3834 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3835 tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
3836 offsetof(trap_state, tt));
3837 tcg_temp_free_ptr(r_tsptr);
3839 break;
3840 case 4: // tick
3842 TCGv_ptr r_tickptr;
3844 r_tickptr = tcg_temp_new_ptr();
3845 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3846 offsetof(CPUSPARCState, tick));
3847 gen_helper_tick_set_count(r_tickptr,
3848 cpu_tmp0);
3849 tcg_temp_free_ptr(r_tickptr);
3851 break;
3852 case 5: // tba
3853 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3854 break;
3855 case 6: // pstate
3856 save_state(dc);
3857 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3858 dc->npc = DYNAMIC_PC;
3859 break;
3860 case 7: // tl
3861 save_state(dc);
3862 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3863 offsetof(CPUSPARCState, tl));
3864 dc->npc = DYNAMIC_PC;
3865 break;
3866 case 8: // pil
3867 gen_helper_wrpil(cpu_env, cpu_tmp0);
3868 break;
3869 case 9: // cwp
3870 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3871 break;
3872 case 10: // cansave
3873 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3874 offsetof(CPUSPARCState,
3875 cansave));
3876 break;
3877 case 11: // canrestore
3878 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3879 offsetof(CPUSPARCState,
3880 canrestore));
3881 break;
3882 case 12: // cleanwin
3883 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3884 offsetof(CPUSPARCState,
3885 cleanwin));
3886 break;
3887 case 13: // otherwin
3888 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3889 offsetof(CPUSPARCState,
3890 otherwin));
3891 break;
3892 case 14: // wstate
3893 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3894 offsetof(CPUSPARCState,
3895 wstate));
3896 break;
3897 case 16: // UA2005 gl
3898 CHECK_IU_FEATURE(dc, GL);
3899 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
3900 offsetof(CPUSPARCState, gl));
3901 break;
3902 case 26: // UA2005 strand status
3903 CHECK_IU_FEATURE(dc, HYPV);
3904 if (!hypervisor(dc))
3905 goto priv_insn;
3906 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3907 break;
3908 default:
3909 goto illegal_insn;
3911 #else
3912 tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
3913 if (dc->def->nwindows != 32) {
3914 tcg_gen_andi_tl(cpu_wim, cpu_wim,
3915 (1 << dc->def->nwindows) - 1);
3917 #endif
3919 break;
3920 case 0x33: /* wrtbr, UA2005 wrhpr */
3922 #ifndef TARGET_SPARC64
3923 if (!supervisor(dc))
3924 goto priv_insn;
3925 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3926 #else
3927 CHECK_IU_FEATURE(dc, HYPV);
3928 if (!hypervisor(dc))
3929 goto priv_insn;
3930 cpu_tmp0 = get_temp_tl(dc);
3931 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3932 switch (rd) {
3933 case 0: // hpstate
3934 // XXX gen_op_wrhpstate();
3935 save_state(dc);
3936 gen_op_next_insn();
3937 tcg_gen_exit_tb(0);
3938 dc->is_br = 1;
3939 break;
3940 case 1: // htstate
3941 // XXX gen_op_wrhtstate();
3942 break;
3943 case 3: // hintp
3944 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3945 break;
3946 case 5: // htba
3947 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3948 break;
3949 case 31: // hstick_cmpr
3951 TCGv_ptr r_tickptr;
3953 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3954 r_tickptr = tcg_temp_new_ptr();
3955 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3956 offsetof(CPUSPARCState, hstick));
3957 gen_helper_tick_set_limit(r_tickptr,
3958 cpu_hstick_cmpr);
3959 tcg_temp_free_ptr(r_tickptr);
3961 break;
3962 case 6: // hver readonly
3963 default:
3964 goto illegal_insn;
3966 #endif
3968 break;
3969 #endif
3970 #ifdef TARGET_SPARC64
3971 case 0x2c: /* V9 movcc */
3973 int cc = GET_FIELD_SP(insn, 11, 12);
3974 int cond = GET_FIELD_SP(insn, 14, 17);
3975 DisasCompare cmp;
3976 TCGv dst;
3978 if (insn & (1 << 18)) {
3979 if (cc == 0) {
3980 gen_compare(&cmp, 0, cond, dc);
3981 } else if (cc == 2) {
3982 gen_compare(&cmp, 1, cond, dc);
3983 } else {
3984 goto illegal_insn;
3986 } else {
3987 gen_fcompare(&cmp, cc, cond);
3990 /* The get_src2 above loaded the normal 13-bit
3991 immediate field, not the 11-bit field we have
3992 in movcc. But it did handle the reg case. */
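/* e.g. movg %icc, -5, %o0: the 11-bit field re-read here supplies
   simm = -5 in place of the 13-bit value fetched by get_src2. */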
3993 if (IS_IMM) {
3994 simm = GET_FIELD_SPs(insn, 0, 10);
3995 tcg_gen_movi_tl(cpu_src2, simm);
3998 dst = gen_load_gpr(dc, rd);
3999 tcg_gen_movcond_tl(cmp.cond, dst,
4000 cmp.c1, cmp.c2,
4001 cpu_src2, dst);
4002 free_compare(&cmp);
4003 gen_store_gpr(dc, rd, dst);
4004 break;
4006 case 0x2d: /* V9 sdivx */
4007 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4008 gen_store_gpr(dc, rd, cpu_dst);
4009 break;
4010 case 0x2e: /* V9 popc */
4011 gen_helper_popc(cpu_dst, cpu_src2);
4012 gen_store_gpr(dc, rd, cpu_dst);
4013 break;
4014 case 0x2f: /* V9 movr */
4016 int cond = GET_FIELD_SP(insn, 10, 12);
4017 DisasCompare cmp;
4018 TCGv dst;
4020 gen_compare_reg(&cmp, cond, cpu_src1);
4022 /* The get_src2 above loaded the normal 13-bit
4023 immediate field, not the 10-bit field we have
4024 in movr. But it did handle the reg case. */
4025 if (IS_IMM) {
4026 simm = GET_FIELD_SPs(insn, 0, 9);
4027 tcg_gen_movi_tl(cpu_src2, simm);
4030 dst = gen_load_gpr(dc, rd);
4031 tcg_gen_movcond_tl(cmp.cond, dst,
4032 cmp.c1, cmp.c2,
4033 cpu_src2, dst);
4034 free_compare(&cmp);
4035 gen_store_gpr(dc, rd, dst);
4036 break;
4038 #endif
4039 default:
4040 goto illegal_insn;
4043 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4044 #ifdef TARGET_SPARC64
4045 int opf = GET_FIELD_SP(insn, 5, 13);
4046 rs1 = GET_FIELD(insn, 13, 17);
4047 rs2 = GET_FIELD(insn, 27, 31);
4048 if (gen_trap_ifnofpu(dc)) {
4049 goto jmp_insn;
4050 }
4052 switch (opf) {
4053 case 0x000: /* VIS I edge8cc */
4054 CHECK_FPU_FEATURE(dc, VIS1);
4055 cpu_src1 = gen_load_gpr(dc, rs1);
4056 cpu_src2 = gen_load_gpr(dc, rs2);
4057 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4058 gen_store_gpr(dc, rd, cpu_dst);
4059 break;
4060 case 0x001: /* VIS II edge8n */
4061 CHECK_FPU_FEATURE(dc, VIS2);
4062 cpu_src1 = gen_load_gpr(dc, rs1);
4063 cpu_src2 = gen_load_gpr(dc, rs2);
4064 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4065 gen_store_gpr(dc, rd, cpu_dst);
4066 break;
4067 case 0x002: /* VIS I edge8lcc */
4068 CHECK_FPU_FEATURE(dc, VIS1);
4069 cpu_src1 = gen_load_gpr(dc, rs1);
4070 cpu_src2 = gen_load_gpr(dc, rs2);
4071 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4072 gen_store_gpr(dc, rd, cpu_dst);
4073 break;
4074 case 0x003: /* VIS II edge8ln */
4075 CHECK_FPU_FEATURE(dc, VIS2);
4076 cpu_src1 = gen_load_gpr(dc, rs1);
4077 cpu_src2 = gen_load_gpr(dc, rs2);
4078 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4079 gen_store_gpr(dc, rd, cpu_dst);
4080 break;
4081 case 0x004: /* VIS I edge16cc */
4082 CHECK_FPU_FEATURE(dc, VIS1);
4083 cpu_src1 = gen_load_gpr(dc, rs1);
4084 cpu_src2 = gen_load_gpr(dc, rs2);
4085 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4086 gen_store_gpr(dc, rd, cpu_dst);
4087 break;
4088 case 0x005: /* VIS II edge16n */
4089 CHECK_FPU_FEATURE(dc, VIS2);
4090 cpu_src1 = gen_load_gpr(dc, rs1);
4091 cpu_src2 = gen_load_gpr(dc, rs2);
4092 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4093 gen_store_gpr(dc, rd, cpu_dst);
4094 break;
4095 case 0x006: /* VIS I edge16lcc */
4096 CHECK_FPU_FEATURE(dc, VIS1);
4097 cpu_src1 = gen_load_gpr(dc, rs1);
4098 cpu_src2 = gen_load_gpr(dc, rs2);
4099 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4100 gen_store_gpr(dc, rd, cpu_dst);
4101 break;
4102 case 0x007: /* VIS II edge16ln */
4103 CHECK_FPU_FEATURE(dc, VIS2);
4104 cpu_src1 = gen_load_gpr(dc, rs1);
4105 cpu_src2 = gen_load_gpr(dc, rs2);
4106 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4107 gen_store_gpr(dc, rd, cpu_dst);
4108 break;
4109 case 0x008: /* VIS I edge32cc */
4110 CHECK_FPU_FEATURE(dc, VIS1);
4111 cpu_src1 = gen_load_gpr(dc, rs1);
4112 cpu_src2 = gen_load_gpr(dc, rs2);
4113 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4114 gen_store_gpr(dc, rd, cpu_dst);
4115 break;
4116 case 0x009: /* VIS II edge32n */
4117 CHECK_FPU_FEATURE(dc, VIS2);
4118 cpu_src1 = gen_load_gpr(dc, rs1);
4119 cpu_src2 = gen_load_gpr(dc, rs2);
4120 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4121 gen_store_gpr(dc, rd, cpu_dst);
4122 break;
4123 case 0x00a: /* VIS I edge32lcc */
4124 CHECK_FPU_FEATURE(dc, VIS1);
4125 cpu_src1 = gen_load_gpr(dc, rs1);
4126 cpu_src2 = gen_load_gpr(dc, rs2);
4127 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4128 gen_store_gpr(dc, rd, cpu_dst);
4129 break;
4130 case 0x00b: /* VIS II edge32ln */
4131 CHECK_FPU_FEATURE(dc, VIS2);
4132 cpu_src1 = gen_load_gpr(dc, rs1);
4133 cpu_src2 = gen_load_gpr(dc, rs2);
4134 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4135 gen_store_gpr(dc, rd, cpu_dst);
4136 break;
4137 case 0x010: /* VIS I array8 */
4138 CHECK_FPU_FEATURE(dc, VIS1);
4139 cpu_src1 = gen_load_gpr(dc, rs1);
4140 cpu_src2 = gen_load_gpr(dc, rs2);
4141 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4142 gen_store_gpr(dc, rd, cpu_dst);
4143 break;
4144 case 0x012: /* VIS I array16 */
4145 CHECK_FPU_FEATURE(dc, VIS1);
4146 cpu_src1 = gen_load_gpr(dc, rs1);
4147 cpu_src2 = gen_load_gpr(dc, rs2);
4148 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4149 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4150 gen_store_gpr(dc, rd, cpu_dst);
4151 break;
4152 case 0x014: /* VIS I array32 */
4153 CHECK_FPU_FEATURE(dc, VIS1);
4154 cpu_src1 = gen_load_gpr(dc, rs1);
4155 cpu_src2 = gen_load_gpr(dc, rs2);
4156 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4157 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4158 gen_store_gpr(dc, rd, cpu_dst);
4159 break;
4160 case 0x018: /* VIS I alignaddr */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 cpu_src1 = gen_load_gpr(dc, rs1);
4163 cpu_src2 = gen_load_gpr(dc, rs2);
4164 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4165 gen_store_gpr(dc, rd, cpu_dst);
4166 break;
4167 case 0x01a: /* VIS I alignaddrl */
4168 CHECK_FPU_FEATURE(dc, VIS1);
4169 cpu_src1 = gen_load_gpr(dc, rs1);
4170 cpu_src2 = gen_load_gpr(dc, rs2);
4171 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4172 gen_store_gpr(dc, rd, cpu_dst);
4173 break;
4174 case 0x019: /* VIS II bmask */
4175 CHECK_FPU_FEATURE(dc, VIS2);
4176 cpu_src1 = gen_load_gpr(dc, rs1);
4177 cpu_src2 = gen_load_gpr(dc, rs2);
4178 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4179 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4180 gen_store_gpr(dc, rd, cpu_dst);
4181 break;
4182 case 0x020: /* VIS I fcmple16 */
4183 CHECK_FPU_FEATURE(dc, VIS1);
4184 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4185 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4186 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4187 gen_store_gpr(dc, rd, cpu_dst);
4188 break;
4189 case 0x022: /* VIS I fcmpne16 */
4190 CHECK_FPU_FEATURE(dc, VIS1);
4191 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4192 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4193 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4194 gen_store_gpr(dc, rd, cpu_dst);
4195 break;
4196 case 0x024: /* VIS I fcmple32 */
4197 CHECK_FPU_FEATURE(dc, VIS1);
4198 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4199 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4200 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4201 gen_store_gpr(dc, rd, cpu_dst);
4202 break;
4203 case 0x026: /* VIS I fcmpne32 */
4204 CHECK_FPU_FEATURE(dc, VIS1);
4205 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4206 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4207 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4208 gen_store_gpr(dc, rd, cpu_dst);
4209 break;
4210 case 0x028: /* VIS I fcmpgt16 */
4211 CHECK_FPU_FEATURE(dc, VIS1);
4212 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4213 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4214 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4215 gen_store_gpr(dc, rd, cpu_dst);
4216 break;
4217 case 0x02a: /* VIS I fcmpeq16 */
4218 CHECK_FPU_FEATURE(dc, VIS1);
4219 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4220 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4221 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4222 gen_store_gpr(dc, rd, cpu_dst);
4223 break;
4224 case 0x02c: /* VIS I fcmpgt32 */
4225 CHECK_FPU_FEATURE(dc, VIS1);
4226 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4227 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4228 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4229 gen_store_gpr(dc, rd, cpu_dst);
4230 break;
4231 case 0x02e: /* VIS I fcmpeq32 */
4232 CHECK_FPU_FEATURE(dc, VIS1);
4233 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4234 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4235 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4236 gen_store_gpr(dc, rd, cpu_dst);
4237 break;
4238 case 0x031: /* VIS I fmul8x16 */
4239 CHECK_FPU_FEATURE(dc, VIS1);
4240 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4241 break;
4242 case 0x033: /* VIS I fmul8x16au */
4243 CHECK_FPU_FEATURE(dc, VIS1);
4244 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4245 break;
4246 case 0x035: /* VIS I fmul8x16al */
4247 CHECK_FPU_FEATURE(dc, VIS1);
4248 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4249 break;
4250 case 0x036: /* VIS I fmul8sux16 */
4251 CHECK_FPU_FEATURE(dc, VIS1);
4252 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4253 break;
4254 case 0x037: /* VIS I fmul8ulx16 */
4255 CHECK_FPU_FEATURE(dc, VIS1);
4256 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4257 break;
4258 case 0x038: /* VIS I fmuld8sux16 */
4259 CHECK_FPU_FEATURE(dc, VIS1);
4260 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4261 break;
4262 case 0x039: /* VIS I fmuld8ulx16 */
4263 CHECK_FPU_FEATURE(dc, VIS1);
4264 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4265 break;
4266 case 0x03a: /* VIS I fpack32 */
4267 CHECK_FPU_FEATURE(dc, VIS1);
4268 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4269 break;
4270 case 0x03b: /* VIS I fpack16 */
4271 CHECK_FPU_FEATURE(dc, VIS1);
4272 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4273 cpu_dst_32 = gen_dest_fpr_F(dc);
4274 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4275 gen_store_fpr_F(dc, rd, cpu_dst_32);
4276 break;
4277 case 0x03d: /* VIS I fpackfix */
4278 CHECK_FPU_FEATURE(dc, VIS1);
4279 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4280 cpu_dst_32 = gen_dest_fpr_F(dc);
4281 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4282 gen_store_fpr_F(dc, rd, cpu_dst_32);
4283 break;
4284 case 0x03e: /* VIS I pdist */
4285 CHECK_FPU_FEATURE(dc, VIS1);
4286 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4287 break;
4288 case 0x048: /* VIS I faligndata */
4289 CHECK_FPU_FEATURE(dc, VIS1);
4290 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4291 break;
4292 case 0x04b: /* VIS I fpmerge */
4293 CHECK_FPU_FEATURE(dc, VIS1);
4294 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4295 break;
4296 case 0x04c: /* VIS II bshuffle */
4297 CHECK_FPU_FEATURE(dc, VIS2);
4298 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4299 break;
4300 case 0x04d: /* VIS I fexpand */
4301 CHECK_FPU_FEATURE(dc, VIS1);
4302 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4303 break;
4304 case 0x050: /* VIS I fpadd16 */
4305 CHECK_FPU_FEATURE(dc, VIS1);
4306 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4307 break;
4308 case 0x051: /* VIS I fpadd16s */
4309 CHECK_FPU_FEATURE(dc, VIS1);
4310 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4311 break;
4312 case 0x052: /* VIS I fpadd32 */
4313 CHECK_FPU_FEATURE(dc, VIS1);
4314 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4315 break;
4316 case 0x053: /* VIS I fpadd32s */
4317 CHECK_FPU_FEATURE(dc, VIS1);
4318 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4319 break;
4320 case 0x054: /* VIS I fpsub16 */
4321 CHECK_FPU_FEATURE(dc, VIS1);
4322 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4323 break;
4324 case 0x055: /* VIS I fpsub16s */
4325 CHECK_FPU_FEATURE(dc, VIS1);
4326 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4327 break;
4328 case 0x056: /* VIS I fpsub32 */
4329 CHECK_FPU_FEATURE(dc, VIS1);
4330 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4331 break;
4332 case 0x057: /* VIS I fpsub32s */
4333 CHECK_FPU_FEATURE(dc, VIS1);
4334 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4335 break;
4336 case 0x060: /* VIS I fzero */
4337 CHECK_FPU_FEATURE(dc, VIS1);
4338 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4339 tcg_gen_movi_i64(cpu_dst_64, 0);
4340 gen_store_fpr_D(dc, rd, cpu_dst_64);
4341 break;
4342 case 0x061: /* VIS I fzeros */
4343 CHECK_FPU_FEATURE(dc, VIS1);
4344 cpu_dst_32 = gen_dest_fpr_F(dc);
4345 tcg_gen_movi_i32(cpu_dst_32, 0);
4346 gen_store_fpr_F(dc, rd, cpu_dst_32);
4347 break;
4348 case 0x062: /* VIS I fnor */
4349 CHECK_FPU_FEATURE(dc, VIS1);
4350 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4351 break;
4352 case 0x063: /* VIS I fnors */
4353 CHECK_FPU_FEATURE(dc, VIS1);
4354 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4355 break;
4356 case 0x064: /* VIS I fandnot2 */
4357 CHECK_FPU_FEATURE(dc, VIS1);
4358 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4359 break;
4360 case 0x065: /* VIS I fandnot2s */
4361 CHECK_FPU_FEATURE(dc, VIS1);
4362 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4363 break;
4364 case 0x066: /* VIS I fnot2 */
4365 CHECK_FPU_FEATURE(dc, VIS1);
4366 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4367 break;
4368 case 0x067: /* VIS I fnot2s */
4369 CHECK_FPU_FEATURE(dc, VIS1);
4370 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4371 break;
4372 case 0x068: /* VIS I fandnot1 */
4373 CHECK_FPU_FEATURE(dc, VIS1);
4374 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4375 break;
4376 case 0x069: /* VIS I fandnot1s */
4377 CHECK_FPU_FEATURE(dc, VIS1);
4378 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4379 break;
4380 case 0x06a: /* VIS I fnot1 */
4381 CHECK_FPU_FEATURE(dc, VIS1);
4382 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4383 break;
4384 case 0x06b: /* VIS I fnot1s */
4385 CHECK_FPU_FEATURE(dc, VIS1);
4386 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4387 break;
4388 case 0x06c: /* VIS I fxor */
4389 CHECK_FPU_FEATURE(dc, VIS1);
4390 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4391 break;
4392 case 0x06d: /* VIS I fxors */
4393 CHECK_FPU_FEATURE(dc, VIS1);
4394 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4395 break;
4396 case 0x06e: /* VIS I fnand */
4397 CHECK_FPU_FEATURE(dc, VIS1);
4398 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4399 break;
4400 case 0x06f: /* VIS I fnands */
4401 CHECK_FPU_FEATURE(dc, VIS1);
4402 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4403 break;
4404 case 0x070: /* VIS I fand */
4405 CHECK_FPU_FEATURE(dc, VIS1);
4406 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4407 break;
4408 case 0x071: /* VIS I fands */
4409 CHECK_FPU_FEATURE(dc, VIS1);
4410 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4411 break;
4412 case 0x072: /* VIS I fxnor */
4413 CHECK_FPU_FEATURE(dc, VIS1);
4414 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4415 break;
4416 case 0x073: /* VIS I fxnors */
4417 CHECK_FPU_FEATURE(dc, VIS1);
4418 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4419 break;
4420 case 0x074: /* VIS I fsrc1 */
4421 CHECK_FPU_FEATURE(dc, VIS1);
4422 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4423 gen_store_fpr_D(dc, rd, cpu_src1_64);
4424 break;
4425 case 0x075: /* VIS I fsrc1s */
4426 CHECK_FPU_FEATURE(dc, VIS1);
4427 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4428 gen_store_fpr_F(dc, rd, cpu_src1_32);
4429 break;
4430 case 0x076: /* VIS I fornot2 */
4431 CHECK_FPU_FEATURE(dc, VIS1);
4432 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4433 break;
4434 case 0x077: /* VIS I fornot2s */
4435 CHECK_FPU_FEATURE(dc, VIS1);
4436 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4437 break;
4438 case 0x078: /* VIS I fsrc2 */
4439 CHECK_FPU_FEATURE(dc, VIS1);
4440 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4441 gen_store_fpr_D(dc, rd, cpu_src1_64);
4442 break;
4443 case 0x079: /* VIS I fsrc2s */
4444 CHECK_FPU_FEATURE(dc, VIS1);
4445 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4446 gen_store_fpr_F(dc, rd, cpu_src1_32);
4447 break;
4448 case 0x07a: /* VIS I fornot1 */
4449 CHECK_FPU_FEATURE(dc, VIS1);
4450 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4451 break;
4452 case 0x07b: /* VIS I fornot1s */
4453 CHECK_FPU_FEATURE(dc, VIS1);
4454 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4455 break;
4456 case 0x07c: /* VIS I for */
4457 CHECK_FPU_FEATURE(dc, VIS1);
4458 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4459 break;
4460 case 0x07d: /* VIS I fors */
4461 CHECK_FPU_FEATURE(dc, VIS1);
4462 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4463 break;
4464 case 0x07e: /* VIS I fone */
4465 CHECK_FPU_FEATURE(dc, VIS1);
4466 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4467 tcg_gen_movi_i64(cpu_dst_64, -1);
4468 gen_store_fpr_D(dc, rd, cpu_dst_64);
4469 break;
4470 case 0x07f: /* VIS I fones */
4471 CHECK_FPU_FEATURE(dc, VIS1);
4472 cpu_dst_32 = gen_dest_fpr_F(dc);
4473 tcg_gen_movi_i32(cpu_dst_32, -1);
4474 gen_store_fpr_F(dc, rd, cpu_dst_32);
4475 break;
4476 case 0x080: /* VIS I shutdown */
4477 case 0x081: /* VIS II siam */
4478 // XXX
4479 goto illegal_insn;
4480 default:
4481 goto illegal_insn;
4482 }
4483 #else
4484 goto ncp_insn;
4485 #endif
4486 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4487 #ifdef TARGET_SPARC64
4488 goto illegal_insn;
4489 #else
4490 goto ncp_insn;
4491 #endif
4492 #ifdef TARGET_SPARC64
4493 } else if (xop == 0x39) { /* V9 return */
4494 TCGv_i32 r_const;
4496 save_state(dc);
4497 cpu_src1 = get_src1(dc, insn);
4498 cpu_tmp0 = get_temp_tl(dc);
4499 if (IS_IMM) { /* immediate */
4500 simm = GET_FIELDs(insn, 19, 31);
4501 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4502 } else { /* register */
4503 rs2 = GET_FIELD(insn, 27, 31);
4504 if (rs2) {
4505 cpu_src2 = gen_load_gpr(dc, rs2);
4506 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4507 } else {
4508 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4509 }
4510 }
4511 gen_helper_restore(cpu_env);
4512 gen_mov_pc_npc(dc);
4513 r_const = tcg_const_i32(3);
4514 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4515 tcg_temp_free_i32(r_const);
4516 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4517 dc->npc = DYNAMIC_PC;
4518 goto jmp_insn;
4519 #endif
4520 } else {
4521 cpu_src1 = get_src1(dc, insn);
4522 cpu_tmp0 = get_temp_tl(dc);
4523 if (IS_IMM) { /* immediate */
4524 simm = GET_FIELDs(insn, 19, 31);
4525 tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4526 } else { /* register */
4527 rs2 = GET_FIELD(insn, 27, 31);
4528 if (rs2) {
4529 cpu_src2 = gen_load_gpr(dc, rs2);
4530 tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4531 } else {
4532 tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4533 }
4534 }
4535 switch (xop) {
4536 case 0x38: /* jmpl */
4537 {
4538 TCGv t;
4539 TCGv_i32 r_const;
4541 t = gen_dest_gpr(dc, rd);
4542 tcg_gen_movi_tl(t, dc->pc);
4543 gen_store_gpr(dc, rd, t);
4544 gen_mov_pc_npc(dc);
4545 r_const = tcg_const_i32(3);
4546 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4547 tcg_temp_free_i32(r_const);
4548 gen_address_mask(dc, cpu_tmp0);
4549 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4550 dc->npc = DYNAMIC_PC;
4551 }
4552 goto jmp_insn;
4553 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4554 case 0x39: /* rett, V9 return */
4555 {
4556 TCGv_i32 r_const;
4558 if (!supervisor(dc))
4559 goto priv_insn;
4560 gen_mov_pc_npc(dc);
4561 r_const = tcg_const_i32(3);
4562 gen_helper_check_align(cpu_env, cpu_tmp0, r_const);
4563 tcg_temp_free_i32(r_const);
4564 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4565 dc->npc = DYNAMIC_PC;
4566 gen_helper_rett(cpu_env);
4567 }
4568 goto jmp_insn;
4569 #endif
4570 case 0x3b: /* flush */
4571 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4572 goto unimp_flush;
4573 /* nop */
4574 break;
4575 case 0x3c: /* save */
4576 save_state(dc);
4577 gen_helper_save(cpu_env);
4578 gen_store_gpr(dc, rd, cpu_tmp0);
4579 break;
4580 case 0x3d: /* restore */
4581 save_state(dc);
4582 gen_helper_restore(cpu_env);
4583 gen_store_gpr(dc, rd, cpu_tmp0);
4584 break;
4585 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4586 case 0x3e: /* V9 done/retry */
4587 {
4588 switch (rd) {
4589 case 0:
4590 if (!supervisor(dc))
4591 goto priv_insn;
4592 dc->npc = DYNAMIC_PC;
4593 dc->pc = DYNAMIC_PC;
4594 gen_helper_done(cpu_env);
4595 goto jmp_insn;
4596 case 1:
4597 if (!supervisor(dc))
4598 goto priv_insn;
4599 dc->npc = DYNAMIC_PC;
4600 dc->pc = DYNAMIC_PC;
4601 gen_helper_retry(cpu_env);
4602 goto jmp_insn;
4603 default:
4604 goto illegal_insn;
4605 }
4606 }
4607 break;
4608 #endif
4609 default:
4610 goto illegal_insn;
4611 }
4612 }
4613 break;
4614 }
4615 break;
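/* Load/store decode: the effective address below is rs1 plus either a
   13-bit signed immediate or rs2; the V9 casa/casxa forms take no offset. */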
4616 case 3: /* load/store instructions */
4617 {
4618 unsigned int xop = GET_FIELD(insn, 7, 12);
4619 /* ??? gen_address_mask prevents us from using a source
4620 register directly. Always generate a temporary. */
4621 TCGv cpu_addr = get_temp_tl(dc);
4623 tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
4624 if (xop == 0x3c || xop == 0x3e) {
4625 /* V9 casa/casxa : no offset */
4626 } else if (IS_IMM) { /* immediate */
4627 simm = GET_FIELDs(insn, 19, 31);
4628 if (simm != 0) {
4629 tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
4630 }
4631 } else { /* register */
4632 rs2 = GET_FIELD(insn, 27, 31);
4633 if (rs2 != 0) {
4634 tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
4635 }
4636 }
4637 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4638 (xop > 0x17 && xop <= 0x1d ) ||
4639 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4640 TCGv cpu_val = gen_dest_gpr(dc, rd);
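/* Integer loads: the result is built in cpu_val and written back to the
   rd GPR by the gen_store_gpr() after this switch; alternate-space forms
   go through the gen_ld_asi() helpers. */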
4642 switch (xop) {
4643 case 0x0: /* ld, V9 lduw, load unsigned word */
4644 gen_address_mask(dc, cpu_addr);
4645 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4646 break;
4647 case 0x1: /* ldub, load unsigned byte */
4648 gen_address_mask(dc, cpu_addr);
4649 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4650 break;
4651 case 0x2: /* lduh, load unsigned halfword */
4652 gen_address_mask(dc, cpu_addr);
4653 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4654 break;
4655 case 0x3: /* ldd, load double word */
4656 if (rd & 1)
4657 goto illegal_insn;
4658 else {
4659 TCGv_i32 r_const;
4660 TCGv_i64 t64;
4662 save_state(dc);
4663 r_const = tcg_const_i32(7);
4664 /* XXX remove alignment check */
4665 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4666 tcg_temp_free_i32(r_const);
4667 gen_address_mask(dc, cpu_addr);
4668 t64 = tcg_temp_new_i64();
4669 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4670 tcg_gen_trunc_i64_tl(cpu_val, t64);
4671 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4672 gen_store_gpr(dc, rd + 1, cpu_val);
4673 tcg_gen_shri_i64(t64, t64, 32);
4674 tcg_gen_trunc_i64_tl(cpu_val, t64);
4675 tcg_temp_free_i64(t64);
4676 tcg_gen_ext32u_tl(cpu_val, cpu_val);
4677 }
4678 break;
4679 case 0x9: /* ldsb, load signed byte */
4680 gen_address_mask(dc, cpu_addr);
4681 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4682 break;
4683 case 0xa: /* ldsh, load signed halfword */
4684 gen_address_mask(dc, cpu_addr);
4685 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4686 break;
4687 case 0xd: /* ldstub -- XXX: should be atomic */
4688 {
4689 TCGv r_const;
4691 gen_address_mask(dc, cpu_addr);
4692 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4693 r_const = tcg_const_tl(0xff);
4694 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4695 tcg_temp_free(r_const);
4696 }
4697 break;
4698 case 0x0f:
4699 /* swap, swap register with memory. Should also be atomic */
4700 {
4701 TCGv t0 = get_temp_tl(dc);
4702 CHECK_IU_FEATURE(dc, SWAP);
4703 cpu_src1 = gen_load_gpr(dc, rd);
4704 gen_address_mask(dc, cpu_addr);
4705 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4706 tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
4707 tcg_gen_mov_tl(cpu_val, t0);
4708 }
4709 break;
4710 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4711 case 0x10: /* lda, V9 lduwa, load word alternate */
4712 #ifndef TARGET_SPARC64
4713 if (IS_IMM)
4714 goto illegal_insn;
4715 if (!supervisor(dc))
4716 goto priv_insn;
4717 #endif
4718 save_state(dc);
4719 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4720 break;
4721 case 0x11: /* lduba, load unsigned byte alternate */
4722 #ifndef TARGET_SPARC64
4723 if (IS_IMM)
4724 goto illegal_insn;
4725 if (!supervisor(dc))
4726 goto priv_insn;
4727 #endif
4728 save_state(dc);
4729 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4730 break;
4731 case 0x12: /* lduha, load unsigned halfword alternate */
4732 #ifndef TARGET_SPARC64
4733 if (IS_IMM)
4734 goto illegal_insn;
4735 if (!supervisor(dc))
4736 goto priv_insn;
4737 #endif
4738 save_state(dc);
4739 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4740 break;
4741 case 0x13: /* ldda, load double word alternate */
4742 #ifndef TARGET_SPARC64
4743 if (IS_IMM)
4744 goto illegal_insn;
4745 if (!supervisor(dc))
4746 goto priv_insn;
4747 #endif
4748 if (rd & 1)
4749 goto illegal_insn;
4750 save_state(dc);
4751 gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
4752 goto skip_move;
4753 case 0x19: /* ldsba, load signed byte alternate */
4754 #ifndef TARGET_SPARC64
4755 if (IS_IMM)
4756 goto illegal_insn;
4757 if (!supervisor(dc))
4758 goto priv_insn;
4759 #endif
4760 save_state(dc);
4761 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4762 break;
4763 case 0x1a: /* ldsha, load signed halfword alternate */
4764 #ifndef TARGET_SPARC64
4765 if (IS_IMM)
4766 goto illegal_insn;
4767 if (!supervisor(dc))
4768 goto priv_insn;
4769 #endif
4770 save_state(dc);
4771 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4772 break;
4773 case 0x1d: /* ldstuba -- XXX: should be atomic */
4774 #ifndef TARGET_SPARC64
4775 if (IS_IMM)
4776 goto illegal_insn;
4777 if (!supervisor(dc))
4778 goto priv_insn;
4779 #endif
4780 save_state(dc);
4781 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4782 break;
4783 case 0x1f: /* swapa, swap reg with alt. memory. Should
4784 also be atomic */
4785 CHECK_IU_FEATURE(dc, SWAP);
4786 #ifndef TARGET_SPARC64
4787 if (IS_IMM)
4788 goto illegal_insn;
4789 if (!supervisor(dc))
4790 goto priv_insn;
4791 #endif
4792 save_state(dc);
4793 cpu_src1 = gen_load_gpr(dc, rd);
4794 gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
4795 break;
4797 #ifndef TARGET_SPARC64
4798 case 0x30: /* ldc */
4799 case 0x31: /* ldcsr */
4800 case 0x33: /* lddc */
4801 goto ncp_insn;
4802 #endif
4803 #endif
4804 #ifdef TARGET_SPARC64
4805 case 0x08: /* V9 ldsw */
4806 gen_address_mask(dc, cpu_addr);
4807 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4808 break;
4809 case 0x0b: /* V9 ldx */
4810 gen_address_mask(dc, cpu_addr);
4811 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4812 break;
4813 case 0x18: /* V9 ldswa */
4814 save_state(dc);
4815 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4816 break;
4817 case 0x1b: /* V9 ldxa */
4818 save_state(dc);
4819 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4820 break;
4821 case 0x2d: /* V9 prefetch, no effect */
4822 goto skip_move;
4823 case 0x30: /* V9 ldfa */
4824 if (gen_trap_ifnofpu(dc)) {
4825 goto jmp_insn;
4826 }
4827 save_state(dc);
4828 gen_ldf_asi(cpu_addr, insn, 4, rd);
4829 gen_update_fprs_dirty(rd);
4830 goto skip_move;
4831 case 0x33: /* V9 lddfa */
4832 if (gen_trap_ifnofpu(dc)) {
4833 goto jmp_insn;
4834 }
4835 save_state(dc);
4836 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4837 gen_update_fprs_dirty(DFPREG(rd));
4838 goto skip_move;
4839 case 0x3d: /* V9 prefetcha, no effect */
4840 goto skip_move;
4841 case 0x32: /* V9 ldqfa */
4842 CHECK_FPU_FEATURE(dc, FLOAT128);
4843 if (gen_trap_ifnofpu(dc)) {
4844 goto jmp_insn;
4845 }
4846 save_state(dc);
4847 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4848 gen_update_fprs_dirty(QFPREG(rd));
4849 goto skip_move;
4850 #endif
4851 default:
4852 goto illegal_insn;
4853 }
4854 gen_store_gpr(dc, rd, cpu_val);
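/* Loads whose destinations are written by a helper (ldda, the ASI FP
   loads) and the no-op prefetches jump to skip_move to bypass the GPR
   writeback above. */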
4855 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4856 skip_move: ;
4857 #endif
4858 } else if (xop >= 0x20 && xop < 0x24) {
4859 TCGv t0;
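/* FP loads (ldf/ldfsr/ldqf/lddf): trap first if the FPU is disabled. */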
4861 if (gen_trap_ifnofpu(dc)) {
4862 goto jmp_insn;
4863 }
4864 save_state(dc);
4865 switch (xop) {
4866 case 0x20: /* ldf, load fpreg */
4867 gen_address_mask(dc, cpu_addr);
4868 t0 = get_temp_tl(dc);
4869 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4870 cpu_dst_32 = gen_dest_fpr_F(dc);
4871 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4872 gen_store_fpr_F(dc, rd, cpu_dst_32);
4873 break;
4874 case 0x21: /* ldfsr, V9 ldxfsr */
4875 #ifdef TARGET_SPARC64
4876 gen_address_mask(dc, cpu_addr);
4877 if (rd == 1) {
4878 TCGv_i64 t64 = tcg_temp_new_i64();
4879 tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
4880 gen_helper_ldxfsr(cpu_env, t64);
4881 tcg_temp_free_i64(t64);
4882 break;
4883 }
4884 #endif
4885 cpu_dst_32 = get_temp_i32(dc);
4886 t0 = get_temp_tl(dc);
4887 tcg_gen_qemu_ld32u(t0, cpu_addr, dc->mem_idx);
4888 tcg_gen_trunc_tl_i32(cpu_dst_32, t0);
4889 gen_helper_ldfsr(cpu_env, cpu_dst_32);
4890 break;
4891 case 0x22: /* ldqf, load quad fpreg */
4892 {
4893 TCGv_i32 r_const;
4895 CHECK_FPU_FEATURE(dc, FLOAT128);
4896 r_const = tcg_const_i32(dc->mem_idx);
4897 gen_address_mask(dc, cpu_addr);
4898 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4899 tcg_temp_free_i32(r_const);
4900 gen_op_store_QT0_fpr(QFPREG(rd));
4901 gen_update_fprs_dirty(QFPREG(rd));
4902 }
4903 break;
4904 case 0x23: /* lddf, load double fpreg */
4905 gen_address_mask(dc, cpu_addr);
4906 cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4907 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4908 gen_store_fpr_D(dc, rd, cpu_dst_64);
4909 break;
4910 default:
4911 goto illegal_insn;
4912 }
4913 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4914 xop == 0xe || xop == 0x1e) {
4915 TCGv cpu_val = gen_load_gpr(dc, rd);
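/* Integer stores: cpu_val holds the rd GPR (std also stores rd + 1);
   alternate-space forms go through gen_st_asi(). */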
4917 switch (xop) {
4918 case 0x4: /* st, store word */
4919 gen_address_mask(dc, cpu_addr);
4920 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4921 break;
4922 case 0x5: /* stb, store byte */
4923 gen_address_mask(dc, cpu_addr);
4924 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4925 break;
4926 case 0x6: /* sth, store halfword */
4927 gen_address_mask(dc, cpu_addr);
4928 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4929 break;
4930 case 0x7: /* std, store double word */
4931 if (rd & 1)
4932 goto illegal_insn;
4933 else {
4934 TCGv_i32 r_const;
4935 TCGv_i64 t64;
4936 TCGv lo;
4938 save_state(dc);
4939 gen_address_mask(dc, cpu_addr);
4940 r_const = tcg_const_i32(7);
4941 /* XXX remove alignment check */
4942 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4943 tcg_temp_free_i32(r_const);
4944 lo = gen_load_gpr(dc, rd + 1);
4946 t64 = tcg_temp_new_i64();
4947 tcg_gen_concat_tl_i64(t64, lo, cpu_val);
4948 tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
4949 tcg_temp_free_i64(t64);
4950 }
4951 break;
4952 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4953 case 0x14: /* sta, V9 stwa, store word alternate */
4954 #ifndef TARGET_SPARC64
4955 if (IS_IMM)
4956 goto illegal_insn;
4957 if (!supervisor(dc))
4958 goto priv_insn;
4959 #endif
4960 save_state(dc);
4961 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4962 dc->npc = DYNAMIC_PC;
4963 break;
4964 case 0x15: /* stba, store byte alternate */
4965 #ifndef TARGET_SPARC64
4966 if (IS_IMM)
4967 goto illegal_insn;
4968 if (!supervisor(dc))
4969 goto priv_insn;
4970 #endif
4971 save_state(dc);
4972 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4973 dc->npc = DYNAMIC_PC;
4974 break;
4975 case 0x16: /* stha, store halfword alternate */
4976 #ifndef TARGET_SPARC64
4977 if (IS_IMM)
4978 goto illegal_insn;
4979 if (!supervisor(dc))
4980 goto priv_insn;
4981 #endif
4982 save_state(dc);
4983 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4984 dc->npc = DYNAMIC_PC;
4985 break;
4986 case 0x17: /* stda, store double word alternate */
4987 #ifndef TARGET_SPARC64
4988 if (IS_IMM)
4989 goto illegal_insn;
4990 if (!supervisor(dc))
4991 goto priv_insn;
4992 #endif
4993 if (rd & 1)
4994 goto illegal_insn;
4995 else {
4996 save_state(dc);
4997 gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
4998 }
4999 break;
5000 #endif
5001 #ifdef TARGET_SPARC64
5002 case 0x0e: /* V9 stx */
5003 gen_address_mask(dc, cpu_addr);
5004 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5005 break;
5006 case 0x1e: /* V9 stxa */
5007 save_state(dc);
5008 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5009 dc->npc = DYNAMIC_PC;
5010 break;
5011 #endif
5012 default:
5013 goto illegal_insn;
5014 }
5015 } else if (xop > 0x23 && xop < 0x28) {
5016 if (gen_trap_ifnofpu(dc)) {
5017 goto jmp_insn;
5018 }
5019 save_state(dc);
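/* FP stores (stf/stfsr/stqf/stdf); stfsr stores the FSR image read
   back from env. */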
5020 switch (xop) {
5021 case 0x24: /* stf, store fpreg */
5022 {
5023 TCGv t = get_temp_tl(dc);
5024 gen_address_mask(dc, cpu_addr);
5025 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5026 tcg_gen_ext_i32_tl(t, cpu_src1_32);
5027 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5028 }
5029 break;
5030 case 0x25: /* stfsr, V9 stxfsr */
5031 {
5032 TCGv t = get_temp_tl(dc);
5034 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
5035 #ifdef TARGET_SPARC64
5036 gen_address_mask(dc, cpu_addr);
5037 if (rd == 1) {
5038 tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
5039 break;
5040 }
5041 #endif
5042 tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
5043 }
5044 break;
5045 case 0x26:
5046 #ifdef TARGET_SPARC64
5047 /* V9 stqf, store quad fpreg */
5048 {
5049 TCGv_i32 r_const;
5051 CHECK_FPU_FEATURE(dc, FLOAT128);
5052 gen_op_load_fpr_QT0(QFPREG(rd));
5053 r_const = tcg_const_i32(dc->mem_idx);
5054 gen_address_mask(dc, cpu_addr);
5055 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5056 tcg_temp_free_i32(r_const);
5057 }
5058 break;
5059 #else /* !TARGET_SPARC64 */
5060 /* stdfq, store floating point queue */
5061 #if defined(CONFIG_USER_ONLY)
5062 goto illegal_insn;
5063 #else
5064 if (!supervisor(dc))
5065 goto priv_insn;
5066 if (gen_trap_ifnofpu(dc)) {
5067 goto jmp_insn;
5068 }
5069 goto nfq_insn;
5070 #endif
5071 #endif
5072 case 0x27: /* stdf, store double fpreg */
5073 gen_address_mask(dc, cpu_addr);
5074 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5075 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5076 break;
5077 default:
5078 goto illegal_insn;
5079 }
5080 } else if (xop > 0x33 && xop < 0x3f) {
5081 save_state(dc);
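/* ASI FP stores and the compare-and-swap forms (casa/casxa). */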
5082 switch (xop) {
5083 #ifdef TARGET_SPARC64
5084 case 0x34: /* V9 stfa */
5085 if (gen_trap_ifnofpu(dc)) {
5086 goto jmp_insn;
5087 }
5088 gen_stf_asi(cpu_addr, insn, 4, rd);
5089 break;
5090 case 0x36: /* V9 stqfa */
5091 {
5092 TCGv_i32 r_const;
5094 CHECK_FPU_FEATURE(dc, FLOAT128);
5095 if (gen_trap_ifnofpu(dc)) {
5096 goto jmp_insn;
5097 }
5098 r_const = tcg_const_i32(7);
5099 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5100 tcg_temp_free_i32(r_const);
5101 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5102 }
5103 break;
5104 case 0x37: /* V9 stdfa */
5105 if (gen_trap_ifnofpu(dc)) {
5106 goto jmp_insn;
5107 }
5108 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5109 break;
5110 case 0x3e: /* V9 casxa */
5111 rs2 = GET_FIELD(insn, 27, 31);
5112 cpu_src2 = gen_load_gpr(dc, rs2);
5113 gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5114 break;
5115 #else
5116 case 0x34: /* stc */
5117 case 0x35: /* stcsr */
5118 case 0x36: /* stdcq */
5119 case 0x37: /* stdc */
5120 goto ncp_insn;
5121 #endif
5122 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5123 case 0x3c: /* V9 or LEON3 casa */
5124 #ifndef TARGET_SPARC64
5125 CHECK_IU_FEATURE(dc, CASA);
5126 if (IS_IMM) {
5127 goto illegal_insn;
5128 }
5129 if (!supervisor(dc)) {
5130 goto priv_insn;
5131 }
5132 #endif
5133 rs2 = GET_FIELD(insn, 27, 31);
5134 cpu_src2 = gen_load_gpr(dc, rs2);
5135 gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5136 break;
5137 #endif
5138 default:
5139 goto illegal_insn;
5140 }
5141 } else {
5142 goto illegal_insn;
5143 }
5144 }
5145 break;
5146 }
5147 /* default case for non jump instructions */
5148 if (dc->npc == DYNAMIC_PC) {
5149 dc->pc = DYNAMIC_PC;
5150 gen_op_next_insn();
5151 } else if (dc->npc == JUMP_PC) {
5152 /* we can do a static jump */
5153 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5154 dc->is_br = 1;
5155 } else {
5156 dc->pc = dc->npc;
5157 dc->npc = dc->npc + 4;
5158 }
5159 jmp_insn:
5160 goto egress;
5161 illegal_insn:
5162 {
5163 TCGv_i32 r_const;
5165 save_state(dc);
5166 r_const = tcg_const_i32(TT_ILL_INSN);
5167 gen_helper_raise_exception(cpu_env, r_const);
5168 tcg_temp_free_i32(r_const);
5169 dc->is_br = 1;
5170 }
5171 goto egress;
5172 unimp_flush:
5173 {
5174 TCGv_i32 r_const;
5176 save_state(dc);
5177 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5178 gen_helper_raise_exception(cpu_env, r_const);
5179 tcg_temp_free_i32(r_const);
5180 dc->is_br = 1;
5181 }
5182 goto egress;
5183 #if !defined(CONFIG_USER_ONLY)
5184 priv_insn:
5185 {
5186 TCGv_i32 r_const;
5188 save_state(dc);
5189 r_const = tcg_const_i32(TT_PRIV_INSN);
5190 gen_helper_raise_exception(cpu_env, r_const);
5191 tcg_temp_free_i32(r_const);
5192 dc->is_br = 1;
5193 }
5194 goto egress;
5195 #endif
5196 nfpu_insn:
5197 save_state(dc);
5198 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5199 dc->is_br = 1;
5200 goto egress;
5201 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5202 nfq_insn:
5203 save_state(dc);
5204 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5205 dc->is_br = 1;
5206 goto egress;
5207 #endif
5208 #ifndef TARGET_SPARC64
5209 ncp_insn:
5210 {
5211 TCGv_i32 r_const;
5213 save_state(dc);
5214 r_const = tcg_const_i32(TT_NCP_INSN);
5215 gen_helper_raise_exception(cpu_env, r_const);
5216 tcg_temp_free_i32(r_const);
5217 dc->is_br = 1;
5218 }
5219 goto egress;
5220 #endif
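/* egress: free the per-instruction temporaries handed out by
   get_temp_i32()/get_temp_tl() while this instruction was translated. */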
5221 egress:
5222 if (dc->n_t32 != 0) {
5223 int i;
5224 for (i = dc->n_t32 - 1; i >= 0; --i) {
5225 tcg_temp_free_i32(dc->t32[i]);
5226 }
5227 dc->n_t32 = 0;
5228 }
5229 if (dc->n_ttl != 0) {
5230 int i;
5231 for (i = dc->n_ttl - 1; i >= 0; --i) {
5232 tcg_temp_free(dc->ttl[i]);
5233 }
5234 dc->n_ttl = 0;
5235 }
5236 }
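/* Translate a block of guest instructions. When spc is true we are
   re-translating to locate a searched PC, so per-instruction pc/npc/icount
   metadata is recorded in the gen_opc arrays as we go. */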
5238 static inline void gen_intermediate_code_internal(SPARCCPU *cpu,
5239 TranslationBlock *tb,
5240 bool spc)
5241 {
5242 CPUState *cs = CPU(cpu);
5243 CPUSPARCState *env = &cpu->env;
5244 target_ulong pc_start, last_pc;
5245 uint16_t *gen_opc_end;
5246 DisasContext dc1, *dc = &dc1;
5247 CPUBreakpoint *bp;
5248 int j, lj = -1;
5249 int num_insns;
5250 int max_insns;
5251 unsigned int insn;
5253 memset(dc, 0, sizeof(DisasContext));
5254 dc->tb = tb;
5255 pc_start = tb->pc;
5256 dc->pc = pc_start;
5257 last_pc = dc->pc;
5258 dc->npc = (target_ulong) tb->cs_base;
5259 dc->cc_op = CC_OP_DYNAMIC;
5260 dc->mem_idx = cpu_mmu_index(env);
5261 dc->def = env->def;
5262 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5263 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5264 dc->singlestep = (cs->singlestep_enabled || singlestep);
5265 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
5267 num_insns = 0;
5268 max_insns = tb->cflags & CF_COUNT_MASK;
5269 if (max_insns == 0)
5270 max_insns = CF_COUNT_MASK;
5271 gen_tb_start();
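/* Main translation loop: stops on a breakpoint, after a branch
   (dc->is_br), on a non-sequential next PC, at a page boundary, in
   single-step mode, or when the op buffer or instruction budget is
   exhausted. */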
5272 do {
5273 if (unlikely(!QTAILQ_EMPTY(&cs->breakpoints))) {
5274 QTAILQ_FOREACH(bp, &cs->breakpoints, entry) {
5275 if (bp->pc == dc->pc) {
5276 if (dc->pc != pc_start)
5277 save_state(dc);
5278 gen_helper_debug(cpu_env);
5279 tcg_gen_exit_tb(0);
5280 dc->is_br = 1;
5281 goto exit_gen_loop;
5282 }
5283 }
5284 }
5285 if (spc) {
5286 qemu_log("Search PC...\n");
5287 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5288 if (lj < j) {
5289 lj++;
5290 while (lj < j)
5291 tcg_ctx.gen_opc_instr_start[lj++] = 0;
5292 tcg_ctx.gen_opc_pc[lj] = dc->pc;
5293 gen_opc_npc[lj] = dc->npc;
5294 tcg_ctx.gen_opc_instr_start[lj] = 1;
5295 tcg_ctx.gen_opc_icount[lj] = num_insns;
5296 }
5297 }
5298 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5299 gen_io_start();
5300 last_pc = dc->pc;
5301 insn = cpu_ldl_code(env, dc->pc);
5303 disas_sparc_insn(dc, insn);
5304 num_insns++;
5306 if (dc->is_br)
5307 break;
5308 /* if the next PC is different, we abort now */
5309 if (dc->pc != (last_pc + 4))
5310 break;
5311 /* if we reach a page boundary, we stop generation so that the
5312 PC of a TT_TFAULT exception is always in the right page */
5313 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5314 break;
5315 /* if single step mode, we generate only one instruction and
5316 generate an exception */
5317 if (dc->singlestep) {
5318 break;
5319 }
5320 } while ((tcg_ctx.gen_opc_ptr < gen_opc_end) &&
5321 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5322 num_insns < max_insns);
5324 exit_gen_loop:
5325 if (tb->cflags & CF_LAST_IO) {
5326 gen_io_end();
5327 }
5328 if (!dc->is_br) {
5329 if (dc->pc != DYNAMIC_PC &&
5330 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5331 /* static PC and NPC: we can use direct chaining */
5332 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5333 } else {
5334 if (dc->pc != DYNAMIC_PC) {
5335 tcg_gen_movi_tl(cpu_pc, dc->pc);
5336 }
5337 save_npc(dc);
5338 tcg_gen_exit_tb(0);
5339 }
5340 }
5341 gen_tb_end(tb, num_insns);
5342 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
5343 if (spc) {
5344 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
5345 lj++;
5346 while (lj <= j)
5347 tcg_ctx.gen_opc_instr_start[lj++] = 0;
5348 #if 0
5349 log_page_dump();
5350 #endif
5351 gen_opc_jump_pc[0] = dc->jump_pc[0];
5352 gen_opc_jump_pc[1] = dc->jump_pc[1];
5353 } else {
5354 tb->size = last_pc + 4 - pc_start;
5355 tb->icount = num_insns;
5356 }
5357 #ifdef DEBUG_DISAS
5358 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5359 qemu_log("--------------\n");
5360 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5361 log_target_disas(env, pc_start, last_pc + 4 - pc_start, 0);
5362 qemu_log("\n");
5363 }
5364 #endif
5365 }
5367 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5368 {
5369 gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, false);
5370 }
5372 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5373 {
5374 gen_intermediate_code_internal(sparc_env_get_cpu(env), tb, true);
5375 }
5377 void gen_intermediate_code_init(CPUSPARCState *env)
5378 {
5379 unsigned int i;
5380 static int inited;
5381 static const char * const gregnames[8] = {
5382 NULL, // g0 not used
5383 "g1",
5384 "g2",
5385 "g3",
5386 "g4",
5387 "g5",
5388 "g6",
5389 "g7",
5391 static const char * const fregnames[32] = {
5392 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5393 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5394 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5395 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5398 /* init various static tables */
5399 if (!inited) {
5400 inited = 1;
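/* Register the fixed TCG globals that mirror CPUSPARCState fields;
   guarded by 'inited', this only happens once per process. */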
5402 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5403 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5404 offsetof(CPUSPARCState, regwptr),
5405 "regwptr");
5406 #ifdef TARGET_SPARC64
5407 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5408 "xcc");
5409 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5410 "asi");
5411 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5412 "fprs");
5413 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5414 "gsr");
5415 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5416 offsetof(CPUSPARCState, tick_cmpr),
5417 "tick_cmpr");
5418 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5419 offsetof(CPUSPARCState, stick_cmpr),
5420 "stick_cmpr");
5421 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5422 offsetof(CPUSPARCState, hstick_cmpr),
5423 "hstick_cmpr");
5424 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5425 "hintp");
5426 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5427 "htba");
5428 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5429 "hver");
5430 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5431 offsetof(CPUSPARCState, ssr), "ssr");
5432 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5433 offsetof(CPUSPARCState, version), "ver");
5434 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5435 offsetof(CPUSPARCState, softint),
5436 "softint");
5437 #else
5438 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5439 "wim");
5440 #endif
5441 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5442 "cond");
5443 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5444 "cc_src");
5445 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5446 offsetof(CPUSPARCState, cc_src2),
5447 "cc_src2");
5448 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5449 "cc_dst");
5450 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5451 "cc_op");
5452 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5453 "psr");
5454 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5455 "fsr");
5456 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5457 "pc");
5458 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5459 "npc");
5460 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5461 #ifndef CONFIG_USER_ONLY
5462 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5463 "tbr");
5464 #endif
5465 for (i = 1; i < 8; i++) {
5466 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5467 offsetof(CPUSPARCState, gregs[i]),
5468 gregnames[i]);
5469 }
5470 for (i = 0; i < TARGET_DPREGS; i++) {
5471 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5472 offsetof(CPUSPARCState, fpr[i]),
5473 fregnames[i]);
5474 }
5475 }
5476 }
5478 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5479 {
5480 target_ulong npc;
5481 env->pc = tcg_ctx.gen_opc_pc[pc_pos];
5482 npc = gen_opc_npc[pc_pos];
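/* npc encoding: DYNAMIC_PC means the translated code already stored
   npc; JUMP_PC means select between the recorded jump targets using
   the saved condition value. */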
5483 if (npc == DYNAMIC_PC) {
5484 /* dynamic NPC: already stored */
5485 } else if (npc == JUMP_PC) {
5486 /* jump PC: use 'cond' and the jump targets of the translation */
5487 if (env->cond) {
5488 env->npc = gen_opc_jump_pc[0];
5489 } else {
5490 env->npc = gen_opc_jump_pc[1];
5491 }
5492 } else {
5493 env->npc = npc;
5494 }
5495 }