[qemu.git] / target-sparc / translate.c
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
32 #define GEN_HELPER 1
33 #include "helper.h"
35 #define DEBUG_DISAS
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77 int is_br;
78 int mem_idx;
79 int fpu_enabled;
80 int address_mask_32bit;
81 int singlestep;
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
84 sparc_def_t *def;
85 TCGv_i32 t32[3];
86 int n_t32;
87 } DisasContext;
89 // This function uses non-native bit order
90 #define GET_FIELD(X, FROM, TO) \
91 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
93 // This function uses the order in the manuals, i.e. bit 0 is 2^0
94 #define GET_FIELD_SP(X, FROM, TO) \
95 GET_FIELD(X, 31 - (TO), 31 - (FROM))
97 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
98 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
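/* Editor's worked example (illustrative, not in the original source):
 * GET_FIELD numbers bits from the MSB, so the 2-bit op field in
 * insn<31:30> is GET_FIELD(insn, 0, 1) ==
 * (insn >> (31 - 1)) & ((1 << 2) - 1) == (insn >> 30) & 3.
 * GET_FIELD_SP uses the manuals' LSB-first numbering, so the same
 * field is GET_FIELD_SP(insn, 30, 31). */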
100 #ifdef TARGET_SPARC64
101 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
102 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
103 #else
104 #define DFPREG(r) (r & 0x1e)
105 #define QFPREG(r) (r & 0x1c)
106 #endif
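/* Editor's note (illustrative): SPARC V9 folds bit 5 of a double/quad
 * register number into bit 0 of the 5-bit instruction field, so
 * encoding 1 names %f32: DFPREG(1) == ((1 & 1) << 5) | (1 & 0x1e) == 32.
 * 32-bit SPARC has only the even encodings 0..30, hence the plain
 * masks in the non-SPARC64 case. */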
108 #define UA2005_HTRAP_MASK 0xff
109 #define V8_TRAP_MASK 0x7f
111 static int sign_extend(int x, int len)
113 len = 32 - len;
114 return (x << len) >> len;
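/* Editor's worked example (illustrative): sign_extend(0x1fff, 13)
 * computes (0x1fff << 19) >> 19.  The left shift moves bit 12 into the
 * sign position and the arithmetic right shift smears it back down,
 * yielding -1 for the all-ones 13-bit immediate.  This relies on the
 * arithmetic right shift of signed int, which all supported hosts
 * provide. */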
117 #define IS_IMM (insn & (1<<13))
119 static inline void gen_update_fprs_dirty(int rd)
121 #if defined(TARGET_SPARC64)
122 tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
123 #endif
126 /* floating point registers moves */
127 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
129 #if TCG_TARGET_REG_BITS == 32
130 if (src & 1) {
131 return TCGV_LOW(cpu_fpr[src / 2]);
132 } else {
133 return TCGV_HIGH(cpu_fpr[src / 2]);
135 #else
136 if (src & 1) {
137 return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
138 } else {
139 TCGv_i32 ret = tcg_temp_local_new_i32();
140 TCGv_i64 t = tcg_temp_new_i64();
142 tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
143 tcg_gen_trunc_i64_i32(ret, t);
144 tcg_temp_free_i64(t);
146 dc->t32[dc->n_t32++] = ret;
147 assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
149 return ret;
151 #endif
154 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
156 #if TCG_TARGET_REG_BITS == 32
157 if (dst & 1) {
158 tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
159 } else {
160 tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
162 #else
163 TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
164 tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
165 (dst & 1 ? 0 : 32), 32);
166 #endif
167 gen_update_fprs_dirty(dst);
170 static TCGv_i32 gen_dest_fpr_F(void)
172 return cpu_tmp32;
175 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
177 src = DFPREG(src);
178 return cpu_fpr[src / 2];
181 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
183 dst = DFPREG(dst);
184 tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
185 gen_update_fprs_dirty(dst);
188 static TCGv_i64 gen_dest_fpr_D(void)
190 return cpu_tmp64;
193 static void gen_op_load_fpr_QT0(unsigned int src)
195 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
196 offsetof(CPU_QuadU, ll.upper));
197 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
198 offsetof(CPU_QuadU, ll.lower));
201 static void gen_op_load_fpr_QT1(unsigned int src)
203 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
204 offsetof(CPU_QuadU, ll.upper));
205 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
206 offsetof(CPU_QuadU, ll.lower));
209 static void gen_op_store_QT0_fpr(unsigned int dst)
211 tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
212 offsetof(CPU_QuadU, ll.upper));
213 tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
214 offsetof(CPU_QuadU, ll.lower));
217 #ifdef TARGET_SPARC64
218 static void gen_move_Q(unsigned int rd, unsigned int rs)
220 rd = QFPREG(rd);
221 rs = QFPREG(rs);
223 tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
224 tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
225 gen_update_fprs_dirty(rd);
227 #endif
229 /* moves */
230 #ifdef CONFIG_USER_ONLY
231 #define supervisor(dc) 0
232 #ifdef TARGET_SPARC64
233 #define hypervisor(dc) 0
234 #endif
235 #else
236 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
237 #ifdef TARGET_SPARC64
238 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
239 #else
240 #endif
241 #endif
243 #ifdef TARGET_SPARC64
244 #ifndef TARGET_ABI32
245 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
246 #else
247 #define AM_CHECK(dc) (1)
248 #endif
249 #endif
251 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
253 #ifdef TARGET_SPARC64
254 if (AM_CHECK(dc))
255 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
256 #endif
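/* Editor's note (illustrative): when PSTATE.AM is set (or always, for
 * a 32-bit ABI user-mode guest, per AM_CHECK above), V9 requires the
 * upper 32 address bits to be ignored, so every generated memory
 * access masks the address down to 32 bits first. */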
259 static inline void gen_movl_reg_TN(int reg, TCGv tn)
261 if (reg == 0)
262 tcg_gen_movi_tl(tn, 0);
263 else if (reg < 8)
264 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
265 else {
266 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
270 static inline void gen_movl_TN_reg(int reg, TCGv tn)
272 if (reg == 0)
273 return;
274 else if (reg < 8)
275 tcg_gen_mov_tl(cpu_gregs[reg], tn);
276 else {
277 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
281 static inline void gen_goto_tb(DisasContext *s, int tb_num,
282 target_ulong pc, target_ulong npc)
284 TranslationBlock *tb;
286 tb = s->tb;
287 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
288 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
289 !s->singlestep) {
290 /* jump to same page: we can use a direct jump */
291 tcg_gen_goto_tb(tb_num);
292 tcg_gen_movi_tl(cpu_pc, pc);
293 tcg_gen_movi_tl(cpu_npc, npc);
294 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
295 } else {
296 /* jump to another page: currently not optimized */
297 tcg_gen_movi_tl(cpu_pc, pc);
298 tcg_gen_movi_tl(cpu_npc, npc);
299 tcg_gen_exit_tb(0);
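/* Editor's note (illustrative): chaining via goto_tb is only valid
 * when the target stays on the same guest page as this TB, because
 * invalidating that page must also sever every direct link into the
 * TB.  Cross-page targets and single-stepping instead take
 * exit_tb(0) and return to the main loop for a fresh TB lookup. */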
303 // XXX suboptimal
304 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
306 tcg_gen_extu_i32_tl(reg, src);
307 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
308 tcg_gen_andi_tl(reg, reg, 0x1);
311 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
313 tcg_gen_extu_i32_tl(reg, src);
314 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
315 tcg_gen_andi_tl(reg, reg, 0x1);
318 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
320 tcg_gen_extu_i32_tl(reg, src);
321 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
322 tcg_gen_andi_tl(reg, reg, 0x1);
325 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
327 tcg_gen_extu_i32_tl(reg, src);
328 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
329 tcg_gen_andi_tl(reg, reg, 0x1);
332 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
334 TCGv r_temp;
335 TCGv_i32 r_const;
336 int l1;
338 l1 = gen_new_label();
340 r_temp = tcg_temp_new();
341 tcg_gen_xor_tl(r_temp, src1, src2);
342 tcg_gen_not_tl(r_temp, r_temp);
343 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
344 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
345 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
346 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
347 r_const = tcg_const_i32(TT_TOVF);
348 gen_helper_raise_exception(cpu_env, r_const);
349 tcg_temp_free_i32(r_const);
350 gen_set_label(l1);
351 tcg_temp_free(r_temp);
354 static inline void gen_tag_tv(TCGv src1, TCGv src2)
356 int l1;
357 TCGv_i32 r_const;
359 l1 = gen_new_label();
360 tcg_gen_or_tl(cpu_tmp0, src1, src2);
361 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
362 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
363 r_const = tcg_const_i32(TT_TOVF);
364 gen_helper_raise_exception(cpu_env, r_const);
365 tcg_temp_free_i32(r_const);
366 gen_set_label(l1);
369 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
371 tcg_gen_mov_tl(cpu_cc_src, src1);
372 tcg_gen_movi_tl(cpu_cc_src2, src2);
373 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
374 tcg_gen_mov_tl(dst, cpu_cc_dst);
377 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
379 tcg_gen_mov_tl(cpu_cc_src, src1);
380 tcg_gen_mov_tl(cpu_cc_src2, src2);
381 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
382 tcg_gen_mov_tl(dst, cpu_cc_dst);
385 static TCGv_i32 gen_add32_carry32(void)
387 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
389 /* Carry is computed from a previous add: (dst < src) */
390 #if TARGET_LONG_BITS == 64
391 cc_src1_32 = tcg_temp_new_i32();
392 cc_src2_32 = tcg_temp_new_i32();
393 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
394 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
395 #else
396 cc_src1_32 = cpu_cc_dst;
397 cc_src2_32 = cpu_cc_src;
398 #endif
400 carry_32 = tcg_temp_new_i32();
401 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
403 #if TARGET_LONG_BITS == 64
404 tcg_temp_free_i32(cc_src1_32);
405 tcg_temp_free_i32(cc_src2_32);
406 #endif
408 return carry_32;
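/* Editor's worked example (illustrative): for unsigned 32-bit
 * dst = src1 + src2, a carry out occurred iff the sum wrapped, i.e.
 * dst < src1.  E.g. 0xffffffff + 2 wraps to 1, and 1 < 0xffffffff,
 * so the setcond above yields carry = 1. */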
411 static TCGv_i32 gen_sub32_carry32(void)
413 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
415 /* Carry is computed from a previous borrow: (src1 < src2) */
416 #if TARGET_LONG_BITS == 64
417 cc_src1_32 = tcg_temp_new_i32();
418 cc_src2_32 = tcg_temp_new_i32();
419 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
420 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
421 #else
422 cc_src1_32 = cpu_cc_src;
423 cc_src2_32 = cpu_cc_src2;
424 #endif
426 carry_32 = tcg_temp_new_i32();
427 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
429 #if TARGET_LONG_BITS == 64
430 tcg_temp_free_i32(cc_src1_32);
431 tcg_temp_free_i32(cc_src2_32);
432 #endif
434 return carry_32;
437 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
438 TCGv src2, int update_cc)
440 TCGv_i32 carry_32;
441 TCGv carry;
443 switch (dc->cc_op) {
444 case CC_OP_DIV:
445 case CC_OP_LOGIC:
446 /* Carry is known to be zero. Fall back to plain ADD. */
447 if (update_cc) {
448 gen_op_add_cc(dst, src1, src2);
449 } else {
450 tcg_gen_add_tl(dst, src1, src2);
452 return;
454 case CC_OP_ADD:
455 case CC_OP_TADD:
456 case CC_OP_TADDTV:
457 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
459 /* For 32-bit hosts, we can re-use the host's hardware carry
460 generation by using an ADD2 opcode. We discard the low
461 part of the output. Ideally we'd combine this operation
462 with the add that generated the carry in the first place. */
463 TCGv dst_low = tcg_temp_new();
464 tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
465 cpu_cc_src, src1, cpu_cc_src2, src2);
466 tcg_temp_free(dst_low);
467 goto add_done;
469 #endif
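/* Editor's note (illustrative): add2 computes a double-word sum
 * {rh,rl} = {ah,al} + {bh,bl}.  Feeding the previous cc_src/cc_src2
 * pair in as the low words re-runs the earlier add, and its carry-out
 * flows into the high words, so the high result is src1 + src2 + C;
 * the recomputed low word is discarded. */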
470 carry_32 = gen_add32_carry32();
471 break;
473 case CC_OP_SUB:
474 case CC_OP_TSUB:
475 case CC_OP_TSUBTV:
476 carry_32 = gen_sub32_carry32();
477 break;
479 default:
480 /* We need external help to produce the carry. */
481 carry_32 = tcg_temp_new_i32();
482 gen_helper_compute_C_icc(carry_32, cpu_env);
483 break;
486 #if TARGET_LONG_BITS == 64
487 carry = tcg_temp_new();
488 tcg_gen_extu_i32_i64(carry, carry_32);
489 #else
490 carry = carry_32;
491 #endif
493 tcg_gen_add_tl(dst, src1, src2);
494 tcg_gen_add_tl(dst, dst, carry);
496 tcg_temp_free_i32(carry_32);
497 #if TARGET_LONG_BITS == 64
498 tcg_temp_free(carry);
499 #endif
501 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
502 add_done:
503 #endif
504 if (update_cc) {
505 tcg_gen_mov_tl(cpu_cc_src, src1);
506 tcg_gen_mov_tl(cpu_cc_src2, src2);
507 tcg_gen_mov_tl(cpu_cc_dst, dst);
508 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
509 dc->cc_op = CC_OP_ADDX;
513 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
515 tcg_gen_mov_tl(cpu_cc_src, src1);
516 tcg_gen_mov_tl(cpu_cc_src2, src2);
517 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
518 tcg_gen_mov_tl(dst, cpu_cc_dst);
521 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
523 tcg_gen_mov_tl(cpu_cc_src, src1);
524 tcg_gen_mov_tl(cpu_cc_src2, src2);
525 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
526 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
527 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528 tcg_gen_mov_tl(dst, cpu_cc_dst);
531 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
533 TCGv r_temp;
534 TCGv_i32 r_const;
535 int l1;
537 l1 = gen_new_label();
539 r_temp = tcg_temp_new();
540 tcg_gen_xor_tl(r_temp, src1, src2);
541 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
542 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
543 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
544 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
545 r_const = tcg_const_i32(TT_TOVF);
546 gen_helper_raise_exception(cpu_env, r_const);
547 tcg_temp_free_i32(r_const);
548 gen_set_label(l1);
549 tcg_temp_free(r_temp);
552 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
554 tcg_gen_mov_tl(cpu_cc_src, src1);
555 tcg_gen_movi_tl(cpu_cc_src2, src2);
556 if (src2 == 0) {
557 tcg_gen_mov_tl(cpu_cc_dst, src1);
558 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
559 dc->cc_op = CC_OP_LOGIC;
560 } else {
561 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
562 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
563 dc->cc_op = CC_OP_SUB;
565 tcg_gen_mov_tl(dst, cpu_cc_dst);
568 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
570 tcg_gen_mov_tl(cpu_cc_src, src1);
571 tcg_gen_mov_tl(cpu_cc_src2, src2);
572 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
573 tcg_gen_mov_tl(dst, cpu_cc_dst);
576 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
577 TCGv src2, int update_cc)
579 TCGv_i32 carry_32;
580 TCGv carry;
582 switch (dc->cc_op) {
583 case CC_OP_DIV:
584 case CC_OP_LOGIC:
585 /* Carry is known to be zero. Fall back to plain SUB. */
586 if (update_cc) {
587 gen_op_sub_cc(dst, src1, src2);
588 } else {
589 tcg_gen_sub_tl(dst, src1, src2);
591 return;
593 case CC_OP_ADD:
594 case CC_OP_TADD:
595 case CC_OP_TADDTV:
596 carry_32 = gen_add32_carry32();
597 break;
599 case CC_OP_SUB:
600 case CC_OP_TSUB:
601 case CC_OP_TSUBTV:
602 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
604 /* For 32-bit hosts, we can re-use the host's hardware carry
605 generation by using a SUB2 opcode. We discard the low
606 part of the output. Ideally we'd combine this operation
607 with the add that generated the carry in the first place. */
608 TCGv dst_low = tcg_temp_new();
609 tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
610 cpu_cc_src, src1, cpu_cc_src2, src2);
611 tcg_temp_free(dst_low);
612 goto sub_done;
614 #endif
615 carry_32 = gen_sub32_carry32();
616 break;
618 default:
619 /* We need external help to produce the carry. */
620 carry_32 = tcg_temp_new_i32();
621 gen_helper_compute_C_icc(carry_32, cpu_env);
622 break;
625 #if TARGET_LONG_BITS == 64
626 carry = tcg_temp_new();
627 tcg_gen_extu_i32_i64(carry, carry_32);
628 #else
629 carry = carry_32;
630 #endif
632 tcg_gen_sub_tl(dst, src1, src2);
633 tcg_gen_sub_tl(dst, dst, carry);
635 tcg_temp_free_i32(carry_32);
636 #if TARGET_LONG_BITS == 64
637 tcg_temp_free(carry);
638 #endif
640 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
641 sub_done:
642 #endif
643 if (update_cc) {
644 tcg_gen_mov_tl(cpu_cc_src, src1);
645 tcg_gen_mov_tl(cpu_cc_src2, src2);
646 tcg_gen_mov_tl(cpu_cc_dst, dst);
647 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
648 dc->cc_op = CC_OP_SUBX;
652 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
654 tcg_gen_mov_tl(cpu_cc_src, src1);
655 tcg_gen_mov_tl(cpu_cc_src2, src2);
656 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657 tcg_gen_mov_tl(dst, cpu_cc_dst);
660 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
662 tcg_gen_mov_tl(cpu_cc_src, src1);
663 tcg_gen_mov_tl(cpu_cc_src2, src2);
664 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
665 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
666 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667 tcg_gen_mov_tl(dst, cpu_cc_dst);
670 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
672 TCGv r_temp;
673 int l1;
675 l1 = gen_new_label();
676 r_temp = tcg_temp_new();
678 /* old op:
679 if (!(env->y & 1))
       T1 = 0;
    */
682 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
683 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
684 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
685 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
686 tcg_gen_movi_tl(cpu_cc_src2, 0);
687 gen_set_label(l1);
689 // b2 = T0 & 1;
690 // env->y = (b2 << 31) | (env->y >> 1);
691 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
692 tcg_gen_shli_tl(r_temp, r_temp, 31);
693 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
694 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
695 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
696 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
698 // b1 = N ^ V;
699 gen_mov_reg_N(cpu_tmp0, cpu_psr);
700 gen_mov_reg_V(r_temp, cpu_psr);
701 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
702 tcg_temp_free(r_temp);
704 // T0 = (b1 << 31) | (T0 >> 1);
705 // src1 = T0;
706 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
707 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
708 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
710 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
712 tcg_gen_mov_tl(dst, cpu_cc_dst);
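/* Editor's recap (illustrative): MULScc performs one step of the V8
 * shift-and-add multiply.  The lsb of Y selects rs2 or 0 as the
 * addend, Y is shifted right with rs1's lsb inserted at the top,
 * rs1 is shifted right with (N ^ V) inserted at the top, and the
 * addend is accumulated; 32 such steps plus a final add compute a
 * full 32x32 multiply. */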
715 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
717 TCGv_i32 r_src1, r_src2;
718 TCGv_i64 r_temp, r_temp2;
720 r_src1 = tcg_temp_new_i32();
721 r_src2 = tcg_temp_new_i32();
723 tcg_gen_trunc_tl_i32(r_src1, src1);
724 tcg_gen_trunc_tl_i32(r_src2, src2);
726 r_temp = tcg_temp_new_i64();
727 r_temp2 = tcg_temp_new_i64();
729 if (sign_ext) {
730 tcg_gen_ext_i32_i64(r_temp, r_src2);
731 tcg_gen_ext_i32_i64(r_temp2, r_src1);
732 } else {
733 tcg_gen_extu_i32_i64(r_temp, r_src2);
734 tcg_gen_extu_i32_i64(r_temp2, r_src1);
737 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
739 tcg_gen_shri_i64(r_temp, r_temp2, 32);
740 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
741 tcg_temp_free_i64(r_temp);
742 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
744 tcg_gen_trunc_i64_tl(dst, r_temp2);
746 tcg_temp_free_i64(r_temp2);
748 tcg_temp_free_i32(r_src1);
749 tcg_temp_free_i32(r_src2);
752 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
754 /* zero-extend truncated operands before multiplication */
755 gen_op_multiply(dst, src1, src2, 0);
758 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
760 /* sign-extend truncated operands before multiplication */
761 gen_op_multiply(dst, src1, src2, 1);
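/* Editor's worked example (illustrative): UMUL with both operands
 * 0x80000000 produces the 64-bit product 0x4000000000000000; dst
 * receives the low word (0) and the Y register the high word
 * (0x40000000), matching the V8 definition of UMUL/SMUL. */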
764 #ifdef TARGET_SPARC64
765 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
767 TCGv_i32 r_const;
768 int l1;
770 l1 = gen_new_label();
771 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
772 r_const = tcg_const_i32(TT_DIV_ZERO);
773 gen_helper_raise_exception(cpu_env, r_const);
774 tcg_temp_free_i32(r_const);
775 gen_set_label(l1);
778 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
780 int l1, l2;
781 TCGv r_temp1, r_temp2;
783 l1 = gen_new_label();
784 l2 = gen_new_label();
785 r_temp1 = tcg_temp_local_new();
786 r_temp2 = tcg_temp_local_new();
787 tcg_gen_mov_tl(r_temp1, src1);
788 tcg_gen_mov_tl(r_temp2, src2);
789 gen_trap_ifdivzero_tl(r_temp2);
790 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
791 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
792 tcg_gen_movi_i64(dst, INT64_MIN);
793 tcg_gen_br(l2);
794 gen_set_label(l1);
795 tcg_gen_div_i64(dst, r_temp1, r_temp2);
796 gen_set_label(l2);
797 tcg_temp_free(r_temp1);
798 tcg_temp_free(r_temp2);
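/* Editor's note (illustrative): the two brcondi checks above single
 * out the only overflowing case of 64-bit signed division,
 * INT64_MIN / -1, whose mathematical quotient 2^63 is unrepresentable;
 * the translator makes it yield INT64_MIN rather than trapping. */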
800 #endif
802 // 1
803 static inline void gen_op_eval_ba(TCGv dst)
805 tcg_gen_movi_tl(dst, 1);
808 // Z
809 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
811 gen_mov_reg_Z(dst, src);
814 // Z | (N ^ V)
815 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
817 gen_mov_reg_N(cpu_tmp0, src);
818 gen_mov_reg_V(dst, src);
819 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
820 gen_mov_reg_Z(cpu_tmp0, src);
821 tcg_gen_or_tl(dst, dst, cpu_tmp0);
824 // N ^ V
825 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
827 gen_mov_reg_V(cpu_tmp0, src);
828 gen_mov_reg_N(dst, src);
829 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
832 // C | Z
833 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
835 gen_mov_reg_Z(cpu_tmp0, src);
836 gen_mov_reg_C(dst, src);
837 tcg_gen_or_tl(dst, dst, cpu_tmp0);
840 // C
841 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
843 gen_mov_reg_C(dst, src);
846 // V
847 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
849 gen_mov_reg_V(dst, src);
852 // 0
853 static inline void gen_op_eval_bn(TCGv dst)
855 tcg_gen_movi_tl(dst, 0);
858 // N
859 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
861 gen_mov_reg_N(dst, src);
864 // !Z
865 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
867 gen_mov_reg_Z(dst, src);
868 tcg_gen_xori_tl(dst, dst, 0x1);
871 // !(Z | (N ^ V))
872 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
874 gen_mov_reg_N(cpu_tmp0, src);
875 gen_mov_reg_V(dst, src);
876 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
877 gen_mov_reg_Z(cpu_tmp0, src);
878 tcg_gen_or_tl(dst, dst, cpu_tmp0);
879 tcg_gen_xori_tl(dst, dst, 0x1);
882 // !(N ^ V)
883 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
885 gen_mov_reg_V(cpu_tmp0, src);
886 gen_mov_reg_N(dst, src);
887 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
888 tcg_gen_xori_tl(dst, dst, 0x1);
891 // !(C | Z)
892 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
894 gen_mov_reg_Z(cpu_tmp0, src);
895 gen_mov_reg_C(dst, src);
896 tcg_gen_or_tl(dst, dst, cpu_tmp0);
897 tcg_gen_xori_tl(dst, dst, 0x1);
900 // !C
901 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
903 gen_mov_reg_C(dst, src);
904 tcg_gen_xori_tl(dst, dst, 0x1);
907 // !N
908 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
910 gen_mov_reg_N(dst, src);
911 tcg_gen_xori_tl(dst, dst, 0x1);
914 // !V
915 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
917 gen_mov_reg_V(dst, src);
918 tcg_gen_xori_tl(dst, dst, 0x1);
/*
   FPSR bit field FCC1 | FCC0:
    0 =
    1 <
    2 >
    3 unordered
 */
928 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
929 unsigned int fcc_offset)
931 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
932 tcg_gen_andi_tl(reg, reg, 0x1);
935 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
936 unsigned int fcc_offset)
938 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
939 tcg_gen_andi_tl(reg, reg, 0x1);
942 // !0: FCC0 | FCC1
943 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
944 unsigned int fcc_offset)
946 gen_mov_reg_FCC0(dst, src, fcc_offset);
947 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
948 tcg_gen_or_tl(dst, dst, cpu_tmp0);
951 // 1 or 2: FCC0 ^ FCC1
952 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
953 unsigned int fcc_offset)
955 gen_mov_reg_FCC0(dst, src, fcc_offset);
956 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
957 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
960 // 1 or 3: FCC0
961 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
962 unsigned int fcc_offset)
964 gen_mov_reg_FCC0(dst, src, fcc_offset);
967 // 1: FCC0 & !FCC1
968 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
969 unsigned int fcc_offset)
971 gen_mov_reg_FCC0(dst, src, fcc_offset);
972 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
973 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
974 tcg_gen_and_tl(dst, dst, cpu_tmp0);
977 // 2 or 3: FCC1
978 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
979 unsigned int fcc_offset)
981 gen_mov_reg_FCC1(dst, src, fcc_offset);
984 // 2: !FCC0 & FCC1
985 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
986 unsigned int fcc_offset)
988 gen_mov_reg_FCC0(dst, src, fcc_offset);
989 tcg_gen_xori_tl(dst, dst, 0x1);
990 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
991 tcg_gen_and_tl(dst, dst, cpu_tmp0);
994 // 3: FCC0 & FCC1
995 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
996 unsigned int fcc_offset)
998 gen_mov_reg_FCC0(dst, src, fcc_offset);
999 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1000 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1003 // 0: !(FCC0 | FCC1)
1004 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1005 unsigned int fcc_offset)
1007 gen_mov_reg_FCC0(dst, src, fcc_offset);
1008 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1010 tcg_gen_xori_tl(dst, dst, 0x1);
1013 // 0 or 3: !(FCC0 ^ FCC1)
1014 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1017 gen_mov_reg_FCC0(dst, src, fcc_offset);
1018 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1019 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1020 tcg_gen_xori_tl(dst, dst, 0x1);
1023 // 0 or 2: !FCC0
1024 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1025 unsigned int fcc_offset)
1027 gen_mov_reg_FCC0(dst, src, fcc_offset);
1028 tcg_gen_xori_tl(dst, dst, 0x1);
1031 // !1: !(FCC0 & !FCC1)
1032 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1033 unsigned int fcc_offset)
1035 gen_mov_reg_FCC0(dst, src, fcc_offset);
1036 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1037 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1038 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1039 tcg_gen_xori_tl(dst, dst, 0x1);
1042 // 0 or 1: !FCC1
1043 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1044 unsigned int fcc_offset)
1046 gen_mov_reg_FCC1(dst, src, fcc_offset);
1047 tcg_gen_xori_tl(dst, dst, 0x1);
1050 // !2: !(!FCC0 & FCC1)
1051 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1052 unsigned int fcc_offset)
1054 gen_mov_reg_FCC0(dst, src, fcc_offset);
1055 tcg_gen_xori_tl(dst, dst, 0x1);
1056 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1057 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1058 tcg_gen_xori_tl(dst, dst, 0x1);
1061 // !3: !(FCC0 & FCC1)
1062 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1063 unsigned int fcc_offset)
1065 gen_mov_reg_FCC0(dst, src, fcc_offset);
1066 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1068 tcg_gen_xori_tl(dst, dst, 0x1);
1071 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1072 target_ulong pc2, TCGv r_cond)
1074 int l1;
1076 l1 = gen_new_label();
1078 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1080 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1082 gen_set_label(l1);
1083 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1086 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1087 target_ulong pc2, TCGv r_cond)
1089 int l1;
1091 l1 = gen_new_label();
1093 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1095 gen_goto_tb(dc, 0, pc2, pc1);
1097 gen_set_label(l1);
1098 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1101 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1102 TCGv r_cond)
1104 int l1, l2;
1106 l1 = gen_new_label();
1107 l2 = gen_new_label();
1109 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1111 tcg_gen_movi_tl(cpu_npc, npc1);
1112 tcg_gen_br(l2);
1114 gen_set_label(l1);
1115 tcg_gen_movi_tl(cpu_npc, npc2);
1116 gen_set_label(l2);
1119 /* call this function before using the condition register as it may
1120 have been set for a jump */
1121 static inline void flush_cond(DisasContext *dc, TCGv cond)
1123 if (dc->npc == JUMP_PC) {
1124 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1125 dc->npc = DYNAMIC_PC;
1129 static inline void save_npc(DisasContext *dc, TCGv cond)
1131 if (dc->npc == JUMP_PC) {
1132 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1133 dc->npc = DYNAMIC_PC;
1134 } else if (dc->npc != DYNAMIC_PC) {
1135 tcg_gen_movi_tl(cpu_npc, dc->npc);
1139 static inline void save_state(DisasContext *dc, TCGv cond)
1141 tcg_gen_movi_tl(cpu_pc, dc->pc);
1142 /* flush pending conditional evaluations before exposing cpu state */
1143 if (dc->cc_op != CC_OP_FLAGS) {
1144 dc->cc_op = CC_OP_FLAGS;
1145 gen_helper_compute_psr(cpu_env);
1147 save_npc(dc, cond);
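/* Editor's note (illustrative): condition codes are maintained
 * lazily.  cc_op records which operation last defined them, and
 * cpu_psr is only materialized via compute_psr when CPU state
 * becomes architecturally visible -- here in save_state, or when a
 * condition is actually evaluated in gen_cond. */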
1150 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1152 if (dc->npc == JUMP_PC) {
1153 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1154 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1155 dc->pc = DYNAMIC_PC;
1156 } else if (dc->npc == DYNAMIC_PC) {
1157 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1158 dc->pc = DYNAMIC_PC;
1159 } else {
1160 dc->pc = dc->npc;
1164 static inline void gen_op_next_insn(void)
1166 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1167 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1170 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1171 DisasContext *dc)
1173 TCGv_i32 r_src;
1175 #ifdef TARGET_SPARC64
1176 if (cc)
1177 r_src = cpu_xcc;
1178 else
1179 r_src = cpu_psr;
1180 #else
1181 r_src = cpu_psr;
1182 #endif
1183 switch (dc->cc_op) {
1184 case CC_OP_FLAGS:
1185 break;
1186 default:
1187 gen_helper_compute_psr(cpu_env);
1188 dc->cc_op = CC_OP_FLAGS;
1189 break;
1191 switch (cond) {
1192 case 0x0:
1193 gen_op_eval_bn(r_dst);
1194 break;
1195 case 0x1:
1196 gen_op_eval_be(r_dst, r_src);
1197 break;
1198 case 0x2:
1199 gen_op_eval_ble(r_dst, r_src);
1200 break;
1201 case 0x3:
1202 gen_op_eval_bl(r_dst, r_src);
1203 break;
1204 case 0x4:
1205 gen_op_eval_bleu(r_dst, r_src);
1206 break;
1207 case 0x5:
1208 gen_op_eval_bcs(r_dst, r_src);
1209 break;
1210 case 0x6:
1211 gen_op_eval_bneg(r_dst, r_src);
1212 break;
1213 case 0x7:
1214 gen_op_eval_bvs(r_dst, r_src);
1215 break;
1216 case 0x8:
1217 gen_op_eval_ba(r_dst);
1218 break;
1219 case 0x9:
1220 gen_op_eval_bne(r_dst, r_src);
1221 break;
1222 case 0xa:
1223 gen_op_eval_bg(r_dst, r_src);
1224 break;
1225 case 0xb:
1226 gen_op_eval_bge(r_dst, r_src);
1227 break;
1228 case 0xc:
1229 gen_op_eval_bgu(r_dst, r_src);
1230 break;
1231 case 0xd:
1232 gen_op_eval_bcc(r_dst, r_src);
1233 break;
1234 case 0xe:
1235 gen_op_eval_bpos(r_dst, r_src);
1236 break;
1237 case 0xf:
1238 gen_op_eval_bvc(r_dst, r_src);
1239 break;
1243 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1245 unsigned int offset;
1247 switch (cc) {
1248 default:
1249 case 0x0:
1250 offset = 0;
1251 break;
1252 case 0x1:
1253 offset = 32 - 10;
1254 break;
1255 case 0x2:
1256 offset = 34 - 10;
1257 break;
1258 case 0x3:
1259 offset = 36 - 10;
1260 break;
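/* Editor's note (illustrative): gen_mov_reg_FCC0/1 add this offset to
 * FSR_FCC0_SHIFT (10), so offset 0 picks FCC0 at FSR bits 11:10,
 * while 22, 24 and 26 reach the V9 fields FCC1..FCC3 at bits 33:32,
 * 35:34 and 37:36. */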
1263 switch (cond) {
1264 case 0x0:
1265 gen_op_eval_bn(r_dst);
1266 break;
1267 case 0x1:
1268 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1269 break;
1270 case 0x2:
1271 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1272 break;
1273 case 0x3:
1274 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1275 break;
1276 case 0x4:
1277 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1278 break;
1279 case 0x5:
1280 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1281 break;
1282 case 0x6:
1283 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1284 break;
1285 case 0x7:
1286 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1287 break;
1288 case 0x8:
1289 gen_op_eval_ba(r_dst);
1290 break;
1291 case 0x9:
1292 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1293 break;
1294 case 0xa:
1295 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1296 break;
1297 case 0xb:
1298 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1299 break;
1300 case 0xc:
1301 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1302 break;
1303 case 0xd:
1304 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1305 break;
1306 case 0xe:
1307 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1308 break;
1309 case 0xf:
1310 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1311 break;
1315 #ifdef TARGET_SPARC64
1316 // Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1328 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1330 int l1;
1332 l1 = gen_new_label();
1333 tcg_gen_movi_tl(r_dst, 0);
1334 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1335 tcg_gen_movi_tl(r_dst, 1);
1336 gen_set_label(l1);
1338 #endif
1340 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1341 TCGv r_cond)
1343 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1344 target_ulong target = dc->pc + offset;
1346 if (cond == 0x0) {
1347 /* unconditional not taken */
1348 if (a) {
1349 dc->pc = dc->npc + 4;
1350 dc->npc = dc->pc + 4;
1351 } else {
1352 dc->pc = dc->npc;
1353 dc->npc = dc->pc + 4;
1355 } else if (cond == 0x8) {
1356 /* unconditional taken */
1357 if (a) {
1358 dc->pc = target;
1359 dc->npc = dc->pc + 4;
1360 } else {
1361 dc->pc = dc->npc;
1362 dc->npc = target;
1363 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1365 } else {
1366 flush_cond(dc, r_cond);
1367 gen_cond(r_cond, cc, cond, dc);
1368 if (a) {
1369 gen_branch_a(dc, target, dc->npc, r_cond);
1370 dc->is_br = 1;
1371 } else {
1372 dc->pc = dc->npc;
1373 dc->jump_pc[0] = target;
1374 if (unlikely(dc->npc == DYNAMIC_PC)) {
1375 dc->jump_pc[1] = DYNAMIC_PC;
1376 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1377 } else {
1378 dc->jump_pc[1] = dc->npc + 4;
1379 dc->npc = JUMP_PC;
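/* Editor's recap (illustrative) of the annul handling above: an
 * annulled untaken branch skips the delay slot (pc = npc + 4); an
 * annulled taken branch runs the target with no delay slot; a true
 * conditional branch records both candidate npcs in jump_pc[] and
 * defers the choice (npc = JUMP_PC) until the condition is needed. */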
1385 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1386 TCGv r_cond)
1388 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1389 target_ulong target = dc->pc + offset;
1391 if (cond == 0x0) {
1392 /* unconditional not taken */
1393 if (a) {
1394 dc->pc = dc->npc + 4;
1395 dc->npc = dc->pc + 4;
1396 } else {
1397 dc->pc = dc->npc;
1398 dc->npc = dc->pc + 4;
1400 } else if (cond == 0x8) {
1401 /* unconditional taken */
1402 if (a) {
1403 dc->pc = target;
1404 dc->npc = dc->pc + 4;
1405 } else {
1406 dc->pc = dc->npc;
1407 dc->npc = target;
1408 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1410 } else {
1411 flush_cond(dc, r_cond);
1412 gen_fcond(r_cond, cc, cond);
1413 if (a) {
1414 gen_branch_a(dc, target, dc->npc, r_cond);
1415 dc->is_br = 1;
1416 } else {
1417 dc->pc = dc->npc;
1418 dc->jump_pc[0] = target;
1419 if (unlikely(dc->npc == DYNAMIC_PC)) {
1420 dc->jump_pc[1] = DYNAMIC_PC;
1421 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1422 } else {
1423 dc->jump_pc[1] = dc->npc + 4;
1424 dc->npc = JUMP_PC;
1430 #ifdef TARGET_SPARC64
1431 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1432 TCGv r_cond, TCGv r_reg)
1434 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1435 target_ulong target = dc->pc + offset;
1437 flush_cond(dc, r_cond);
1438 gen_cond_reg(r_cond, cond, r_reg);
1439 if (a) {
1440 gen_branch_a(dc, target, dc->npc, r_cond);
1441 dc->is_br = 1;
1442 } else {
1443 dc->pc = dc->npc;
1444 dc->jump_pc[0] = target;
1445 if (unlikely(dc->npc == DYNAMIC_PC)) {
1446 dc->jump_pc[1] = DYNAMIC_PC;
1447 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1448 } else {
1449 dc->jump_pc[1] = dc->npc + 4;
1450 dc->npc = JUMP_PC;
1455 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1457 switch (fccno) {
1458 case 0:
1459 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1460 break;
1461 case 1:
1462 gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1463 break;
1464 case 2:
1465 gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1466 break;
1467 case 3:
1468 gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1469 break;
1473 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1475 switch (fccno) {
1476 case 0:
1477 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1478 break;
1479 case 1:
1480 gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1481 break;
1482 case 2:
1483 gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1484 break;
1485 case 3:
1486 gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1487 break;
1491 static inline void gen_op_fcmpq(int fccno)
1493 switch (fccno) {
1494 case 0:
1495 gen_helper_fcmpq(cpu_env);
1496 break;
1497 case 1:
1498 gen_helper_fcmpq_fcc1(cpu_env);
1499 break;
1500 case 2:
1501 gen_helper_fcmpq_fcc2(cpu_env);
1502 break;
1503 case 3:
1504 gen_helper_fcmpq_fcc3(cpu_env);
1505 break;
1509 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1511 switch (fccno) {
1512 case 0:
1513 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1514 break;
1515 case 1:
1516 gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1517 break;
1518 case 2:
1519 gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1520 break;
1521 case 3:
1522 gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1523 break;
1527 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1529 switch (fccno) {
1530 case 0:
1531 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1532 break;
1533 case 1:
1534 gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1535 break;
1536 case 2:
1537 gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1538 break;
1539 case 3:
1540 gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1541 break;
1545 static inline void gen_op_fcmpeq(int fccno)
1547 switch (fccno) {
1548 case 0:
1549 gen_helper_fcmpeq(cpu_env);
1550 break;
1551 case 1:
1552 gen_helper_fcmpeq_fcc1(cpu_env);
1553 break;
1554 case 2:
1555 gen_helper_fcmpeq_fcc2(cpu_env);
1556 break;
1557 case 3:
1558 gen_helper_fcmpeq_fcc3(cpu_env);
1559 break;
1563 #else
1565 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1567 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1570 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1572 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1575 static inline void gen_op_fcmpq(int fccno)
1577 gen_helper_fcmpq(cpu_env);
1580 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1582 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1585 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1587 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1590 static inline void gen_op_fcmpeq(int fccno)
1592 gen_helper_fcmpeq(cpu_env);
1594 #endif
1596 static inline void gen_op_fpexception_im(int fsr_flags)
1598 TCGv_i32 r_const;
1600 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1601 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1602 r_const = tcg_const_i32(TT_FP_EXCP);
1603 gen_helper_raise_exception(cpu_env, r_const);
1604 tcg_temp_free_i32(r_const);
1607 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1609 #if !defined(CONFIG_USER_ONLY)
1610 if (!dc->fpu_enabled) {
1611 TCGv_i32 r_const;
1613 save_state(dc, r_cond);
1614 r_const = tcg_const_i32(TT_NFPU_INSN);
1615 gen_helper_raise_exception(cpu_env, r_const);
1616 tcg_temp_free_i32(r_const);
1617 dc->is_br = 1;
1618 return 1;
1620 #endif
1621 return 0;
1624 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1626 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1629 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1630 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1632 TCGv_i32 dst, src;
1634 src = gen_load_fpr_F(dc, rs);
1635 dst = gen_dest_fpr_F();
1637 gen(dst, cpu_env, src);
1639 gen_store_fpr_F(dc, rd, dst);
1642 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1643 void (*gen)(TCGv_i32, TCGv_i32))
1645 TCGv_i32 dst, src;
1647 src = gen_load_fpr_F(dc, rs);
1648 dst = gen_dest_fpr_F();
1650 gen(dst, src);
1652 gen_store_fpr_F(dc, rd, dst);
1655 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1656 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1658 TCGv_i32 dst, src1, src2;
1660 src1 = gen_load_fpr_F(dc, rs1);
1661 src2 = gen_load_fpr_F(dc, rs2);
1662 dst = gen_dest_fpr_F();
1664 gen(dst, cpu_env, src1, src2);
1666 gen_store_fpr_F(dc, rd, dst);
1669 #ifdef TARGET_SPARC64
1670 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1671 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1673 TCGv_i32 dst, src1, src2;
1675 src1 = gen_load_fpr_F(dc, rs1);
1676 src2 = gen_load_fpr_F(dc, rs2);
1677 dst = gen_dest_fpr_F();
1679 gen(dst, src1, src2);
1681 gen_store_fpr_F(dc, rd, dst);
1683 #endif
1685 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1686 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1688 TCGv_i64 dst, src;
1690 src = gen_load_fpr_D(dc, rs);
1691 dst = gen_dest_fpr_D();
1693 gen(dst, cpu_env, src);
1695 gen_store_fpr_D(dc, rd, dst);
1698 #ifdef TARGET_SPARC64
1699 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1700 void (*gen)(TCGv_i64, TCGv_i64))
1702 TCGv_i64 dst, src;
1704 src = gen_load_fpr_D(dc, rs);
1705 dst = gen_dest_fpr_D();
1707 gen(dst, src);
1709 gen_store_fpr_D(dc, rd, dst);
1711 #endif
1713 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1714 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1716 TCGv_i64 dst, src1, src2;
1718 src1 = gen_load_fpr_D(dc, rs1);
1719 src2 = gen_load_fpr_D(dc, rs2);
1720 dst = gen_dest_fpr_D();
1722 gen(dst, cpu_env, src1, src2);
1724 gen_store_fpr_D(dc, rd, dst);
1727 #ifdef TARGET_SPARC64
1728 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1729 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1731 TCGv_i64 dst, src1, src2;
1733 src1 = gen_load_fpr_D(dc, rs1);
1734 src2 = gen_load_fpr_D(dc, rs2);
1735 dst = gen_dest_fpr_D();
1737 gen(dst, src1, src2);
1739 gen_store_fpr_D(dc, rd, dst);
1742 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1743 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1745 TCGv_i64 dst, src1, src2;
1747 src1 = gen_load_fpr_D(dc, rs1);
1748 src2 = gen_load_fpr_D(dc, rs2);
1749 dst = gen_dest_fpr_D();
1751 gen(dst, cpu_gsr, src1, src2);
1753 gen_store_fpr_D(dc, rd, dst);
1756 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1757 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1759 TCGv_i64 dst, src0, src1, src2;
1761 src1 = gen_load_fpr_D(dc, rs1);
1762 src2 = gen_load_fpr_D(dc, rs2);
1763 src0 = gen_load_fpr_D(dc, rd);
1764 dst = gen_dest_fpr_D();
1766 gen(dst, src0, src1, src2);
1768 gen_store_fpr_D(dc, rd, dst);
1770 #endif
1772 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1773 void (*gen)(TCGv_ptr))
1775 gen_op_load_fpr_QT1(QFPREG(rs));
1777 gen(cpu_env);
1779 gen_op_store_QT0_fpr(QFPREG(rd));
1780 gen_update_fprs_dirty(QFPREG(rd));
1783 #ifdef TARGET_SPARC64
1784 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1785 void (*gen)(TCGv_ptr))
1787 gen_op_load_fpr_QT1(QFPREG(rs));
1789 gen(cpu_env);
1791 gen_op_store_QT0_fpr(QFPREG(rd));
1792 gen_update_fprs_dirty(QFPREG(rd));
1794 #endif
1796 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1797 void (*gen)(TCGv_ptr))
1799 gen_op_load_fpr_QT0(QFPREG(rs1));
1800 gen_op_load_fpr_QT1(QFPREG(rs2));
1802 gen(cpu_env);
1804 gen_op_store_QT0_fpr(QFPREG(rd));
1805 gen_update_fprs_dirty(QFPREG(rd));
1808 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1809 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1811 TCGv_i64 dst;
1812 TCGv_i32 src1, src2;
1814 src1 = gen_load_fpr_F(dc, rs1);
1815 src2 = gen_load_fpr_F(dc, rs2);
1816 dst = gen_dest_fpr_D();
1818 gen(dst, cpu_env, src1, src2);
1820 gen_store_fpr_D(dc, rd, dst);
1823 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1824 void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1826 TCGv_i64 src1, src2;
1828 src1 = gen_load_fpr_D(dc, rs1);
1829 src2 = gen_load_fpr_D(dc, rs2);
1831 gen(cpu_env, src1, src2);
1833 gen_op_store_QT0_fpr(QFPREG(rd));
1834 gen_update_fprs_dirty(QFPREG(rd));
1837 #ifdef TARGET_SPARC64
1838 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1839 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1841 TCGv_i64 dst;
1842 TCGv_i32 src;
1844 src = gen_load_fpr_F(dc, rs);
1845 dst = gen_dest_fpr_D();
1847 gen(dst, cpu_env, src);
1849 gen_store_fpr_D(dc, rd, dst);
1851 #endif
1853 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1854 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1856 TCGv_i64 dst;
1857 TCGv_i32 src;
1859 src = gen_load_fpr_F(dc, rs);
1860 dst = gen_dest_fpr_D();
1862 gen(dst, cpu_env, src);
1864 gen_store_fpr_D(dc, rd, dst);
1867 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1868 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1870 TCGv_i32 dst;
1871 TCGv_i64 src;
1873 src = gen_load_fpr_D(dc, rs);
1874 dst = gen_dest_fpr_F();
1876 gen(dst, cpu_env, src);
1878 gen_store_fpr_F(dc, rd, dst);
1881 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1882 void (*gen)(TCGv_i32, TCGv_ptr))
1884 TCGv_i32 dst;
1886 gen_op_load_fpr_QT1(QFPREG(rs));
1887 dst = gen_dest_fpr_F();
1889 gen(dst, cpu_env);
1891 gen_store_fpr_F(dc, rd, dst);
1894 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1895 void (*gen)(TCGv_i64, TCGv_ptr))
1897 TCGv_i64 dst;
1899 gen_op_load_fpr_QT1(QFPREG(rs));
1900 dst = gen_dest_fpr_D();
1902 gen(dst, cpu_env);
1904 gen_store_fpr_D(dc, rd, dst);
1907 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1908 void (*gen)(TCGv_ptr, TCGv_i32))
1910 TCGv_i32 src;
1912 src = gen_load_fpr_F(dc, rs);
1914 gen(cpu_env, src);
1916 gen_op_store_QT0_fpr(QFPREG(rd));
1917 gen_update_fprs_dirty(QFPREG(rd));
1920 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1921 void (*gen)(TCGv_ptr, TCGv_i64))
1923 TCGv_i64 src;
1925 src = gen_load_fpr_D(dc, rs);
1927 gen(cpu_env, src);
1929 gen_op_store_QT0_fpr(QFPREG(rd));
1930 gen_update_fprs_dirty(QFPREG(rd));
1933 /* asi moves */
1934 #ifdef TARGET_SPARC64
1935 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1937 int asi;
1938 TCGv_i32 r_asi;
1940 if (IS_IMM) {
1941 r_asi = tcg_temp_new_i32();
1942 tcg_gen_mov_i32(r_asi, cpu_asi);
1943 } else {
1944 asi = GET_FIELD(insn, 19, 26);
1945 r_asi = tcg_const_i32(asi);
1947 return r_asi;
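/* Editor's note (illustrative): for the V9 alternate-space forms,
 * i == 1 (IS_IMM) means the ASI comes from the %asi register, while
 * i == 0 takes the 8-bit immediate ASI from insn<12:5>, which is
 * GET_FIELD(insn, 19, 26) in this file's MSB-first numbering. */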
1950 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1951 int sign)
1953 TCGv_i32 r_asi, r_size, r_sign;
1955 r_asi = gen_get_asi(insn, addr);
1956 r_size = tcg_const_i32(size);
1957 r_sign = tcg_const_i32(sign);
1958 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1959 tcg_temp_free_i32(r_sign);
1960 tcg_temp_free_i32(r_size);
1961 tcg_temp_free_i32(r_asi);
1964 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1966 TCGv_i32 r_asi, r_size;
1968 r_asi = gen_get_asi(insn, addr);
1969 r_size = tcg_const_i32(size);
1970 gen_helper_st_asi(addr, src, r_asi, r_size);
1971 tcg_temp_free_i32(r_size);
1972 tcg_temp_free_i32(r_asi);
1975 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1977 TCGv_i32 r_asi, r_size, r_rd;
1979 r_asi = gen_get_asi(insn, addr);
1980 r_size = tcg_const_i32(size);
1981 r_rd = tcg_const_i32(rd);
1982 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1983 tcg_temp_free_i32(r_rd);
1984 tcg_temp_free_i32(r_size);
1985 tcg_temp_free_i32(r_asi);
1988 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1990 TCGv_i32 r_asi, r_size, r_rd;
1992 r_asi = gen_get_asi(insn, addr);
1993 r_size = tcg_const_i32(size);
1994 r_rd = tcg_const_i32(rd);
1995 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1996 tcg_temp_free_i32(r_rd);
1997 tcg_temp_free_i32(r_size);
1998 tcg_temp_free_i32(r_asi);
2001 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2003 TCGv_i32 r_asi, r_size, r_sign;
2005 r_asi = gen_get_asi(insn, addr);
2006 r_size = tcg_const_i32(4);
2007 r_sign = tcg_const_i32(0);
2008 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2009 tcg_temp_free_i32(r_sign);
2010 gen_helper_st_asi(addr, dst, r_asi, r_size);
2011 tcg_temp_free_i32(r_size);
2012 tcg_temp_free_i32(r_asi);
2013 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2016 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2018 TCGv_i32 r_asi, r_rd;
2020 r_asi = gen_get_asi(insn, addr);
2021 r_rd = tcg_const_i32(rd);
2022 gen_helper_ldda_asi(addr, r_asi, r_rd);
2023 tcg_temp_free_i32(r_rd);
2024 tcg_temp_free_i32(r_asi);
2027 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2029 TCGv_i32 r_asi, r_size;
2031 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2032 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2033 r_asi = gen_get_asi(insn, addr);
2034 r_size = tcg_const_i32(8);
2035 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2036 tcg_temp_free_i32(r_size);
2037 tcg_temp_free_i32(r_asi);
2040 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2041 int rd)
2043 TCGv r_val1;
2044 TCGv_i32 r_asi;
2046 r_val1 = tcg_temp_new();
2047 gen_movl_reg_TN(rd, r_val1);
2048 r_asi = gen_get_asi(insn, addr);
2049 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
2050 tcg_temp_free_i32(r_asi);
2051 tcg_temp_free(r_val1);
2054 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2055 int rd)
2057 TCGv_i32 r_asi;
2059 gen_movl_reg_TN(rd, cpu_tmp64);
2060 r_asi = gen_get_asi(insn, addr);
2061 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
2062 tcg_temp_free_i32(r_asi);
2065 #elif !defined(CONFIG_USER_ONLY)
2067 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2068 int sign)
2070 TCGv_i32 r_asi, r_size, r_sign;
2072 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2073 r_size = tcg_const_i32(size);
2074 r_sign = tcg_const_i32(sign);
2075 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2076 tcg_temp_free(r_sign);
2077 tcg_temp_free(r_size);
2078 tcg_temp_free(r_asi);
2079 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2082 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2084 TCGv_i32 r_asi, r_size;
2086 tcg_gen_extu_tl_i64(cpu_tmp64, src);
2087 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2088 r_size = tcg_const_i32(size);
2089 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2090 tcg_temp_free(r_size);
2091 tcg_temp_free(r_asi);
2094 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2096 TCGv_i32 r_asi, r_size, r_sign;
2097 TCGv_i64 r_val;
2099 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2100 r_size = tcg_const_i32(4);
2101 r_sign = tcg_const_i32(0);
2102 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2103 tcg_temp_free(r_sign);
2104 r_val = tcg_temp_new_i64();
2105 tcg_gen_extu_tl_i64(r_val, dst);
2106 gen_helper_st_asi(addr, r_val, r_asi, r_size);
2107 tcg_temp_free_i64(r_val);
2108 tcg_temp_free(r_size);
2109 tcg_temp_free(r_asi);
2110 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2113 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2115 TCGv_i32 r_asi, r_size, r_sign;
2117 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2118 r_size = tcg_const_i32(8);
2119 r_sign = tcg_const_i32(0);
2120 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
2121 tcg_temp_free(r_sign);
2122 tcg_temp_free(r_size);
2123 tcg_temp_free(r_asi);
2124 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2125 gen_movl_TN_reg(rd + 1, cpu_tmp0);
2126 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2127 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2128 gen_movl_TN_reg(rd, hi);
2131 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2133 TCGv_i32 r_asi, r_size;
2135 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2136 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2137 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2138 r_size = tcg_const_i32(8);
2139 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
2140 tcg_temp_free(r_size);
2141 tcg_temp_free(r_asi);
2143 #endif
2145 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2146 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2148 TCGv_i64 r_val;
2149 TCGv_i32 r_asi, r_size;
2151 gen_ld_asi(dst, addr, insn, 1, 0);
2153 r_val = tcg_const_i64(0xffULL);
2154 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2155 r_size = tcg_const_i32(1);
2156 gen_helper_st_asi(addr, r_val, r_asi, r_size);
2157 tcg_temp_free_i32(r_size);
2158 tcg_temp_free_i32(r_asi);
2159 tcg_temp_free_i64(r_val);
2161 #endif
2163 static inline TCGv get_src1(unsigned int insn, TCGv def)
2165 TCGv r_rs1 = def;
2166 unsigned int rs1;
2168 rs1 = GET_FIELD(insn, 13, 17);
2169 if (rs1 == 0) {
2170 tcg_gen_movi_tl(def, 0);
2171 } else if (rs1 < 8) {
2172 r_rs1 = cpu_gregs[rs1];
2173 } else {
2174 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2176 return r_rs1;
2179 static inline TCGv get_src2(unsigned int insn, TCGv def)
2181 TCGv r_rs2 = def;
2183 if (IS_IMM) { /* immediate */
2184 target_long simm = GET_FIELDs(insn, 19, 31);
2185 tcg_gen_movi_tl(def, simm);
2186 } else { /* register */
2187 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2188 if (rs2 == 0) {
2189 tcg_gen_movi_tl(def, 0);
2190 } else if (rs2 < 8) {
2191 r_rs2 = cpu_gregs[rs2];
2192 } else {
2193 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2196 return r_rs2;
2199 #ifdef TARGET_SPARC64
2200 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2202 TCGv_i32 r_tl = tcg_temp_new_i32();
2204 /* load env->tl into r_tl */
2205 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2207 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2208 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2210 /* calculate offset to current trap state from env->ts, reuse r_tl */
2211 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2212 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
2214 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2216 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2217 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2218 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2219 tcg_temp_free_ptr(r_tl_tmp);
2222 tcg_temp_free_i32(r_tl);
2225 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2226 int width, bool cc, bool left)
2228 TCGv lo1, lo2, t1, t2;
2229 uint64_t amask, tabl, tabr;
2230 int shift, imask, omask;
2232 if (cc) {
2233 tcg_gen_mov_tl(cpu_cc_src, s1);
2234 tcg_gen_mov_tl(cpu_cc_src2, s2);
2235 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2236 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2237 dc->cc_op = CC_OP_SUB;
2240 /* Theory of operation: there are two tables, left and right (not to
2241 be confused with the left and right versions of the opcode). These
2242 are indexed by the low 3 bits of the inputs. To make things "easy",
2243 these tables are loaded into two constants, TABL and TABR below.
2244 The operation index = (input & imask) << shift calculates the index
2245 into the constant, while val = (table >> index) & omask calculates
2246 the value we're looking for. */
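/* Editor's worked example (illustrative), width 8, left table: if
 * (s1 & 7) == 2 the index is 2 << 3 == 16, and
 * (0x80c0e0f0f8fcfeffULL >> 16) & 0xff == 0xfc -- an edge mask with
 * the two lanes below the starting byte cleared. */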
2247 switch (width) {
2248 case 8:
2249 imask = 0x7;
2250 shift = 3;
2251 omask = 0xff;
2252 if (left) {
2253 tabl = 0x80c0e0f0f8fcfeffULL;
2254 tabr = 0xff7f3f1f0f070301ULL;
2255 } else {
2256 tabl = 0x0103070f1f3f7fffULL;
2257 tabr = 0xfffefcf8f0e0c080ULL;
2259 break;
2260 case 16:
2261 imask = 0x6;
2262 shift = 1;
2263 omask = 0xf;
2264 if (left) {
2265 tabl = 0x8cef;
2266 tabr = 0xf731;
2267 } else {
2268 tabl = 0x137f;
2269 tabr = 0xfec8;
2271 break;
2272 case 32:
2273 imask = 0x4;
2274 shift = 0;
2275 omask = 0x3;
2276 if (left) {
2277 tabl = (2 << 2) | 3;
2278 tabr = (3 << 2) | 1;
2279 } else {
2280 tabl = (1 << 2) | 3;
2281 tabr = (3 << 2) | 2;
2283 break;
2284 default:
2285 abort();
2288 lo1 = tcg_temp_new();
2289 lo2 = tcg_temp_new();
2290 tcg_gen_andi_tl(lo1, s1, imask);
2291 tcg_gen_andi_tl(lo2, s2, imask);
2292 tcg_gen_shli_tl(lo1, lo1, shift);
2293 tcg_gen_shli_tl(lo2, lo2, shift);
2295 t1 = tcg_const_tl(tabl);
2296 t2 = tcg_const_tl(tabr);
2297 tcg_gen_shr_tl(lo1, t1, lo1);
2298 tcg_gen_shr_tl(lo2, t2, lo2);
2299 tcg_gen_andi_tl(dst, lo1, omask);
2300 tcg_gen_andi_tl(lo2, lo2, omask);
2302 amask = -8;
2303 if (AM_CHECK(dc)) {
2304 amask &= 0xffffffffULL;
2306 tcg_gen_andi_tl(s1, s1, amask);
2307 tcg_gen_andi_tl(s2, s2, amask);
2309 /* We want to compute
2310 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2311 We've already done dst = lo1, so this reduces to
2312 dst &= (s1 == s2 ? -1 : lo2)
2313 Which we perform by
2314 lo2 |= -(s1 == s2)
2315 dst &= lo2
2317 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2318 tcg_gen_neg_tl(t1, t1);
2319 tcg_gen_or_tl(lo2, lo2, t1);
2320 tcg_gen_and_tl(dst, dst, lo2);
2322 tcg_temp_free(lo1);
2323 tcg_temp_free(lo2);
2324 tcg_temp_free(t1);
2325 tcg_temp_free(t2);
2328 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2330 TCGv tmp = tcg_temp_new();
2332 tcg_gen_add_tl(tmp, s1, s2);
2333 tcg_gen_andi_tl(dst, tmp, -8);
2334 if (left) {
2335 tcg_gen_neg_tl(tmp, tmp);
2337 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2339 tcg_temp_free(tmp);
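/* alignaddr returns (s1 + s2) & ~7 and deposits the low three bits of
   the sum (negated for the alignaddrl variant) into GSR.align, where
   gen_faligndata below picks them up. */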
2342 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2344 TCGv t1, t2, shift;
2346 t1 = tcg_temp_new();
2347 t2 = tcg_temp_new();
2348 shift = tcg_temp_new();
2350 tcg_gen_andi_tl(shift, gsr, 7);
2351 tcg_gen_shli_tl(shift, shift, 3);
2352 tcg_gen_shl_tl(t1, s1, shift);
2354 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2355 shift of (up to 63) followed by a constant shift of 1. */
2356 tcg_gen_xori_tl(shift, shift, 63);
2357 tcg_gen_shr_tl(t2, s2, shift);
2358 tcg_gen_shri_tl(t2, t2, 1);
2360 tcg_gen_or_tl(dst, t1, t2);
2362 tcg_temp_free(t1);
2363 tcg_temp_free(t2);
2364 tcg_temp_free(shift);
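/* Example: with GSR.align == 3 this computes
   dst = (s1 << 24) | (s2 >> 40), i.e. a byte-granular funnel shift
   across the s1:s2 register pair. */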
2366 #endif
2368 #define CHECK_IU_FEATURE(dc, FEATURE) \
2369 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2370 goto illegal_insn;
2371 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2372 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2373 goto nfpu_insn;
2375 /* before an instruction, dc->pc must be static */
2376 static void disas_sparc_insn(DisasContext * dc)
2378 unsigned int insn, opc, rs1, rs2, rd;
2379 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2380 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2381 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2382 target_long simm;
2384 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2385 tcg_gen_debug_insn_start(dc->pc);
2386 insn = ldl_code(dc->pc);
2387 opc = GET_FIELD(insn, 0, 1);
2389 rd = GET_FIELD(insn, 2, 6);
2391 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2392 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2394 switch (opc) {
2395 case 0: /* branches/sethi */
2397 unsigned int xop = GET_FIELD(insn, 7, 9);
2398 int32_t target;
2399 switch (xop) {
2400 #ifdef TARGET_SPARC64
2401 case 0x1: /* V9 BPcc */
2403 int cc;
2405 target = GET_FIELD_SP(insn, 0, 18);
2406 target = sign_extend(target, 19);
2407 target <<= 2;
2408 cc = GET_FIELD_SP(insn, 20, 21);
2409 if (cc == 0)
2410 do_branch(dc, target, insn, 0, cpu_cond);
2411 else if (cc == 2)
2412 do_branch(dc, target, insn, 1, cpu_cond);
2413 else
2414 goto illegal_insn;
2415 goto jmp_insn;
2417 case 0x3: /* V9 BPr */
2419 target = GET_FIELD_SP(insn, 0, 13) |
2420 (GET_FIELD_SP(insn, 20, 21) << 14);
2421 target = sign_extend(target, 16);
2422 target <<= 2;
2423 cpu_src1 = get_src1(insn, cpu_src1);
2424 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2425 goto jmp_insn;
2427 case 0x5: /* V9 FBPcc */
2429 int cc = GET_FIELD_SP(insn, 20, 21);
2430 if (gen_trap_ifnofpu(dc, cpu_cond))
2431 goto jmp_insn;
2432 target = GET_FIELD_SP(insn, 0, 18);
2433 target = sign_extend(target, 19);
2434 target <<= 2;
2435 do_fbranch(dc, target, insn, cc, cpu_cond);
2436 goto jmp_insn;
2438 #else
2439 case 0x7: /* CBN+x */
2441 goto ncp_insn;
2443 #endif
2444 case 0x2: /* BN+x */
2446 target = GET_FIELD(insn, 10, 31);
2447 target = sign_extend(target, 22);
2448 target <<= 2;
2449 do_branch(dc, target, insn, 0, cpu_cond);
2450 goto jmp_insn;
2452 case 0x6: /* FBN+x */
2454 if (gen_trap_ifnofpu(dc, cpu_cond))
2455 goto jmp_insn;
2456 target = GET_FIELD(insn, 10, 31);
2457 target = sign_extend(target, 22);
2458 target <<= 2;
2459 do_fbranch(dc, target, insn, 0, cpu_cond);
2460 goto jmp_insn;
2462 case 0x4: /* SETHI */
2463 if (rd) { // nop
2464 uint32_t value = GET_FIELD(insn, 10, 31);
2465 TCGv r_const;
2467 r_const = tcg_const_tl(value << 10);
2468 gen_movl_TN_reg(rd, r_const);
2469 tcg_temp_free(r_const);
2471 break;
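/* SETHI places imm22 in bits 31..10 of rd; the rd == %g0 form skipped
   above is the canonical nop encoding. */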
2472 case 0x0: /* UNIMPL */
2473 default:
2474 goto illegal_insn;
2476 break;
2478 break;
2479 case 1: /*CALL*/
2481 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2482 TCGv r_const;
2484 r_const = tcg_const_tl(dc->pc);
2485 gen_movl_TN_reg(15, r_const);
2486 tcg_temp_free(r_const);
2487 target += dc->pc;
2488 gen_mov_pc_npc(dc, cpu_cond);
2489 dc->npc = target;
2491 goto jmp_insn;
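/* CALL: the return address (the address of the CALL itself) was
   written to %o7 (r15) above; npc becomes pc + disp30 * 4, so the
   delay slot at the old npc still executes before the target. */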
2492 case 2: /* FPU & Logical Operations */
2494 unsigned int xop = GET_FIELD(insn, 7, 12);
2495 if (xop == 0x3a) { /* generate trap */
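/* The software trap number is (rs1 + rs2-or-imm) truncated to 7 bits
   (8 bits with the hypervisor feature in supervisor mode) and offset
   by TT_TRAP. */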
2496 int cond;
2498 cpu_src1 = get_src1(insn, cpu_src1);
2499 if (IS_IMM) {
2500 rs2 = GET_FIELD(insn, 25, 31);
2501 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2502 } else {
2503 rs2 = GET_FIELD(insn, 27, 31);
2504 if (rs2 != 0) {
2505 gen_movl_reg_TN(rs2, cpu_src2);
2506 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2507 } else
2508 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2511 cond = GET_FIELD(insn, 3, 6);
2512 if (cond == 0x8) { /* Trap Always */
2513 save_state(dc, cpu_cond);
2514 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2515 supervisor(dc))
2516 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2517 else
2518 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2519 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2520 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2522 if (rs2 == 0 &&
2523 dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2525 gen_helper_shutdown();
2527 } else {
2528 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2530 } else if (cond != 0) {
2531 TCGv r_cond = tcg_temp_new();
2532 int l1;
2533 #ifdef TARGET_SPARC64
2534 /* V9 icc/xcc */
2535 int cc = GET_FIELD_SP(insn, 11, 12);
2537 save_state(dc, cpu_cond);
2538 if (cc == 0)
2539 gen_cond(r_cond, 0, cond, dc);
2540 else if (cc == 2)
2541 gen_cond(r_cond, 1, cond, dc);
2542 else
2543 goto illegal_insn;
2544 #else
2545 save_state(dc, cpu_cond);
2546 gen_cond(r_cond, 0, cond, dc);
2547 #endif
2548 l1 = gen_new_label();
2549 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2551 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2552 supervisor(dc))
2553 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2554 else
2555 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2556 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2557 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2558 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2560 gen_set_label(l1);
2561 tcg_temp_free(r_cond);
2563 gen_op_next_insn();
2564 tcg_gen_exit_tb(0);
2565 dc->is_br = 1;
2566 goto jmp_insn;
2567 } else if (xop == 0x28) {
2568 rs1 = GET_FIELD(insn, 13, 17);
2569 switch(rs1) {
2570 case 0: /* rdy */
2571 #ifndef TARGET_SPARC64
2572 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2573 manual, rdy on the microSPARC
2574 II */
2575 case 0x0f: /* stbar in the SPARCv8 manual,
2576 rdy on the microSPARC II */
2577 case 0x10 ... 0x1f: /* implementation-dependent in the
2578 SPARCv8 manual, rdy on the
2579 microSPARC II */
2580 /* Read Asr17 */
2581 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2582 TCGv r_const;
2584 /* Read Asr17 for a Leon3 monoprocessor */
2585 r_const = tcg_const_tl((1 << 8)
2586 | (dc->def->nwindows - 1));
2587 gen_movl_TN_reg(rd, r_const);
2588 tcg_temp_free(r_const);
2589 break;
2591 #endif
2592 gen_movl_TN_reg(rd, cpu_y);
2593 break;
2594 #ifdef TARGET_SPARC64
2595 case 0x2: /* V9 rdccr */
2596 gen_helper_compute_psr(cpu_env);
2597 gen_helper_rdccr(cpu_dst, cpu_env);
2598 gen_movl_TN_reg(rd, cpu_dst);
2599 break;
2600 case 0x3: /* V9 rdasi */
2601 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2602 gen_movl_TN_reg(rd, cpu_dst);
2603 break;
2604 case 0x4: /* V9 rdtick */
2606 TCGv_ptr r_tickptr;
2608 r_tickptr = tcg_temp_new_ptr();
2609 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2610 offsetof(CPUState, tick));
2611 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2612 tcg_temp_free_ptr(r_tickptr);
2613 gen_movl_TN_reg(rd, cpu_dst);
2615 break;
2616 case 0x5: /* V9 rdpc */
2618 TCGv r_const;
2620 r_const = tcg_const_tl(dc->pc);
2621 gen_movl_TN_reg(rd, r_const);
2622 tcg_temp_free(r_const);
2624 break;
2625 case 0x6: /* V9 rdfprs */
2626 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2627 gen_movl_TN_reg(rd, cpu_dst);
2628 break;
2629 case 0xf: /* V9 membar */
2630 break; /* no effect */
2631 case 0x13: /* Graphics Status */
2632 if (gen_trap_ifnofpu(dc, cpu_cond))
2633 goto jmp_insn;
2634 gen_movl_TN_reg(rd, cpu_gsr);
2635 break;
2636 case 0x16: /* Softint */
2637 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2638 gen_movl_TN_reg(rd, cpu_dst);
2639 break;
2640 case 0x17: /* Tick compare */
2641 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2642 break;
2643 case 0x18: /* System tick */
2645 TCGv_ptr r_tickptr;
2647 r_tickptr = tcg_temp_new_ptr();
2648 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2649 offsetof(CPUState, stick));
2650 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2651 tcg_temp_free_ptr(r_tickptr);
2652 gen_movl_TN_reg(rd, cpu_dst);
2654 break;
2655 case 0x19: /* System tick compare */
2656 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2657 break;
2658 case 0x10: /* Performance Control */
2659 case 0x11: /* Performance Instrumentation Counter */
2660 case 0x12: /* Dispatch Control */
2661 case 0x14: /* Softint set, WO */
2662 case 0x15: /* Softint clear, WO */
2663 #endif
2664 default:
2665 goto illegal_insn;
2667 #if !defined(CONFIG_USER_ONLY)
2668 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2669 #ifndef TARGET_SPARC64
2670 if (!supervisor(dc))
2671 goto priv_insn;
2672 gen_helper_compute_psr(cpu_env);
2673 dc->cc_op = CC_OP_FLAGS;
2674 gen_helper_rdpsr(cpu_dst, cpu_env);
2675 #else
2676 CHECK_IU_FEATURE(dc, HYPV);
2677 if (!hypervisor(dc))
2678 goto priv_insn;
2679 rs1 = GET_FIELD(insn, 13, 17);
2680 switch (rs1) {
2681 case 0: // hpstate
2682 // gen_op_rdhpstate();
2683 break;
2684 case 1: // htstate
2685 // gen_op_rdhtstate();
2686 break;
2687 case 3: // hintp
2688 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2689 break;
2690 case 5: // htba
2691 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2692 break;
2693 case 6: // hver
2694 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2695 break;
2696 case 31: // hstick_cmpr
2697 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2698 break;
2699 default:
2700 goto illegal_insn;
2702 #endif
2703 gen_movl_TN_reg(rd, cpu_dst);
2704 break;
2705 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2706 if (!supervisor(dc))
2707 goto priv_insn;
2708 #ifdef TARGET_SPARC64
2709 rs1 = GET_FIELD(insn, 13, 17);
2710 switch (rs1) {
2711 case 0: // tpc
2713 TCGv_ptr r_tsptr;
2715 r_tsptr = tcg_temp_new_ptr();
2716 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2717 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2718 offsetof(trap_state, tpc));
2719 tcg_temp_free_ptr(r_tsptr);
2721 break;
2722 case 1: // tnpc
2724 TCGv_ptr r_tsptr;
2726 r_tsptr = tcg_temp_new_ptr();
2727 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2728 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2729 offsetof(trap_state, tnpc));
2730 tcg_temp_free_ptr(r_tsptr);
2732 break;
2733 case 2: // tstate
2735 TCGv_ptr r_tsptr;
2737 r_tsptr = tcg_temp_new_ptr();
2738 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2739 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2740 offsetof(trap_state, tstate));
2741 tcg_temp_free_ptr(r_tsptr);
2743 break;
2744 case 3: // tt
2746 TCGv_ptr r_tsptr;
2748 r_tsptr = tcg_temp_new_ptr();
2749 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2750 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2751 offsetof(trap_state, tt));
2752 tcg_temp_free_ptr(r_tsptr);
2753 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2755 break;
2756 case 4: // tick
2758 TCGv_ptr r_tickptr;
2760 r_tickptr = tcg_temp_new_ptr();
2761 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2762 offsetof(CPUState, tick));
2763 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2764 gen_movl_TN_reg(rd, cpu_tmp0);
2765 tcg_temp_free_ptr(r_tickptr);
2767 break;
2768 case 5: // tba
2769 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2770 break;
2771 case 6: // pstate
2772 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2773 offsetof(CPUSPARCState, pstate));
2774 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2775 break;
2776 case 7: // tl
2777 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2778 offsetof(CPUSPARCState, tl));
2779 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2780 break;
2781 case 8: // pil
2782 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2783 offsetof(CPUSPARCState, psrpil));
2784 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2785 break;
2786 case 9: // cwp
2787 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2788 break;
2789 case 10: // cansave
2790 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2791 offsetof(CPUSPARCState, cansave));
2792 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2793 break;
2794 case 11: // canrestore
2795 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2796 offsetof(CPUSPARCState, canrestore));
2797 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2798 break;
2799 case 12: // cleanwin
2800 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2801 offsetof(CPUSPARCState, cleanwin));
2802 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2803 break;
2804 case 13: // otherwin
2805 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2806 offsetof(CPUSPARCState, otherwin));
2807 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2808 break;
2809 case 14: // wstate
2810 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2811 offsetof(CPUSPARCState, wstate));
2812 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2813 break;
2814 case 16: // UA2005 gl
2815 CHECK_IU_FEATURE(dc, GL);
2816 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2817 offsetof(CPUSPARCState, gl));
2818 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2819 break;
2820 case 26: // UA2005 strand status
2821 CHECK_IU_FEATURE(dc, HYPV);
2822 if (!hypervisor(dc))
2823 goto priv_insn;
2824 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2825 break;
2826 case 31: // ver
2827 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2828 break;
2829 case 15: // fq
2830 default:
2831 goto illegal_insn;
2833 #else
2834 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2835 #endif
2836 gen_movl_TN_reg(rd, cpu_tmp0);
2837 break;
2838 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2839 #ifdef TARGET_SPARC64
2840 save_state(dc, cpu_cond);
2841 gen_helper_flushw(cpu_env);
2842 #else
2843 if (!supervisor(dc))
2844 goto priv_insn;
2845 gen_movl_TN_reg(rd, cpu_tbr);
2846 #endif
2847 break;
2848 #endif
2849 } else if (xop == 0x34) { /* FPU Operations */
2850 if (gen_trap_ifnofpu(dc, cpu_cond))
2851 goto jmp_insn;
2852 gen_op_clear_ieee_excp_and_FTT();
2853 rs1 = GET_FIELD(insn, 13, 17);
2854 rs2 = GET_FIELD(insn, 27, 31);
2855 xop = GET_FIELD(insn, 18, 26);
2856 save_state(dc, cpu_cond);
2857 switch (xop) {
2858 case 0x1: /* fmovs */
2859 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2860 gen_store_fpr_F(dc, rd, cpu_src1_32);
2861 break;
2862 case 0x5: /* fnegs */
2863 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2864 break;
2865 case 0x9: /* fabss */
2866 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2867 break;
2868 case 0x29: /* fsqrts */
2869 CHECK_FPU_FEATURE(dc, FSQRT);
2870 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2871 break;
2872 case 0x2a: /* fsqrtd */
2873 CHECK_FPU_FEATURE(dc, FSQRT);
2874 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2875 break;
2876 case 0x2b: /* fsqrtq */
2877 CHECK_FPU_FEATURE(dc, FLOAT128);
2878 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2879 break;
2880 case 0x41: /* fadds */
2881 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2882 break;
2883 case 0x42: /* faddd */
2884 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2885 break;
2886 case 0x43: /* faddq */
2887 CHECK_FPU_FEATURE(dc, FLOAT128);
2888 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2889 break;
2890 case 0x45: /* fsubs */
2891 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2892 break;
2893 case 0x46: /* fsubd */
2894 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2895 break;
2896 case 0x47: /* fsubq */
2897 CHECK_FPU_FEATURE(dc, FLOAT128);
2898 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2899 break;
2900 case 0x49: /* fmuls */
2901 CHECK_FPU_FEATURE(dc, FMUL);
2902 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2903 break;
2904 case 0x4a: /* fmuld */
2905 CHECK_FPU_FEATURE(dc, FMUL);
2906 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2907 break;
2908 case 0x4b: /* fmulq */
2909 CHECK_FPU_FEATURE(dc, FLOAT128);
2910 CHECK_FPU_FEATURE(dc, FMUL);
2911 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2912 break;
2913 case 0x4d: /* fdivs */
2914 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2915 break;
2916 case 0x4e: /* fdivd */
2917 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2918 break;
2919 case 0x4f: /* fdivq */
2920 CHECK_FPU_FEATURE(dc, FLOAT128);
2921 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2922 break;
2923 case 0x69: /* fsmuld */
2924 CHECK_FPU_FEATURE(dc, FSMULD);
2925 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2926 break;
2927 case 0x6e: /* fdmulq */
2928 CHECK_FPU_FEATURE(dc, FLOAT128);
2929 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2930 break;
2931 case 0xc4: /* fitos */
2932 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2933 break;
2934 case 0xc6: /* fdtos */
2935 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2936 break;
2937 case 0xc7: /* fqtos */
2938 CHECK_FPU_FEATURE(dc, FLOAT128);
2939 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2940 break;
2941 case 0xc8: /* fitod */
2942 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2943 break;
2944 case 0xc9: /* fstod */
2945 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2946 break;
2947 case 0xcb: /* fqtod */
2948 CHECK_FPU_FEATURE(dc, FLOAT128);
2949 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2950 break;
2951 case 0xcc: /* fitoq */
2952 CHECK_FPU_FEATURE(dc, FLOAT128);
2953 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2954 break;
2955 case 0xcd: /* fstoq */
2956 CHECK_FPU_FEATURE(dc, FLOAT128);
2957 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2958 break;
2959 case 0xce: /* fdtoq */
2960 CHECK_FPU_FEATURE(dc, FLOAT128);
2961 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2962 break;
2963 case 0xd1: /* fstoi */
2964 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2965 break;
2966 case 0xd2: /* fdtoi */
2967 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2968 break;
2969 case 0xd3: /* fqtoi */
2970 CHECK_FPU_FEATURE(dc, FLOAT128);
2971 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2972 break;
2973 #ifdef TARGET_SPARC64
2974 case 0x2: /* V9 fmovd */
2975 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2976 gen_store_fpr_D(dc, rd, cpu_src1_64);
2977 break;
2978 case 0x3: /* V9 fmovq */
2979 CHECK_FPU_FEATURE(dc, FLOAT128);
2980 gen_move_Q(rd, rs2);
2981 break;
2982 case 0x6: /* V9 fnegd */
2983 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
2984 break;
2985 case 0x7: /* V9 fnegq */
2986 CHECK_FPU_FEATURE(dc, FLOAT128);
2987 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
2988 break;
2989 case 0xa: /* V9 fabsd */
2990 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
2991 break;
2992 case 0xb: /* V9 fabsq */
2993 CHECK_FPU_FEATURE(dc, FLOAT128);
2994 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
2995 break;
2996 case 0x81: /* V9 fstox */
2997 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
2998 break;
2999 case 0x82: /* V9 fdtox */
3000 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3001 break;
3002 case 0x83: /* V9 fqtox */
3003 CHECK_FPU_FEATURE(dc, FLOAT128);
3004 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3005 break;
3006 case 0x84: /* V9 fxtos */
3007 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3008 break;
3009 case 0x88: /* V9 fxtod */
3010 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3011 break;
3012 case 0x8c: /* V9 fxtoq */
3013 CHECK_FPU_FEATURE(dc, FLOAT128);
3014 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3015 break;
3016 #endif
3017 default:
3018 goto illegal_insn;
3020 } else if (xop == 0x35) { /* FPU Operations */
3021 #ifdef TARGET_SPARC64
3022 int cond;
3023 #endif
3024 if (gen_trap_ifnofpu(dc, cpu_cond))
3025 goto jmp_insn;
3026 gen_op_clear_ieee_excp_and_FTT();
3027 rs1 = GET_FIELD(insn, 13, 17);
3028 rs2 = GET_FIELD(insn, 27, 31);
3029 xop = GET_FIELD(insn, 18, 26);
3030 save_state(dc, cpu_cond);
3031 #ifdef TARGET_SPARC64
3032 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
3033 int l1;
3035 l1 = gen_new_label();
3036 cond = GET_FIELD_SP(insn, 14, 17);
3037 cpu_src1 = get_src1(insn, cpu_src1);
3038 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3039 0, l1);
3040 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3041 gen_store_fpr_F(dc, rd, cpu_src1_32);
3042 gen_set_label(l1);
3043 break;
3044 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3045 int l1;
3047 l1 = gen_new_label();
3048 cond = GET_FIELD_SP(insn, 14, 17);
3049 cpu_src1 = get_src1(insn, cpu_src1);
3050 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3051 0, l1);
3052 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3053 gen_store_fpr_D(dc, rd, cpu_src1_64);
3054 gen_set_label(l1);
3055 break;
3056 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3057 int l1;
3059 CHECK_FPU_FEATURE(dc, FLOAT128);
3060 l1 = gen_new_label();
3061 cond = GET_FIELD_SP(insn, 14, 17);
3062 cpu_src1 = get_src1(insn, cpu_src1);
3063 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3064 0, l1);
3065 gen_move_Q(rd, rs2);
3066 gen_set_label(l1);
3067 break;
3069 #endif
3070 switch (xop) {
3071 #ifdef TARGET_SPARC64
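/* The FMOV*CC macros share one pattern: evaluate the condition into
   r_cond and branch over the register copy when it is false, so only
   the taken case stores to the destination. */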
3072 #define FMOVSCC(fcc) \
3074 TCGv r_cond; \
3075 int l1; \
3077 l1 = gen_new_label(); \
3078 r_cond = tcg_temp_new(); \
3079 cond = GET_FIELD_SP(insn, 14, 17); \
3080 gen_fcond(r_cond, fcc, cond); \
3081 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3082 0, l1); \
3083 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3084 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3085 gen_set_label(l1); \
3086 tcg_temp_free(r_cond); \
3088 #define FMOVDCC(fcc) \
3090 TCGv r_cond; \
3091 int l1; \
3093 l1 = gen_new_label(); \
3094 r_cond = tcg_temp_new(); \
3095 cond = GET_FIELD_SP(insn, 14, 17); \
3096 gen_fcond(r_cond, fcc, cond); \
3097 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3098 0, l1); \
3099 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3100 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3101 gen_set_label(l1); \
3102 tcg_temp_free(r_cond); \
3104 #define FMOVQCC(fcc) \
3106 TCGv r_cond; \
3107 int l1; \
3109 l1 = gen_new_label(); \
3110 r_cond = tcg_temp_new(); \
3111 cond = GET_FIELD_SP(insn, 14, 17); \
3112 gen_fcond(r_cond, fcc, cond); \
3113 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3114 0, l1); \
3115 gen_move_Q(rd, rs2); \
3116 gen_set_label(l1); \
3117 tcg_temp_free(r_cond); \
3119 case 0x001: /* V9 fmovscc %fcc0 */
3120 FMOVSCC(0);
3121 break;
3122 case 0x002: /* V9 fmovdcc %fcc0 */
3123 FMOVDCC(0);
3124 break;
3125 case 0x003: /* V9 fmovqcc %fcc0 */
3126 CHECK_FPU_FEATURE(dc, FLOAT128);
3127 FMOVQCC(0);
3128 break;
3129 case 0x041: /* V9 fmovscc %fcc1 */
3130 FMOVSCC(1);
3131 break;
3132 case 0x042: /* V9 fmovdcc %fcc1 */
3133 FMOVDCC(1);
3134 break;
3135 case 0x043: /* V9 fmovqcc %fcc1 */
3136 CHECK_FPU_FEATURE(dc, FLOAT128);
3137 FMOVQCC(1);
3138 break;
3139 case 0x081: /* V9 fmovscc %fcc2 */
3140 FMOVSCC(2);
3141 break;
3142 case 0x082: /* V9 fmovdcc %fcc2 */
3143 FMOVDCC(2);
3144 break;
3145 case 0x083: /* V9 fmovqcc %fcc2 */
3146 CHECK_FPU_FEATURE(dc, FLOAT128);
3147 FMOVQCC(2);
3148 break;
3149 case 0x0c1: /* V9 fmovscc %fcc3 */
3150 FMOVSCC(3);
3151 break;
3152 case 0x0c2: /* V9 fmovdcc %fcc3 */
3153 FMOVDCC(3);
3154 break;
3155 case 0x0c3: /* V9 fmovqcc %fcc3 */
3156 CHECK_FPU_FEATURE(dc, FLOAT128);
3157 FMOVQCC(3);
3158 break;
3159 #undef FMOVSCC
3160 #undef FMOVDCC
3161 #undef FMOVQCC
3162 #define FMOVSCC(icc) \
3164 TCGv r_cond; \
3165 int l1; \
3167 l1 = gen_new_label(); \
3168 r_cond = tcg_temp_new(); \
3169 cond = GET_FIELD_SP(insn, 14, 17); \
3170 gen_cond(r_cond, icc, cond, dc); \
3171 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3172 0, l1); \
3173 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3174 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3175 gen_set_label(l1); \
3176 tcg_temp_free(r_cond); \
3178 #define FMOVDCC(icc) \
3180 TCGv r_cond; \
3181 int l1; \
3183 l1 = gen_new_label(); \
3184 r_cond = tcg_temp_new(); \
3185 cond = GET_FIELD_SP(insn, 14, 17); \
3186 gen_cond(r_cond, icc, cond, dc); \
3187 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3188 0, l1); \
3189 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3190 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3191 gen_update_fprs_dirty(DFPREG(rd)); \
3192 gen_set_label(l1); \
3193 tcg_temp_free(r_cond); \
3195 #define FMOVQCC(icc) \
3197 TCGv r_cond; \
3198 int l1; \
3200 l1 = gen_new_label(); \
3201 r_cond = tcg_temp_new(); \
3202 cond = GET_FIELD_SP(insn, 14, 17); \
3203 gen_cond(r_cond, icc, cond, dc); \
3204 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3205 0, l1); \
3206 gen_move_Q(rd, rs2); \
3207 gen_set_label(l1); \
3208 tcg_temp_free(r_cond); \
3211 case 0x101: /* V9 fmovscc %icc */
3212 FMOVSCC(0);
3213 break;
3214 case 0x102: /* V9 fmovdcc %icc */
3215 FMOVDCC(0);
3216 break;
3217 case 0x103: /* V9 fmovqcc %icc */
3218 CHECK_FPU_FEATURE(dc, FLOAT128);
3219 FMOVQCC(0);
3220 break;
3221 case 0x181: /* V9 fmovscc %xcc */
3222 FMOVSCC(1);
3223 break;
3224 case 0x182: /* V9 fmovdcc %xcc */
3225 FMOVDCC(1);
3226 break;
3227 case 0x183: /* V9 fmovqcc %xcc */
3228 CHECK_FPU_FEATURE(dc, FLOAT128);
3229 FMOVQCC(1);
3230 break;
3231 #undef FMOVSCC
3232 #undef FMOVDCC
3233 #undef FMOVQCC
3234 #endif
3235 case 0x51: /* fcmps, V9 %fcc */
3236 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3237 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3238 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3239 break;
3240 case 0x52: /* fcmpd, V9 %fcc */
3241 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3242 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3243 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3244 break;
3245 case 0x53: /* fcmpq, V9 %fcc */
3246 CHECK_FPU_FEATURE(dc, FLOAT128);
3247 gen_op_load_fpr_QT0(QFPREG(rs1));
3248 gen_op_load_fpr_QT1(QFPREG(rs2));
3249 gen_op_fcmpq(rd & 3);
3250 break;
3251 case 0x55: /* fcmpes, V9 %fcc */
3252 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3253 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3254 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3255 break;
3256 case 0x56: /* fcmped, V9 %fcc */
3257 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3258 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3259 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3260 break;
3261 case 0x57: /* fcmpeq, V9 %fcc */
3262 CHECK_FPU_FEATURE(dc, FLOAT128);
3263 gen_op_load_fpr_QT0(QFPREG(rs1));
3264 gen_op_load_fpr_QT1(QFPREG(rs2));
3265 gen_op_fcmpeq(rd & 3);
3266 break;
3267 default:
3268 goto illegal_insn;
3270 } else if (xop == 0x2) {
3271 // clr/mov shortcut
3273 rs1 = GET_FIELD(insn, 13, 17);
3274 if (rs1 == 0) {
3276 // or %g0, x, y -> y = x, i.e. a plain mov
3276 if (IS_IMM) { /* immediate */
3277 TCGv r_const;
3279 simm = GET_FIELDs(insn, 19, 31);
3280 r_const = tcg_const_tl(simm);
3281 gen_movl_TN_reg(rd, r_const);
3282 tcg_temp_free(r_const);
3283 } else { /* register */
3284 rs2 = GET_FIELD(insn, 27, 31);
3285 gen_movl_reg_TN(rs2, cpu_dst);
3286 gen_movl_TN_reg(rd, cpu_dst);
3288 } else {
3289 cpu_src1 = get_src1(insn, cpu_src1);
3290 if (IS_IMM) { /* immediate */
3291 simm = GET_FIELDs(insn, 19, 31);
3292 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3293 gen_movl_TN_reg(rd, cpu_dst);
3294 } else { /* register */
3295 // or x, %g0, y -> y = x, i.e. a plain mov
3296 rs2 = GET_FIELD(insn, 27, 31);
3297 if (rs2 != 0) {
3298 gen_movl_reg_TN(rs2, cpu_src2);
3299 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3300 gen_movl_TN_reg(rd, cpu_dst);
3301 } else
3302 gen_movl_TN_reg(rd, cpu_src1);
3305 #ifdef TARGET_SPARC64
3306 } else if (xop == 0x25) { /* sll, V9 sllx */
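/* Insn bit 12 is the V9 X (64-bit) bit: sllx masks the shift count to
   6 bits, plain sll to 5. */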
3307 cpu_src1 = get_src1(insn, cpu_src1);
3308 if (IS_IMM) { /* immediate */
3309 simm = GET_FIELDs(insn, 20, 31);
3310 if (insn & (1 << 12)) {
3311 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3312 } else {
3313 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3315 } else { /* register */
3316 rs2 = GET_FIELD(insn, 27, 31);
3317 gen_movl_reg_TN(rs2, cpu_src2);
3318 if (insn & (1 << 12)) {
3319 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3320 } else {
3321 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3323 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3325 gen_movl_TN_reg(rd, cpu_dst);
3326 } else if (xop == 0x26) { /* srl, V9 srlx */
3327 cpu_src1 = get_src1(insn, cpu_src1);
3328 if (IS_IMM) { /* immediate */
3329 simm = GET_FIELDs(insn, 20, 31);
3330 if (insn & (1 << 12)) {
3331 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3332 } else {
3333 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3334 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3336 } else { /* register */
3337 rs2 = GET_FIELD(insn, 27, 31);
3338 gen_movl_reg_TN(rs2, cpu_src2);
3339 if (insn & (1 << 12)) {
3340 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3341 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3342 } else {
3343 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3344 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3345 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3348 gen_movl_TN_reg(rd, cpu_dst);
3349 } else if (xop == 0x27) { /* sra, V9 srax */
3350 cpu_src1 = get_src1(insn, cpu_src1);
3351 if (IS_IMM) { /* immediate */
3352 simm = GET_FIELDs(insn, 20, 31);
3353 if (insn & (1 << 12)) {
3354 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3355 } else {
3356 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3357 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3358 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3360 } else { /* register */
3361 rs2 = GET_FIELD(insn, 27, 31);
3362 gen_movl_reg_TN(rs2, cpu_src2);
3363 if (insn & (1 << 12)) {
3364 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3365 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3366 } else {
3367 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3368 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3369 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3370 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3373 gen_movl_TN_reg(rd, cpu_dst);
3374 #endif
3375 } else if (xop < 0x36) {
3376 if (xop < 0x20) {
3377 cpu_src1 = get_src1(insn, cpu_src1);
3378 cpu_src2 = get_src2(insn, cpu_src2);
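/* Bit 4 of xop selects the -cc variant of each ALU op.  Flags are
   tracked lazily: the operands/result go to cpu_cc_src, cpu_cc_src2
   and cpu_cc_dst, and cpu_cc_op records how to recompute the PSR
   later. */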
3379 switch (xop & ~0x10) {
3380 case 0x0: /* add */
3381 if (IS_IMM) {
3382 simm = GET_FIELDs(insn, 19, 31);
3383 if (xop & 0x10) {
3384 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3385 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3386 dc->cc_op = CC_OP_ADD;
3387 } else {
3388 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3390 } else {
3391 if (xop & 0x10) {
3392 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3393 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3394 dc->cc_op = CC_OP_ADD;
3395 } else {
3396 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3399 break;
3400 case 0x1: /* and */
3401 if (IS_IMM) {
3402 simm = GET_FIELDs(insn, 19, 31);
3403 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3404 } else {
3405 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3407 if (xop & 0x10) {
3408 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3409 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3410 dc->cc_op = CC_OP_LOGIC;
3412 break;
3413 case 0x2: /* or */
3414 if (IS_IMM) {
3415 simm = GET_FIELDs(insn, 19, 31);
3416 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3417 } else {
3418 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3420 if (xop & 0x10) {
3421 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3422 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3423 dc->cc_op = CC_OP_LOGIC;
3425 break;
3426 case 0x3: /* xor */
3427 if (IS_IMM) {
3428 simm = GET_FIELDs(insn, 19, 31);
3429 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3430 } else {
3431 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3433 if (xop & 0x10) {
3434 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3435 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3436 dc->cc_op = CC_OP_LOGIC;
3438 break;
3439 case 0x4: /* sub */
3440 if (IS_IMM) {
3441 simm = GET_FIELDs(insn, 19, 31);
3442 if (xop & 0x10) {
3443 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3444 } else {
3445 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3447 } else {
3448 if (xop & 0x10) {
3449 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3450 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3451 dc->cc_op = CC_OP_SUB;
3452 } else {
3453 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3456 break;
3457 case 0x5: /* andn */
3458 if (IS_IMM) {
3459 simm = GET_FIELDs(insn, 19, 31);
3460 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3461 } else {
3462 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3464 if (xop & 0x10) {
3465 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3466 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3467 dc->cc_op = CC_OP_LOGIC;
3469 break;
3470 case 0x6: /* orn */
3471 if (IS_IMM) {
3472 simm = GET_FIELDs(insn, 19, 31);
3473 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3474 } else {
3475 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3477 if (xop & 0x10) {
3478 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3479 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3480 dc->cc_op = CC_OP_LOGIC;
3482 break;
3483 case 0x7: /* xorn */
3484 if (IS_IMM) {
3485 simm = GET_FIELDs(insn, 19, 31);
3486 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3487 } else {
3488 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3489 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3491 if (xop & 0x10) {
3492 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3493 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3494 dc->cc_op = CC_OP_LOGIC;
3496 break;
3497 case 0x8: /* addx, V9 addc */
3498 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3499 (xop & 0x10));
3500 break;
3501 #ifdef TARGET_SPARC64
3502 case 0x9: /* V9 mulx */
3503 if (IS_IMM) {
3504 simm = GET_FIELDs(insn, 19, 31);
3505 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3506 } else {
3507 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3509 break;
3510 #endif
3511 case 0xa: /* umul */
3512 CHECK_IU_FEATURE(dc, MUL);
3513 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3514 if (xop & 0x10) {
3515 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3516 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3517 dc->cc_op = CC_OP_LOGIC;
3519 break;
3520 case 0xb: /* smul */
3521 CHECK_IU_FEATURE(dc, MUL);
3522 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3523 if (xop & 0x10) {
3524 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3525 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3526 dc->cc_op = CC_OP_LOGIC;
3528 break;
3529 case 0xc: /* subx, V9 subc */
3530 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3531 (xop & 0x10));
3532 break;
3533 #ifdef TARGET_SPARC64
3534 case 0xd: /* V9 udivx */
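/* The operands are copied into local temps because
   gen_trap_ifdivzero_tl emits a branch, across which ordinary TCG
   temps do not survive. */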
3536 TCGv r_temp1, r_temp2;
3537 r_temp1 = tcg_temp_local_new();
3538 r_temp2 = tcg_temp_local_new();
3539 tcg_gen_mov_tl(r_temp1, cpu_src1);
3540 tcg_gen_mov_tl(r_temp2, cpu_src2);
3541 gen_trap_ifdivzero_tl(r_temp2);
3542 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3543 tcg_temp_free(r_temp1);
3544 tcg_temp_free(r_temp2);
3546 break;
3547 #endif
3548 case 0xe: /* udiv */
3549 CHECK_IU_FEATURE(dc, DIV);
3550 if (xop & 0x10) {
3551 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3552 cpu_src2);
3553 dc->cc_op = CC_OP_DIV;
3554 } else {
3555 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3556 cpu_src2);
3558 break;
3559 case 0xf: /* sdiv */
3560 CHECK_IU_FEATURE(dc, DIV);
3561 if (xop & 0x10) {
3562 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3563 cpu_src2);
3564 dc->cc_op = CC_OP_DIV;
3565 } else {
3566 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3567 cpu_src2);
3569 break;
3570 default:
3571 goto illegal_insn;
3573 gen_movl_TN_reg(rd, cpu_dst);
3574 } else {
3575 cpu_src1 = get_src1(insn, cpu_src1);
3576 cpu_src2 = get_src2(insn, cpu_src2);
3577 switch (xop) {
3578 case 0x20: /* taddcc */
3579 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3580 gen_movl_TN_reg(rd, cpu_dst);
3581 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3582 dc->cc_op = CC_OP_TADD;
3583 break;
3584 case 0x21: /* tsubcc */
3585 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3586 gen_movl_TN_reg(rd, cpu_dst);
3587 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3588 dc->cc_op = CC_OP_TSUB;
3589 break;
3590 case 0x22: /* taddcctv */
3591 save_state(dc, cpu_cond);
3592 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3593 gen_movl_TN_reg(rd, cpu_dst);
3594 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3595 dc->cc_op = CC_OP_TADDTV;
3596 break;
3597 case 0x23: /* tsubcctv */
3598 save_state(dc, cpu_cond);
3599 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3600 gen_movl_TN_reg(rd, cpu_dst);
3601 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3602 dc->cc_op = CC_OP_TSUBTV;
3603 break;
3604 case 0x24: /* mulscc */
3605 gen_helper_compute_psr(cpu_env);
3606 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3607 gen_movl_TN_reg(rd, cpu_dst);
3608 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3609 dc->cc_op = CC_OP_ADD;
3610 break;
3611 #ifndef TARGET_SPARC64
3612 case 0x25: /* sll */
3613 if (IS_IMM) { /* immediate */
3614 simm = GET_FIELDs(insn, 20, 31);
3615 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3616 } else { /* register */
3617 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3618 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3620 gen_movl_TN_reg(rd, cpu_dst);
3621 break;
3622 case 0x26: /* srl */
3623 if (IS_IMM) { /* immediate */
3624 simm = GET_FIELDs(insn, 20, 31);
3625 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3626 } else { /* register */
3627 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3628 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3630 gen_movl_TN_reg(rd, cpu_dst);
3631 break;
3632 case 0x27: /* sra */
3633 if (IS_IMM) { /* immediate */
3634 simm = GET_FIELDs(insn, 20, 31);
3635 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3636 } else { /* register */
3637 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3638 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3640 gen_movl_TN_reg(rd, cpu_dst);
3641 break;
3642 #endif
3643 case 0x30:
3645 switch(rd) {
3646 case 0: /* wry */
3647 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3648 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3649 break;
3650 #ifndef TARGET_SPARC64
3651 case 0x01 ... 0x0f: /* undefined in the
3652 SPARCv8 manual, nop
3653 on the microSPARC
3654 II */
3655 case 0x10 ... 0x1f: /* implementation-dependent
3656 in the SPARCv8
3657 manual, nop on the
3658 microSPARC II */
3659 break;
3660 #else
3661 case 0x2: /* V9 wrccr */
3662 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3663 gen_helper_wrccr(cpu_env, cpu_dst);
3664 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3665 dc->cc_op = CC_OP_FLAGS;
3666 break;
3667 case 0x3: /* V9 wrasi */
3668 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3669 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3670 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3671 break;
3672 case 0x6: /* V9 wrfprs */
3673 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3674 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3675 save_state(dc, cpu_cond);
3676 gen_op_next_insn();
3677 tcg_gen_exit_tb(0);
3678 dc->is_br = 1;
3679 break;
3680 case 0xf: /* V9 sir, nop if user */
3681 #if !defined(CONFIG_USER_ONLY)
3682 if (supervisor(dc)) {
3683 ; // XXX
3685 #endif
3686 break;
3687 case 0x13: /* Graphics Status */
3688 if (gen_trap_ifnofpu(dc, cpu_cond))
3689 goto jmp_insn;
3690 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3691 break;
3692 case 0x14: /* Softint set */
3693 if (!supervisor(dc))
3694 goto illegal_insn;
3695 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3696 gen_helper_set_softint(cpu_env, cpu_tmp64);
3697 break;
3698 case 0x15: /* Softint clear */
3699 if (!supervisor(dc))
3700 goto illegal_insn;
3701 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3702 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3703 break;
3704 case 0x16: /* Softint write */
3705 if (!supervisor(dc))
3706 goto illegal_insn;
3707 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3708 gen_helper_write_softint(cpu_env, cpu_tmp64);
3709 break;
3710 case 0x17: /* Tick compare */
3711 #if !defined(CONFIG_USER_ONLY)
3712 if (!supervisor(dc))
3713 goto illegal_insn;
3714 #endif
3716 TCGv_ptr r_tickptr;
3718 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3719 cpu_src2);
3720 r_tickptr = tcg_temp_new_ptr();
3721 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3722 offsetof(CPUState, tick));
3723 gen_helper_tick_set_limit(r_tickptr,
3724 cpu_tick_cmpr);
3725 tcg_temp_free_ptr(r_tickptr);
3727 break;
3728 case 0x18: /* System tick */
3729 #if !defined(CONFIG_USER_ONLY)
3730 if (!supervisor(dc))
3731 goto illegal_insn;
3732 #endif
3734 TCGv_ptr r_tickptr;
3736 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3737 cpu_src2);
3738 r_tickptr = tcg_temp_new_ptr();
3739 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3740 offsetof(CPUState, stick));
3741 gen_helper_tick_set_count(r_tickptr,
3742 cpu_dst);
3743 tcg_temp_free_ptr(r_tickptr);
3745 break;
3746 case 0x19: /* System tick compare */
3747 #if !defined(CONFIG_USER_ONLY)
3748 if (!supervisor(dc))
3749 goto illegal_insn;
3750 #endif
3752 TCGv_ptr r_tickptr;
3754 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3755 cpu_src2);
3756 r_tickptr = tcg_temp_new_ptr();
3757 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3758 offsetof(CPUState, stick));
3759 gen_helper_tick_set_limit(r_tickptr,
3760 cpu_stick_cmpr);
3761 tcg_temp_free_ptr(r_tickptr);
3763 break;
3765 case 0x10: /* Performance Control */
3766 case 0x11: /* Performance Instrumentation
3767 Counter */
3768 case 0x12: /* Dispatch Control */
3769 #endif
3770 default:
3771 goto illegal_insn;
3774 break;
3775 #if !defined(CONFIG_USER_ONLY)
3776 case 0x31: /* wrpsr, V9 saved, restored */
3778 if (!supervisor(dc))
3779 goto priv_insn;
3780 #ifdef TARGET_SPARC64
3781 switch (rd) {
3782 case 0:
3783 gen_helper_saved(cpu_env);
3784 break;
3785 case 1:
3786 gen_helper_restored(cpu_env);
3787 break;
3788 case 2: /* UA2005 allclean */
3789 case 3: /* UA2005 otherw */
3790 case 4: /* UA2005 normalw */
3791 case 5: /* UA2005 invalw */
3792 // XXX
3793 default:
3794 goto illegal_insn;
3796 #else
3797 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3798 gen_helper_wrpsr(cpu_env, cpu_dst);
3799 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3800 dc->cc_op = CC_OP_FLAGS;
3801 save_state(dc, cpu_cond);
3802 gen_op_next_insn();
3803 tcg_gen_exit_tb(0);
3804 dc->is_br = 1;
3805 #endif
3807 break;
3808 case 0x32: /* wrwim, V9 wrpr */
3810 if (!supervisor(dc))
3811 goto priv_insn;
3812 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3813 #ifdef TARGET_SPARC64
3814 switch (rd) {
3815 case 0: // tpc
3817 TCGv_ptr r_tsptr;
3819 r_tsptr = tcg_temp_new_ptr();
3820 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3821 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3822 offsetof(trap_state, tpc));
3823 tcg_temp_free_ptr(r_tsptr);
3825 break;
3826 case 1: // tnpc
3828 TCGv_ptr r_tsptr;
3830 r_tsptr = tcg_temp_new_ptr();
3831 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3832 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3833 offsetof(trap_state, tnpc));
3834 tcg_temp_free_ptr(r_tsptr);
3836 break;
3837 case 2: // tstate
3839 TCGv_ptr r_tsptr;
3841 r_tsptr = tcg_temp_new_ptr();
3842 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3843 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3844 offsetof(trap_state,
3845 tstate));
3846 tcg_temp_free_ptr(r_tsptr);
3848 break;
3849 case 3: // tt
3851 TCGv_ptr r_tsptr;
3853 r_tsptr = tcg_temp_new_ptr();
3854 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3855 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3856 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3857 offsetof(trap_state, tt));
3858 tcg_temp_free_ptr(r_tsptr);
3860 break;
3861 case 4: // tick
3863 TCGv_ptr r_tickptr;
3865 r_tickptr = tcg_temp_new_ptr();
3866 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3867 offsetof(CPUState, tick));
3868 gen_helper_tick_set_count(r_tickptr,
3869 cpu_tmp0);
3870 tcg_temp_free_ptr(r_tickptr);
3872 break;
3873 case 5: // tba
3874 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3875 break;
3876 case 6: // pstate
3878 TCGv r_tmp = tcg_temp_local_new();
3880 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3881 save_state(dc, cpu_cond);
3882 gen_helper_wrpstate(cpu_env, r_tmp);
3883 tcg_temp_free(r_tmp);
3884 dc->npc = DYNAMIC_PC;
3886 break;
3887 case 7: // tl
3889 TCGv r_tmp = tcg_temp_local_new();
3891 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3892 save_state(dc, cpu_cond);
3893 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3894 tcg_temp_free(r_tmp);
3895 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3896 offsetof(CPUSPARCState, tl));
3897 dc->npc = DYNAMIC_PC;
3899 break;
3900 case 8: // pil
3901 gen_helper_wrpil(cpu_env, cpu_tmp0);
3902 break;
3903 case 9: // cwp
3904 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3905 break;
3906 case 10: // cansave
3907 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3908 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3909 offsetof(CPUSPARCState,
3910 cansave));
3911 break;
3912 case 11: // canrestore
3913 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3914 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3915 offsetof(CPUSPARCState,
3916 canrestore));
3917 break;
3918 case 12: // cleanwin
3919 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3920 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3921 offsetof(CPUSPARCState,
3922 cleanwin));
3923 break;
3924 case 13: // otherwin
3925 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3926 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3927 offsetof(CPUSPARCState,
3928 otherwin));
3929 break;
3930 case 14: // wstate
3931 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3932 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3933 offsetof(CPUSPARCState,
3934 wstate));
3935 break;
3936 case 16: // UA2005 gl
3937 CHECK_IU_FEATURE(dc, GL);
3938 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3939 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3940 offsetof(CPUSPARCState, gl));
3941 break;
3942 case 26: // UA2005 strand status
3943 CHECK_IU_FEATURE(dc, HYPV);
3944 if (!hypervisor(dc))
3945 goto priv_insn;
3946 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3947 break;
3948 default:
3949 goto illegal_insn;
3951 #else
3952 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3953 if (dc->def->nwindows != 32)
3954 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3955 (1 << dc->def->nwindows) - 1);
3956 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3957 #endif
3959 break;
3960 case 0x33: /* wrtbr, UA2005 wrhpr */
3962 #ifndef TARGET_SPARC64
3963 if (!supervisor(dc))
3964 goto priv_insn;
3965 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3966 #else
3967 CHECK_IU_FEATURE(dc, HYPV);
3968 if (!hypervisor(dc))
3969 goto priv_insn;
3970 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3971 switch (rd) {
3972 case 0: // hpstate
3973 // XXX gen_op_wrhpstate();
3974 save_state(dc, cpu_cond);
3975 gen_op_next_insn();
3976 tcg_gen_exit_tb(0);
3977 dc->is_br = 1;
3978 break;
3979 case 1: // htstate
3980 // XXX gen_op_wrhtstate();
3981 break;
3982 case 3: // hintp
3983 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3984 break;
3985 case 5: // htba
3986 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3987 break;
3988 case 31: // hstick_cmpr
3990 TCGv_ptr r_tickptr;
3992 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3993 r_tickptr = tcg_temp_new_ptr();
3994 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3995 offsetof(CPUState, hstick));
3996 gen_helper_tick_set_limit(r_tickptr,
3997 cpu_hstick_cmpr);
3998 tcg_temp_free_ptr(r_tickptr);
4000 break;
4001 case 6: // hver is read-only
4002 default:
4003 goto illegal_insn;
4005 #endif
4007 break;
4008 #endif
4009 #ifdef TARGET_SPARC64
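/* V9 movcc: insn bit 18 selects the integer condition codes
   (cc 0 = %icc, 2 = %xcc) rather than the floating-point %fccN. */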
4010 case 0x2c: /* V9 movcc */
4012 int cc = GET_FIELD_SP(insn, 11, 12);
4013 int cond = GET_FIELD_SP(insn, 14, 17);
4014 TCGv r_cond;
4015 int l1;
4017 r_cond = tcg_temp_new();
4018 if (insn & (1 << 18)) {
4019 if (cc == 0)
4020 gen_cond(r_cond, 0, cond, dc);
4021 else if (cc == 2)
4022 gen_cond(r_cond, 1, cond, dc);
4023 else
4024 goto illegal_insn;
4025 } else {
4026 gen_fcond(r_cond, cc, cond);
4029 l1 = gen_new_label();
4031 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
4032 if (IS_IMM) { /* immediate */
4033 TCGv r_const;
4035 simm = GET_FIELD_SPs(insn, 0, 10);
4036 r_const = tcg_const_tl(simm);
4037 gen_movl_TN_reg(rd, r_const);
4038 tcg_temp_free(r_const);
4039 } else {
4040 rs2 = GET_FIELD_SP(insn, 0, 4);
4041 gen_movl_reg_TN(rs2, cpu_tmp0);
4042 gen_movl_TN_reg(rd, cpu_tmp0);
4044 gen_set_label(l1);
4045 tcg_temp_free(r_cond);
4046 break;
4048 case 0x2d: /* V9 sdivx */
4049 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
4050 gen_movl_TN_reg(rd, cpu_dst);
4051 break;
4052 case 0x2e: /* V9 popc */
4054 cpu_src2 = get_src2(insn, cpu_src2);
4055 gen_helper_popc(cpu_dst, cpu_src2);
4056 gen_movl_TN_reg(rd, cpu_dst);
break;
4058 case 0x2f: /* V9 movr */
4060 int cond = GET_FIELD_SP(insn, 10, 12);
4061 int l1;
4063 cpu_src1 = get_src1(insn, cpu_src1);
4065 l1 = gen_new_label();
4067 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
4068 cpu_src1, 0, l1);
4069 if (IS_IMM) { /* immediate */
4070 TCGv r_const;
4072 simm = GET_FIELD_SPs(insn, 0, 9);
4073 r_const = tcg_const_tl(simm);
4074 gen_movl_TN_reg(rd, r_const);
4075 tcg_temp_free(r_const);
4076 } else {
4077 rs2 = GET_FIELD_SP(insn, 0, 4);
4078 gen_movl_reg_TN(rs2, cpu_tmp0);
4079 gen_movl_TN_reg(rd, cpu_tmp0);
4081 gen_set_label(l1);
4082 break;
4084 #endif
4085 default:
4086 goto illegal_insn;
4089 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4090 #ifdef TARGET_SPARC64
4091 int opf = GET_FIELD_SP(insn, 5, 13);
4092 rs1 = GET_FIELD(insn, 13, 17);
4093 rs2 = GET_FIELD(insn, 27, 31);
4094 if (gen_trap_ifnofpu(dc, cpu_cond))
4095 goto jmp_insn;
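/* The edge8/16/32 opcodes all funnel into gen_edge(); the arguments
   encode the element width, whether condition codes are set (the cc
   forms) and the little-endian variant (the l forms). */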
4097 switch (opf) {
4098 case 0x000: /* VIS I edge8cc */
4099 CHECK_FPU_FEATURE(dc, VIS1);
4100 gen_movl_reg_TN(rs1, cpu_src1);
4101 gen_movl_reg_TN(rs2, cpu_src2);
4102 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4103 gen_movl_TN_reg(rd, cpu_dst);
4104 break;
4105 case 0x001: /* VIS II edge8n */
4106 CHECK_FPU_FEATURE(dc, VIS2);
4107 gen_movl_reg_TN(rs1, cpu_src1);
4108 gen_movl_reg_TN(rs2, cpu_src2);
4109 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4110 gen_movl_TN_reg(rd, cpu_dst);
4111 break;
4112 case 0x002: /* VIS I edge8lcc */
4113 CHECK_FPU_FEATURE(dc, VIS1);
4114 gen_movl_reg_TN(rs1, cpu_src1);
4115 gen_movl_reg_TN(rs2, cpu_src2);
4116 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4117 gen_movl_TN_reg(rd, cpu_dst);
4118 break;
4119 case 0x003: /* VIS II edge8ln */
4120 CHECK_FPU_FEATURE(dc, VIS2);
4121 gen_movl_reg_TN(rs1, cpu_src1);
4122 gen_movl_reg_TN(rs2, cpu_src2);
4123 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4124 gen_movl_TN_reg(rd, cpu_dst);
4125 break;
4126 case 0x004: /* VIS I edge16cc */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 gen_movl_reg_TN(rs1, cpu_src1);
4129 gen_movl_reg_TN(rs2, cpu_src2);
4130 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4131 gen_movl_TN_reg(rd, cpu_dst);
4132 break;
4133 case 0x005: /* VIS II edge16n */
4134 CHECK_FPU_FEATURE(dc, VIS2);
4135 gen_movl_reg_TN(rs1, cpu_src1);
4136 gen_movl_reg_TN(rs2, cpu_src2);
4137 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4138 gen_movl_TN_reg(rd, cpu_dst);
4139 break;
4140 case 0x006: /* VIS I edge16lcc */
4141 CHECK_FPU_FEATURE(dc, VIS1);
4142 gen_movl_reg_TN(rs1, cpu_src1);
4143 gen_movl_reg_TN(rs2, cpu_src2);
4144 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4145 gen_movl_TN_reg(rd, cpu_dst);
4146 break;
4147 case 0x007: /* VIS II edge16ln */
4148 CHECK_FPU_FEATURE(dc, VIS2);
4149 gen_movl_reg_TN(rs1, cpu_src1);
4150 gen_movl_reg_TN(rs2, cpu_src2);
4151 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4152 gen_movl_TN_reg(rd, cpu_dst);
4153 break;
4154 case 0x008: /* VIS I edge32cc */
4155 CHECK_FPU_FEATURE(dc, VIS1);
4156 gen_movl_reg_TN(rs1, cpu_src1);
4157 gen_movl_reg_TN(rs2, cpu_src2);
4158 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4159 gen_movl_TN_reg(rd, cpu_dst);
4160 break;
4161 case 0x009: /* VIS II edge32n */
4162 CHECK_FPU_FEATURE(dc, VIS2);
4163 gen_movl_reg_TN(rs1, cpu_src1);
4164 gen_movl_reg_TN(rs2, cpu_src2);
4165 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4166 gen_movl_TN_reg(rd, cpu_dst);
4167 break;
4168 case 0x00a: /* VIS I edge32lcc */
4169 CHECK_FPU_FEATURE(dc, VIS1);
4170 gen_movl_reg_TN(rs1, cpu_src1);
4171 gen_movl_reg_TN(rs2, cpu_src2);
4172 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4173 gen_movl_TN_reg(rd, cpu_dst);
4174 break;
4175 case 0x00b: /* VIS II edge32ln */
4176 CHECK_FPU_FEATURE(dc, VIS2);
4177 gen_movl_reg_TN(rs1, cpu_src1);
4178 gen_movl_reg_TN(rs2, cpu_src2);
4179 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4180 gen_movl_TN_reg(rd, cpu_dst);
4181 break;
4182 case 0x010: /* VIS I array8 */
4183 CHECK_FPU_FEATURE(dc, VIS1);
4184 cpu_src1 = get_src1(insn, cpu_src1);
4185 gen_movl_reg_TN(rs2, cpu_src2);
4186 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4187 gen_movl_TN_reg(rd, cpu_dst);
4188 break;
4189 case 0x012: /* VIS I array16 */
4190 CHECK_FPU_FEATURE(dc, VIS1);
4191 cpu_src1 = get_src1(insn, cpu_src1);
4192 gen_movl_reg_TN(rs2, cpu_src2);
4193 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4194 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4195 gen_movl_TN_reg(rd, cpu_dst);
4196 break;
4197 case 0x014: /* VIS I array32 */
4198 CHECK_FPU_FEATURE(dc, VIS1);
4199 cpu_src1 = get_src1(insn, cpu_src1);
4200 gen_movl_reg_TN(rs2, cpu_src2);
4201 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4202 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4203 gen_movl_TN_reg(rd, cpu_dst);
4204 break;
4205 case 0x018: /* VIS I alignaddr */
4206 CHECK_FPU_FEATURE(dc, VIS1);
4207 cpu_src1 = get_src1(insn, cpu_src1);
4208 gen_movl_reg_TN(rs2, cpu_src2);
4209 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4210 gen_movl_TN_reg(rd, cpu_dst);
4211 break;
4212 case 0x01a: /* VIS I alignaddrl */
4213 CHECK_FPU_FEATURE(dc, VIS1);
4214 cpu_src1 = get_src1(insn, cpu_src1);
4215 gen_movl_reg_TN(rs2, cpu_src2);
4216 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4217 gen_movl_TN_reg(rd, cpu_dst);
4218 break;
4219 case 0x019: /* VIS II bmask */
4220 CHECK_FPU_FEATURE(dc, VIS2);
4221 cpu_src1 = get_src1(insn, cpu_src1);
4222 cpu_src2 = get_src2(insn, cpu_src2);
4223 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4224 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4225 gen_movl_TN_reg(rd, cpu_dst);
4226 break;
4227 case 0x020: /* VIS I fcmple16 */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4230 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4231 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4232 gen_movl_TN_reg(rd, cpu_dst);
4233 break;
4234 case 0x022: /* VIS I fcmpne16 */
4235 CHECK_FPU_FEATURE(dc, VIS1);
4236 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4237 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4238 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4239 gen_movl_TN_reg(rd, cpu_dst);
4240 break;
4241 case 0x024: /* VIS I fcmple32 */
4242 CHECK_FPU_FEATURE(dc, VIS1);
4243 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4244 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4245 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4246 gen_movl_TN_reg(rd, cpu_dst);
4247 break;
4248 case 0x026: /* VIS I fcmpne32 */
4249 CHECK_FPU_FEATURE(dc, VIS1);
4250 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4251 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4252 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4253 gen_movl_TN_reg(rd, cpu_dst);
4254 break;
4255 case 0x028: /* VIS I fcmpgt16 */
4256 CHECK_FPU_FEATURE(dc, VIS1);
4257 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4258 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4259 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4260 gen_movl_TN_reg(rd, cpu_dst);
4261 break;
4262 case 0x02a: /* VIS I fcmpeq16 */
4263 CHECK_FPU_FEATURE(dc, VIS1);
4264 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4265 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4266 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4267 gen_movl_TN_reg(rd, cpu_dst);
4268 break;
4269 case 0x02c: /* VIS I fcmpgt32 */
4270 CHECK_FPU_FEATURE(dc, VIS1);
4271 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4272 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4273 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4274 gen_movl_TN_reg(rd, cpu_dst);
4275 break;
4276 case 0x02e: /* VIS I fcmpeq32 */
4277 CHECK_FPU_FEATURE(dc, VIS1);
4278 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4279 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4280 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4281 gen_movl_TN_reg(rd, cpu_dst);
4282 break;
4283 case 0x031: /* VIS I fmul8x16 */
4284 CHECK_FPU_FEATURE(dc, VIS1);
4285 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4286 break;
4287 case 0x033: /* VIS I fmul8x16au */
4288 CHECK_FPU_FEATURE(dc, VIS1);
4289 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4290 break;
4291 case 0x035: /* VIS I fmul8x16al */
4292 CHECK_FPU_FEATURE(dc, VIS1);
4293 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4294 break;
4295 case 0x036: /* VIS I fmul8sux16 */
4296 CHECK_FPU_FEATURE(dc, VIS1);
4297 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4298 break;
4299 case 0x037: /* VIS I fmul8ulx16 */
4300 CHECK_FPU_FEATURE(dc, VIS1);
4301 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4302 break;
4303 case 0x038: /* VIS I fmuld8sux16 */
4304 CHECK_FPU_FEATURE(dc, VIS1);
4305 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4306 break;
4307 case 0x039: /* VIS I fmuld8ulx16 */
4308 CHECK_FPU_FEATURE(dc, VIS1);
4309 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4310 break;
4311 case 0x03a: /* VIS I fpack32 */
4312 CHECK_FPU_FEATURE(dc, VIS1);
4313 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4314 break;
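/* fpack16 and fpackfix read the scale factor from GSR, hence cpu_gsr
   is passed straight to the helpers. */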
4315 case 0x03b: /* VIS I fpack16 */
4316 CHECK_FPU_FEATURE(dc, VIS1);
4317 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4318 cpu_dst_32 = gen_dest_fpr_F();
4319 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4320 gen_store_fpr_F(dc, rd, cpu_dst_32);
4321 break;
4322 case 0x03d: /* VIS I fpackfix */
4323 CHECK_FPU_FEATURE(dc, VIS1);
4324 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4325 cpu_dst_32 = gen_dest_fpr_F();
4326 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4327 gen_store_fpr_F(dc, rd, cpu_dst_32);
4328 break;
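/* pdist sums the absolute differences of the eight byte pairs of
   rs1 and rs2 and accumulates the result into rd, so the DDDD
   variant feeds the old destination value to the helper as well. */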
4329 case 0x03e: /* VIS I pdist */
4330 CHECK_FPU_FEATURE(dc, VIS1);
4331 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4332 break;
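/* faligndata extracts 8 bytes from the 16-byte concatenation
   rs1:rs2 at the byte offset held in GSR.align (set up by
   alignaddr); roughly:
       alignaddr  %o0, %g0, %o1
       ldd        [%o1], %f0
       ldd        [%o1 + 8], %f2
       faligndata %f0, %f2, %f4    ! misaligned 8-byte load */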
4333 case 0x048: /* VIS I faligndata */
4334 CHECK_FPU_FEATURE(dc, VIS1);
4335 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4336 break;
4337 case 0x04b: /* VIS I fpmerge */
4338 CHECK_FPU_FEATURE(dc, VIS1);
4339 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4340 break;
4341 case 0x04c: /* VIS II bshuffle */
4342 CHECK_FPU_FEATURE(dc, VIS2);
4343 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4344 break;
4345 case 0x04d: /* VIS I fexpand */
4346 CHECK_FPU_FEATURE(dc, VIS1);
4347 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4348 break;
4349 case 0x050: /* VIS I fpadd16 */
4350 CHECK_FPU_FEATURE(dc, VIS1);
4351 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4352 break;
4353 case 0x051: /* VIS I fpadd16s */
4354 CHECK_FPU_FEATURE(dc, VIS1);
4355 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4356 break;
4357 case 0x052: /* VIS I fpadd32 */
4358 CHECK_FPU_FEATURE(dc, VIS1);
4359 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4360 break;
4361 case 0x053: /* VIS I fpadd32s */
4362 CHECK_FPU_FEATURE(dc, VIS1);
4363 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4364 break;
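/* the single-lane fpadd32s/fpsub32s reduce to plain i32 add/sub,
   while the 16-bit-lane forms need helpers to keep the partitioned
   carries separate */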
4365 case 0x054: /* VIS I fpsub16 */
4366 CHECK_FPU_FEATURE(dc, VIS1);
4367 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4368 break;
4369 case 0x055: /* VIS I fpsub16s */
4370 CHECK_FPU_FEATURE(dc, VIS1);
4371 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4372 break;
4373 case 0x056: /* VIS I fpsub32 */
4374 CHECK_FPU_FEATURE(dc, VIS1);
4375 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4376 break;
4377 case 0x057: /* VIS I fpsub32s */
4378 CHECK_FPU_FEATURE(dc, VIS1);
4379 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4380 break;
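/* the VIS logical ops below map 1:1 onto host bitwise TCG ops, on
   the 64-bit (D) register pairs or the 32-bit "s"-suffixed views */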
4381 case 0x060: /* VIS I fzero */
4382 CHECK_FPU_FEATURE(dc, VIS1);
4383 cpu_dst_64 = gen_dest_fpr_D();
4384 tcg_gen_movi_i64(cpu_dst_64, 0);
4385 gen_store_fpr_D(dc, rd, cpu_dst_64);
4386 break;
4387 case 0x061: /* VIS I fzeros */
4388 CHECK_FPU_FEATURE(dc, VIS1);
4389 cpu_dst_32 = gen_dest_fpr_F();
4390 tcg_gen_movi_i32(cpu_dst_32, 0);
4391 gen_store_fpr_F(dc, rd, cpu_dst_32);
4392 break;
4393 case 0x062: /* VIS I fnor */
4394 CHECK_FPU_FEATURE(dc, VIS1);
4395 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4396 break;
4397 case 0x063: /* VIS I fnors */
4398 CHECK_FPU_FEATURE(dc, VIS1);
4399 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4400 break;
4401 case 0x064: /* VIS I fandnot2 */
4402 CHECK_FPU_FEATURE(dc, VIS1);
4403 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4404 break;
4405 case 0x065: /* VIS I fandnot2s */
4406 CHECK_FPU_FEATURE(dc, VIS1);
4407 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4408 break;
4409 case 0x066: /* VIS I fnot2 */
4410 CHECK_FPU_FEATURE(dc, VIS1);
4411 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4412 break;
4413 case 0x067: /* VIS I fnot2s */
4414 CHECK_FPU_FEATURE(dc, VIS1);
4415 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4416 break;
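/* fandnot1 computes ~rs1 & rs2, i.e. fandnot2 with its operands
   swapped, so the andc generator is reused with rs1/rs2 exchanged;
   fornot1 below plays the same trick with orc */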
4417 case 0x068: /* VIS I fandnot1 */
4418 CHECK_FPU_FEATURE(dc, VIS1);
4419 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4420 break;
4421 case 0x069: /* VIS I fandnot1s */
4422 CHECK_FPU_FEATURE(dc, VIS1);
4423 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4424 break;
4425 case 0x06a: /* VIS I fnot1 */
4426 CHECK_FPU_FEATURE(dc, VIS1);
4427 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4428 break;
4429 case 0x06b: /* VIS I fnot1s */
4430 CHECK_FPU_FEATURE(dc, VIS1);
4431 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4432 break;
4433 case 0x06c: /* VIS I fxor */
4434 CHECK_FPU_FEATURE(dc, VIS1);
4435 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4436 break;
4437 case 0x06d: /* VIS I fxors */
4438 CHECK_FPU_FEATURE(dc, VIS1);
4439 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4440 break;
4441 case 0x06e: /* VIS I fnand */
4442 CHECK_FPU_FEATURE(dc, VIS1);
4443 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4444 break;
4445 case 0x06f: /* VIS I fnands */
4446 CHECK_FPU_FEATURE(dc, VIS1);
4447 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4448 break;
4449 case 0x070: /* VIS I fand */
4450 CHECK_FPU_FEATURE(dc, VIS1);
4451 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4452 break;
4453 case 0x071: /* VIS I fands */
4454 CHECK_FPU_FEATURE(dc, VIS1);
4455 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4456 break;
4457 case 0x072: /* VIS I fxnor */
4458 CHECK_FPU_FEATURE(dc, VIS1);
4459 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4460 break;
4461 case 0x073: /* VIS I fxnors */
4462 CHECK_FPU_FEATURE(dc, VIS1);
4463 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4464 break;
4465 case 0x074: /* VIS I fsrc1 */
4466 CHECK_FPU_FEATURE(dc, VIS1);
4467 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4468 gen_store_fpr_D(dc, rd, cpu_src1_64);
4469 break;
4470 case 0x075: /* VIS I fsrc1s */
4471 CHECK_FPU_FEATURE(dc, VIS1);
4472 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4473 gen_store_fpr_F(dc, rd, cpu_src1_32);
4474 break;
4475 case 0x076: /* VIS I fornot2 */
4476 CHECK_FPU_FEATURE(dc, VIS1);
4477 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4478 break;
4479 case 0x077: /* VIS I fornot2s */
4480 CHECK_FPU_FEATURE(dc, VIS1);
4481 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4482 break;
4483 case 0x078: /* VIS I fsrc2 */
4484 CHECK_FPU_FEATURE(dc, VIS1);
4485 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4486 gen_store_fpr_D(dc, rd, cpu_src1_64);
4487 break;
4488 case 0x079: /* VIS I fsrc2s */
4489 CHECK_FPU_FEATURE(dc, VIS1);
4490 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4491 gen_store_fpr_F(dc, rd, cpu_src1_32);
4492 break;
4493 case 0x07a: /* VIS I fornot1 */
4494 CHECK_FPU_FEATURE(dc, VIS1);
4495 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4496 break;
4497 case 0x07b: /* VIS I fornot1s */
4498 CHECK_FPU_FEATURE(dc, VIS1);
4499 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4500 break;
4501 case 0x07c: /* VIS I for */
4502 CHECK_FPU_FEATURE(dc, VIS1);
4503 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4504 break;
4505 case 0x07d: /* VIS I fors */
4506 CHECK_FPU_FEATURE(dc, VIS1);
4507 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4508 break;
4509 case 0x07e: /* VIS I fone */
4510 CHECK_FPU_FEATURE(dc, VIS1);
4511 cpu_dst_64 = gen_dest_fpr_D();
4512 tcg_gen_movi_i64(cpu_dst_64, -1);
4513 gen_store_fpr_D(dc, rd, cpu_dst_64);
4514 break;
4515 case 0x07f: /* VIS I fones */
4516 CHECK_FPU_FEATURE(dc, VIS1);
4517 cpu_dst_32 = gen_dest_fpr_F();
4518 tcg_gen_movi_i32(cpu_dst_32, -1);
4519 gen_store_fpr_F(dc, rd, cpu_dst_32);
4520 break;
4521 case 0x080: /* VIS I shutdown */
4522 case 0x081: /* VIS II siam */
4523 // XXX: shutdown/siam not implemented, treated as illegal
4524 goto illegal_insn;
4525 default:
4526 goto illegal_insn;
4527 }
4528 #else
4529 goto ncp_insn;
4530 #endif
4531 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4532 #ifdef TARGET_SPARC64
4533 goto illegal_insn;
4534 #else
4535 goto ncp_insn;
4536 #endif
4537 #ifdef TARGET_SPARC64
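/* V9 return: restore the register window, then transfer control
   through the delay slot; the computed target is alignment-checked
   and installed as a dynamic npc */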
4538 } else if (xop == 0x39) { /* V9 return */
4539 TCGv_i32 r_const;
4541 save_state(dc, cpu_cond);
4542 cpu_src1 = get_src1(insn, cpu_src1);
4543 if (IS_IMM) { /* immediate */
4544 simm = GET_FIELDs(insn, 19, 31);
4545 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4546 } else { /* register */
4547 rs2 = GET_FIELD(insn, 27, 31);
4548 if (rs2) {
4549 gen_movl_reg_TN(rs2, cpu_src2);
4550 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4551 } else
4552 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4553 }
4554 gen_helper_restore(cpu_env);
4555 gen_mov_pc_npc(dc, cpu_cond);
4556 r_const = tcg_const_i32(3);
4557 gen_helper_check_align(cpu_dst, r_const);
4558 tcg_temp_free_i32(r_const);
4559 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4560 dc->npc = DYNAMIC_PC;
4561 goto jmp_insn;
4562 #endif
4563 } else {
4564 cpu_src1 = get_src1(insn, cpu_src1);
4565 if (IS_IMM) { /* immediate */
4566 simm = GET_FIELDs(insn, 19, 31);
4567 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4568 } else { /* register */
4569 rs2 = GET_FIELD(insn, 27, 31);
4570 if (rs2) {
4571 gen_movl_reg_TN(rs2, cpu_src2);
4572 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4573 } else
4574 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4575 }
4576 switch (xop) {
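/* jmpl: rd is linked with the address of the jmpl itself, then
   pc <- npc and npc <- the computed target, which must be 4-byte
   aligned */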
4577 case 0x38: /* jmpl */
4578 {
4579 TCGv r_pc;
4580 TCGv_i32 r_const;
4582 r_pc = tcg_const_tl(dc->pc);
4583 gen_movl_TN_reg(rd, r_pc);
4584 tcg_temp_free(r_pc);
4585 gen_mov_pc_npc(dc, cpu_cond);
4586 r_const = tcg_const_i32(3);
4587 gen_helper_check_align(cpu_dst, r_const);
4588 tcg_temp_free_i32(r_const);
4589 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4590 dc->npc = DYNAMIC_PC;
4591 }
4592 goto jmp_insn;
4593 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4594 case 0x39: /* rett, V9 return */
4595 {
4596 TCGv_i32 r_const;
4598 if (!supervisor(dc))
4599 goto priv_insn;
4600 gen_mov_pc_npc(dc, cpu_cond);
4601 r_const = tcg_const_i32(3);
4602 gen_helper_check_align(cpu_dst, r_const);
4603 tcg_temp_free_i32(r_const);
4604 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4605 dc->npc = DYNAMIC_PC;
4606 gen_helper_rett(cpu_env);
4607 }
4608 goto jmp_insn;
4609 #endif
4610 case 0x3b: /* flush */
4611 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4612 goto unimp_flush;
4613 /* nop */
4614 break;
4615 case 0x3c: /* save */
4616 save_state(dc, cpu_cond);
4617 gen_helper_save(cpu_env);
4618 gen_movl_TN_reg(rd, cpu_dst);
4619 break;
4620 case 0x3d: /* restore */
4621 save_state(dc, cpu_cond);
4622 gen_helper_restore(cpu_env);
4623 gen_movl_TN_reg(rd, cpu_dst);
4624 break;
4625 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
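/* done (rd == 0) resumes after the trapped instruction, retry
   (rd == 1) re-executes it; both are privileged and leave pc and
   npc fully dynamic */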
4626 case 0x3e: /* V9 done/retry */
4627 {
4628 switch (rd) {
4629 case 0:
4630 if (!supervisor(dc))
4631 goto priv_insn;
4632 dc->npc = DYNAMIC_PC;
4633 dc->pc = DYNAMIC_PC;
4634 gen_helper_done(cpu_env);
4635 goto jmp_insn;
4636 case 1:
4637 if (!supervisor(dc))
4638 goto priv_insn;
4639 dc->npc = DYNAMIC_PC;
4640 dc->pc = DYNAMIC_PC;
4641 gen_helper_retry(cpu_env);
4642 goto jmp_insn;
4643 default:
4644 goto illegal_insn;
4645 }
4646 }
4647 break;
4648 #endif
4649 default:
4650 goto illegal_insn;
4651 }
4652 }
4653 break;
4654 }
4655 break;
4656 case 3: /* load/store instructions */
4657 {
4658 unsigned int xop = GET_FIELD(insn, 7, 12);
4660 /* flush pending conditional evaluations before exposing cpu state;
4661 a faulting load/store must see real PSR flags, not the lazy cc_op */
4662 if (dc->cc_op != CC_OP_FLAGS) {
4663 dc->cc_op = CC_OP_FLAGS;
4664 gen_helper_compute_psr(cpu_env);
4665 }
4666 cpu_src1 = get_src1(insn, cpu_src1);
4667 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4668 rs2 = GET_FIELD(insn, 27, 31);
4669 gen_movl_reg_TN(rs2, cpu_src2);
4670 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4671 } else if (IS_IMM) { /* immediate */
4672 simm = GET_FIELDs(insn, 19, 31);
4673 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4674 } else { /* register */
4675 rs2 = GET_FIELD(insn, 27, 31);
4676 if (rs2 != 0) {
4677 gen_movl_reg_TN(rs2, cpu_src2);
4678 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4679 } else
4680 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4681 }
4682 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4683 (xop > 0x17 && xop <= 0x1d) ||
4684 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
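/* integer loads plus ldstub/swap: every path below leaves the
   loaded value in cpu_val for the common write-back to rd */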
4685 switch (xop) {
4686 case 0x0: /* ld, V9 lduw, load unsigned word */
4687 gen_address_mask(dc, cpu_addr);
4688 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4689 break;
4690 case 0x1: /* ldub, load unsigned byte */
4691 gen_address_mask(dc, cpu_addr);
4692 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4693 break;
4694 case 0x2: /* lduh, load unsigned halfword */
4695 gen_address_mask(dc, cpu_addr);
4696 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4697 break;
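/* ldd requires an even rd; one 64-bit load is split so that the
   word at the lower (on big-endian SPARC: more significant)
   address lands in rd and the following word in rd + 1 */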
4698 case 0x3: /* ldd, load double word */
4699 if (rd & 1)
4700 goto illegal_insn;
4701 else {
4702 TCGv_i32 r_const;
4704 save_state(dc, cpu_cond);
4705 r_const = tcg_const_i32(7);
4706 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4707 tcg_temp_free_i32(r_const);
4708 gen_address_mask(dc, cpu_addr);
4709 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4710 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4711 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4712 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4713 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4714 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4715 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4716 }
4717 break;
4718 case 0x9: /* ldsb, load signed byte */
4719 gen_address_mask(dc, cpu_addr);
4720 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4721 break;
4722 case 0xa: /* ldsh, load signed halfword */
4723 gen_address_mask(dc, cpu_addr);
4724 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4725 break;
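/* ldstub returns the old byte in rd and sets the memory location
   to 0xff; emulated as a separate load and store, hence the XXX */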
4726 case 0xd: /* ldstub -- XXX: should be atomic */
4727 {
4728 TCGv r_const;
4730 gen_address_mask(dc, cpu_addr);
4731 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4732 r_const = tcg_const_tl(0xff);
4733 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4734 tcg_temp_free(r_const);
4735 }
4736 break;
4737 case 0x0f: /* swap, swap register with memory. XXX: should be
4738 atomic */
4739 CHECK_IU_FEATURE(dc, SWAP);
4740 gen_movl_reg_TN(rd, cpu_val);
4741 gen_address_mask(dc, cpu_addr);
4742 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4743 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4744 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4745 break;
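/* the alternate-space forms carry an explicit ASI; on V8 they are
   privileged and have no immediate form, hence the supervisor()
   and IS_IMM checks compiled in for !TARGET_SPARC64 */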
4746 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4747 case 0x10: /* lda, V9 lduwa, load word alternate */
4748 #ifndef TARGET_SPARC64
4749 if (IS_IMM)
4750 goto illegal_insn;
4751 if (!supervisor(dc))
4752 goto priv_insn;
4753 #endif
4754 save_state(dc, cpu_cond);
4755 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4756 break;
4757 case 0x11: /* lduba, load unsigned byte alternate */
4758 #ifndef TARGET_SPARC64
4759 if (IS_IMM)
4760 goto illegal_insn;
4761 if (!supervisor(dc))
4762 goto priv_insn;
4763 #endif
4764 save_state(dc, cpu_cond);
4765 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4766 break;
4767 case 0x12: /* lduha, load unsigned halfword alternate */
4768 #ifndef TARGET_SPARC64
4769 if (IS_IMM)
4770 goto illegal_insn;
4771 if (!supervisor(dc))
4772 goto priv_insn;
4773 #endif
4774 save_state(dc, cpu_cond);
4775 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4776 break;
4777 case 0x13: /* ldda, load double word alternate */
4778 #ifndef TARGET_SPARC64
4779 if (IS_IMM)
4780 goto illegal_insn;
4781 if (!supervisor(dc))
4782 goto priv_insn;
4783 #endif
4784 if (rd & 1)
4785 goto illegal_insn;
4786 save_state(dc, cpu_cond);
4787 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4788 goto skip_move;
4789 case 0x19: /* ldsba, load signed byte alternate */
4790 #ifndef TARGET_SPARC64
4791 if (IS_IMM)
4792 goto illegal_insn;
4793 if (!supervisor(dc))
4794 goto priv_insn;
4795 #endif
4796 save_state(dc, cpu_cond);
4797 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4798 break;
4799 case 0x1a: /* ldsha, load signed halfword alternate */
4800 #ifndef TARGET_SPARC64
4801 if (IS_IMM)
4802 goto illegal_insn;
4803 if (!supervisor(dc))
4804 goto priv_insn;
4805 #endif
4806 save_state(dc, cpu_cond);
4807 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4808 break;
4809 case 0x1d: /* ldstuba -- XXX: should be atomic */
4810 #ifndef TARGET_SPARC64
4811 if (IS_IMM)
4812 goto illegal_insn;
4813 if (!supervisor(dc))
4814 goto priv_insn;
4815 #endif
4816 save_state(dc, cpu_cond);
4817 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4818 break;
4819 case 0x1f: /* swapa, swap reg with alt. memory. XXX: should be
4820 atomic */
4821 CHECK_IU_FEATURE(dc, SWAP);
4822 #ifndef TARGET_SPARC64
4823 if (IS_IMM)
4824 goto illegal_insn;
4825 if (!supervisor(dc))
4826 goto priv_insn;
4827 #endif
4828 save_state(dc, cpu_cond);
4829 gen_movl_reg_TN(rd, cpu_val);
4830 gen_swap_asi(cpu_val, cpu_addr, insn);
4831 break;
4833 #ifndef TARGET_SPARC64
4834 case 0x30: /* ldc */
4835 case 0x31: /* ldcsr */
4836 case 0x33: /* lddc */
4837 goto ncp_insn;
4838 #endif
4839 #endif
4840 #ifdef TARGET_SPARC64
4841 case 0x08: /* V9 ldsw */
4842 gen_address_mask(dc, cpu_addr);
4843 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4844 break;
4845 case 0x0b: /* V9 ldx */
4846 gen_address_mask(dc, cpu_addr);
4847 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4848 break;
4849 case 0x18: /* V9 ldswa */
4850 save_state(dc, cpu_cond);
4851 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4852 break;
4853 case 0x1b: /* V9 ldxa */
4854 save_state(dc, cpu_cond);
4855 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4856 break;
4857 case 0x2d: /* V9 prefetch, no effect */
4858 goto skip_move;
4859 case 0x30: /* V9 ldfa */
4860 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4861 goto jmp_insn;
4862 }
4863 save_state(dc, cpu_cond);
4864 gen_ldf_asi(cpu_addr, insn, 4, rd);
4865 gen_update_fprs_dirty(rd);
4866 goto skip_move;
4867 case 0x33: /* V9 lddfa */
4868 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4869 goto jmp_insn;
4870 }
4871 save_state(dc, cpu_cond);
4872 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4873 gen_update_fprs_dirty(DFPREG(rd));
4874 goto skip_move;
4875 case 0x3d: /* V9 prefetcha, no effect */
4876 goto skip_move;
4877 case 0x32: /* V9 ldqfa */
4878 CHECK_FPU_FEATURE(dc, FLOAT128);
4879 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4880 goto jmp_insn;
4881 }
4882 save_state(dc, cpu_cond);
4883 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4884 gen_update_fprs_dirty(QFPREG(rd));
4885 goto skip_move;
4886 #endif
4887 default:
4888 goto illegal_insn;
4889 }
4890 gen_movl_TN_reg(rd, cpu_val);
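/* loads that already wrote their destination (ldda, prefetch and
   the ASI FP loads) branched to skip_move to bypass the write-back
   above */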
4891 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4892 skip_move: ;
4893 #endif
4894 } else if (xop >= 0x20 && xop < 0x24) {
4895 if (gen_trap_ifnofpu(dc, cpu_cond))
4896 goto jmp_insn;
4897 save_state(dc, cpu_cond);
4898 switch (xop) {
4899 case 0x20: /* ldf, load fpreg */
4900 gen_address_mask(dc, cpu_addr);
4901 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4902 cpu_dst_32 = gen_dest_fpr_F();
4903 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4904 gen_store_fpr_F(dc, rd, cpu_dst_32);
4905 break;
4906 case 0x21: /* ldfsr, V9 ldxfsr */
4907 #ifdef TARGET_SPARC64
4908 gen_address_mask(dc, cpu_addr);
4909 if (rd == 1) {
4910 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4911 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4912 } else {
4913 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4914 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4915 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4916 }
4917 #else
4918 {
4919 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4920 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4921 }
4922 #endif
4923 break;
4924 case 0x22: /* ldqf, load quad fpreg */
4925 {
4926 TCGv_i32 r_const;
4928 CHECK_FPU_FEATURE(dc, FLOAT128);
4929 r_const = tcg_const_i32(dc->mem_idx);
4930 gen_address_mask(dc, cpu_addr);
4931 gen_helper_ldqf(cpu_addr, r_const);
4932 tcg_temp_free_i32(r_const);
4933 gen_op_store_QT0_fpr(QFPREG(rd));
4934 gen_update_fprs_dirty(QFPREG(rd));
4935 }
4936 break;
4937 case 0x23: /* lddf, load double fpreg */
4938 gen_address_mask(dc, cpu_addr);
4939 cpu_dst_64 = gen_dest_fpr_D();
4940 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4941 gen_store_fpr_D(dc, rd, cpu_dst_64);
4942 break;
4943 default:
4944 goto illegal_insn;
4945 }
4946 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4947 xop == 0xe || xop == 0x1e) {
4948 gen_movl_reg_TN(rd, cpu_val);
4949 switch (xop) {
4950 case 0x4: /* st, store word */
4951 gen_address_mask(dc, cpu_addr);
4952 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4953 break;
4954 case 0x5: /* stb, store byte */
4955 gen_address_mask(dc, cpu_addr);
4956 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4957 break;
4958 case 0x6: /* sth, store halfword */
4959 gen_address_mask(dc, cpu_addr);
4960 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4961 break;
4962 case 0x7: /* std, store double word */
4963 if (rd & 1)
4964 goto illegal_insn;
4965 else {
4966 TCGv_i32 r_const;
4968 save_state(dc, cpu_cond);
4969 gen_address_mask(dc, cpu_addr);
4970 r_const = tcg_const_i32(7);
4971 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4972 tcg_temp_free_i32(r_const);
4973 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4974 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4975 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4976 }
4977 break;
4978 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4979 case 0x14: /* sta, V9 stwa, store word alternate */
4980 #ifndef TARGET_SPARC64
4981 if (IS_IMM)
4982 goto illegal_insn;
4983 if (!supervisor(dc))
4984 goto priv_insn;
4985 #endif
4986 save_state(dc, cpu_cond);
4987 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4988 dc->npc = DYNAMIC_PC;
4989 break;
4990 case 0x15: /* stba, store byte alternate */
4991 #ifndef TARGET_SPARC64
4992 if (IS_IMM)
4993 goto illegal_insn;
4994 if (!supervisor(dc))
4995 goto priv_insn;
4996 #endif
4997 save_state(dc, cpu_cond);
4998 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4999 dc->npc = DYNAMIC_PC;
5000 break;
5001 case 0x16: /* stha, store halfword alternate */
5002 #ifndef TARGET_SPARC64
5003 if (IS_IMM)
5004 goto illegal_insn;
5005 if (!supervisor(dc))
5006 goto priv_insn;
5007 #endif
5008 save_state(dc, cpu_cond);
5009 gen_st_asi(cpu_val, cpu_addr, insn, 2);
5010 dc->npc = DYNAMIC_PC;
5011 break;
5012 case 0x17: /* stda, store double word alternate */
5013 #ifndef TARGET_SPARC64
5014 if (IS_IMM)
5015 goto illegal_insn;
5016 if (!supervisor(dc))
5017 goto priv_insn;
5018 #endif
5019 if (rd & 1)
5020 goto illegal_insn;
5021 else {
5022 save_state(dc, cpu_cond);
5023 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
5024 }
5025 break;
5026 #endif
5027 #ifdef TARGET_SPARC64
5028 case 0x0e: /* V9 stx */
5029 gen_address_mask(dc, cpu_addr);
5030 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5031 break;
5032 case 0x1e: /* V9 stxa */
5033 save_state(dc, cpu_cond);
5034 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5035 dc->npc = DYNAMIC_PC;
5036 break;
5037 #endif
5038 default:
5039 goto illegal_insn;
5040 }
5041 } else if (xop > 0x23 && xop < 0x28) {
5042 if (gen_trap_ifnofpu(dc, cpu_cond))
5043 goto jmp_insn;
5044 save_state(dc, cpu_cond);
5045 switch (xop) {
5046 case 0x24: /* stf, store fpreg */
5047 gen_address_mask(dc, cpu_addr);
5048 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5049 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5050 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5051 break;
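/* V9 encodes stxfsr as this opcode with rd == 1, storing the full
   64-bit FSR; rd == 0 keeps the 32-bit stfsr behaviour */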
5052 case 0x25: /* stfsr, V9 stxfsr */
5053 #ifdef TARGET_SPARC64
5054 gen_address_mask(dc, cpu_addr);
5055 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
5056 if (rd == 1)
5057 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5058 else
5059 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5060 #else
5061 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
5062 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5063 #endif
5064 break;
5065 case 0x26:
5066 #ifdef TARGET_SPARC64
5067 /* V9 stqf, store quad fpreg */
5068 {
5069 TCGv_i32 r_const;
5071 CHECK_FPU_FEATURE(dc, FLOAT128);
5072 gen_op_load_fpr_QT0(QFPREG(rd));
5073 r_const = tcg_const_i32(dc->mem_idx);
5074 gen_address_mask(dc, cpu_addr);
5075 gen_helper_stqf(cpu_addr, r_const);
5076 tcg_temp_free_i32(r_const);
5077 }
5078 break;
5079 #else /* !TARGET_SPARC64 */
5080 /* stdfq, store floating point queue */
5081 #if defined(CONFIG_USER_ONLY)
5082 goto illegal_insn;
5083 #else
5084 if (!supervisor(dc))
5085 goto priv_insn;
5086 if (gen_trap_ifnofpu(dc, cpu_cond))
5087 goto jmp_insn;
5088 goto nfq_insn;
5089 #endif
5090 #endif
5091 case 0x27: /* stdf, store double fpreg */
5092 gen_address_mask(dc, cpu_addr);
5093 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5094 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5095 break;
5096 default:
5097 goto illegal_insn;
5098 }
5099 } else if (xop > 0x33 && xop < 0x3f) {
5100 save_state(dc, cpu_cond);
5101 switch (xop) {
5102 #ifdef TARGET_SPARC64
5103 case 0x34: /* V9 stfa */
5104 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5105 goto jmp_insn;
5106 }
5107 gen_stf_asi(cpu_addr, insn, 4, rd);
5108 break;
5109 case 0x36: /* V9 stqfa */
5110 {
5111 TCGv_i32 r_const;
5113 CHECK_FPU_FEATURE(dc, FLOAT128);
5114 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5115 goto jmp_insn;
5116 }
5117 r_const = tcg_const_i32(7);
5118 gen_helper_check_align(cpu_addr, r_const);
5119 tcg_temp_free_i32(r_const);
5120 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5121 }
5122 break;
5123 case 0x37: /* V9 stdfa */
5124 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5125 goto jmp_insn;
5126 }
5127 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5128 break;
5129 case 0x3c: /* V9 casa */
5130 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5131 gen_movl_TN_reg(rd, cpu_val);
5132 break;
5133 case 0x3e: /* V9 casxa */
5134 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5135 gen_movl_TN_reg(rd, cpu_val);
5136 break;
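/* casa/casxa compare the memory word with rs2 and store rd's value
   on a match; rd always receives the old memory value */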
5137 #else
5138 case 0x34: /* stc */
5139 case 0x35: /* stcsr */
5140 case 0x36: /* stdcq */
5141 case 0x37: /* stdc */
5142 goto ncp_insn;
5143 #endif
5144 default:
5145 goto illegal_insn;
5146 }
5147 } else
5148 goto illegal_insn;
5149 }
5150 break;
5151 }
5152 /* default case for non-jump instructions */
5153 if (dc->npc == DYNAMIC_PC) {
5154 dc->pc = DYNAMIC_PC;
5155 gen_op_next_insn();
5156 } else if (dc->npc == JUMP_PC) {
5157 /* we can do a static jump */
5158 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5159 dc->is_br = 1;
5160 } else {
5161 dc->pc = dc->npc;
5162 dc->npc = dc->npc + 4;
5163 }
5164 jmp_insn:
5165 goto egress;
5166 illegal_insn:
5167 {
5168 TCGv_i32 r_const;
5170 save_state(dc, cpu_cond);
5171 r_const = tcg_const_i32(TT_ILL_INSN);
5172 gen_helper_raise_exception(cpu_env, r_const);
5173 tcg_temp_free_i32(r_const);
5174 dc->is_br = 1;
5175 }
5176 goto egress;
5177 unimp_flush:
5178 {
5179 TCGv_i32 r_const;
5181 save_state(dc, cpu_cond);
5182 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5183 gen_helper_raise_exception(cpu_env, r_const);
5184 tcg_temp_free_i32(r_const);
5185 dc->is_br = 1;
5186 }
5187 goto egress;
5188 #if !defined(CONFIG_USER_ONLY)
5189 priv_insn:
5190 {
5191 TCGv_i32 r_const;
5193 save_state(dc, cpu_cond);
5194 r_const = tcg_const_i32(TT_PRIV_INSN);
5195 gen_helper_raise_exception(cpu_env, r_const);
5196 tcg_temp_free_i32(r_const);
5197 dc->is_br = 1;
5198 }
5199 goto egress;
5200 #endif
5201 nfpu_insn:
5202 save_state(dc, cpu_cond);
5203 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5204 dc->is_br = 1;
5205 goto egress;
5206 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5207 nfq_insn:
5208 save_state(dc, cpu_cond);
5209 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5210 dc->is_br = 1;
5211 goto egress;
5212 #endif
5213 #ifndef TARGET_SPARC64
5214 ncp_insn:
5215 {
5216 TCGv_i32 r_const;
5218 save_state(dc, cpu_cond);
5219 r_const = tcg_const_i32(TT_NCP_INSN);
5220 gen_helper_raise_exception(cpu_env, r_const);
5221 tcg_temp_free_i32(r_const);
5222 dc->is_br = 1;
5223 }
5224 goto egress;
5225 #endif
5226 egress:
5227 tcg_temp_free(cpu_tmp1);
5228 tcg_temp_free(cpu_tmp2);
5229 if (dc->n_t32 != 0) {
5230 int i;
5231 for (i = dc->n_t32 - 1; i >= 0; --i) {
5232 tcg_temp_free_i32(dc->t32[i]);
5233 }
5234 dc->n_t32 = 0;
5235 }
5236 }
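/* Translate one TB. When spc is set, the loop additionally fills
   the gen_opc_* side tables so that a retranslation can map a host
   pc back to a guest pc/npc (see gen_intermediate_code_pc and
   restore_state_to_opc below). */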
5238 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5239 int spc, CPUSPARCState *env)
5240 {
5241 target_ulong pc_start, last_pc;
5242 uint16_t *gen_opc_end;
5243 DisasContext dc1, *dc = &dc1;
5244 CPUBreakpoint *bp;
5245 int j, lj = -1;
5246 int num_insns;
5247 int max_insns;
5249 memset(dc, 0, sizeof(DisasContext));
5250 dc->tb = tb;
5251 pc_start = tb->pc;
5252 dc->pc = pc_start;
5253 last_pc = dc->pc;
5254 dc->npc = (target_ulong) tb->cs_base;
5255 dc->cc_op = CC_OP_DYNAMIC;
5256 dc->mem_idx = cpu_mmu_index(env);
5257 dc->def = env->def;
5258 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5259 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5260 dc->singlestep = (env->singlestep_enabled || singlestep);
5261 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5263 cpu_tmp0 = tcg_temp_new();
5264 cpu_tmp32 = tcg_temp_new_i32();
5265 cpu_tmp64 = tcg_temp_new_i64();
5267 cpu_dst = tcg_temp_local_new();
5269 // address and value temporaries for loads and stores
5270 cpu_val = tcg_temp_local_new();
5271 cpu_addr = tcg_temp_local_new();
5273 num_insns = 0;
5274 max_insns = tb->cflags & CF_COUNT_MASK;
5275 if (max_insns == 0)
5276 max_insns = CF_COUNT_MASK;
5277 gen_icount_start();
5278 do {
5279 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5280 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5281 if (bp->pc == dc->pc) {
5282 if (dc->pc != pc_start)
5283 save_state(dc, cpu_cond);
5284 gen_helper_debug(cpu_env);
5285 tcg_gen_exit_tb(0);
5286 dc->is_br = 1;
5287 goto exit_gen_loop;
5288 }
5289 }
5290 }
5291 if (spc) {
5292 qemu_log("Search PC...\n");
5293 j = gen_opc_ptr - gen_opc_buf;
5294 if (lj < j) {
5295 lj++;
5296 while (lj < j)
5297 gen_opc_instr_start[lj++] = 0;
5298 gen_opc_pc[lj] = dc->pc;
5299 gen_opc_npc[lj] = dc->npc;
5300 gen_opc_instr_start[lj] = 1;
5301 gen_opc_icount[lj] = num_insns;
5302 }
5303 }
5304 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5305 gen_io_start();
5306 last_pc = dc->pc;
5307 disas_sparc_insn(dc);
5308 num_insns++;
5310 if (dc->is_br)
5311 break;
5312 /* if the next PC is different, we abort now */
5313 if (dc->pc != (last_pc + 4))
5314 break;
5315 /* if we reach a page boundary, we stop generation so that the
5316 PC of a TT_TFAULT exception is always in the right page */
5317 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5318 break;
5319 /* if single step mode, we generate only one instruction and
5320 then raise an exception */
5321 if (dc->singlestep) {
5322 break;
5323 }
5324 } while ((gen_opc_ptr < gen_opc_end) &&
5325 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5326 num_insns < max_insns);
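/* translation also stops when the opcode buffer is nearly full or
   the TB approaches a page in size; the 32 bytes are presumably
   headroom for one last translated instruction */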
5328 exit_gen_loop:
5329 tcg_temp_free(cpu_addr);
5330 tcg_temp_free(cpu_val);
5331 tcg_temp_free(cpu_dst);
5332 tcg_temp_free_i64(cpu_tmp64);
5333 tcg_temp_free_i32(cpu_tmp32);
5334 tcg_temp_free(cpu_tmp0);
5336 if (tb->cflags & CF_LAST_IO)
5337 gen_io_end();
5338 if (!dc->is_br) {
5339 if (dc->pc != DYNAMIC_PC &&
5340 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5341 /* static PC and NPC: we can use direct chaining */
5342 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5343 } else {
5344 if (dc->pc != DYNAMIC_PC)
5345 tcg_gen_movi_tl(cpu_pc, dc->pc);
5346 save_npc(dc, cpu_cond);
5347 tcg_gen_exit_tb(0);
5348 }
5349 }
5350 gen_icount_end(tb, num_insns);
5351 *gen_opc_ptr = INDEX_op_end;
5352 if (spc) {
5353 j = gen_opc_ptr - gen_opc_buf;
5354 lj++;
5355 while (lj <= j)
5356 gen_opc_instr_start[lj++] = 0;
5357 #if 0
5358 log_page_dump();
5359 #endif
5360 gen_opc_jump_pc[0] = dc->jump_pc[0];
5361 gen_opc_jump_pc[1] = dc->jump_pc[1];
5362 } else {
5363 tb->size = last_pc + 4 - pc_start;
5364 tb->icount = num_insns;
5365 }
5366 #ifdef DEBUG_DISAS
5367 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5368 qemu_log("--------------\n");
5369 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5370 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5371 qemu_log("\n");
5372 }
5373 #endif
5374 }
5376 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5377 {
5378 gen_intermediate_code_internal(tb, 0, env);
5379 }
5381 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5382 {
5383 gen_intermediate_code_internal(tb, 1, env);
5384 }
5386 void gen_intermediate_code_init(CPUSPARCState *env)
5387 {
5388 unsigned int i;
5389 static int inited;
5390 static const char * const gregnames[8] = {
5391 NULL, // g0 not used
5392 "g1",
5393 "g2",
5394 "g3",
5395 "g4",
5396 "g5",
5397 "g6",
5398 "g7",
5400 static const char * const fregnames[32] = {
5401 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5402 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5403 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5404 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5407 /* init various static tables */
5408 if (!inited) {
5409 inited = 1;
5411 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5412 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5413 offsetof(CPUState, regwptr),
5414 "regwptr");
5415 #ifdef TARGET_SPARC64
5416 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5417 "xcc");
5418 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5419 "asi");
5420 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5421 "fprs");
5422 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5423 "gsr");
5424 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5425 offsetof(CPUState, tick_cmpr),
5426 "tick_cmpr");
5427 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5428 offsetof(CPUState, stick_cmpr),
5429 "stick_cmpr");
5430 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5431 offsetof(CPUState, hstick_cmpr),
5432 "hstick_cmpr");
5433 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5434 "hintp");
5435 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5436 "htba");
5437 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5438 "hver");
5439 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5440 offsetof(CPUState, ssr), "ssr");
5441 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5442 offsetof(CPUState, version), "ver");
5443 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5444 offsetof(CPUState, softint),
5445 "softint");
5446 #else
5447 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5448 "wim");
5449 #endif
5450 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5451 "cond");
5452 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5453 "cc_src");
5454 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5455 offsetof(CPUState, cc_src2),
5456 "cc_src2");
5457 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5458 "cc_dst");
5459 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5460 "cc_op");
5461 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5462 "psr");
5463 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5464 "fsr");
5465 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5466 "pc");
5467 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5468 "npc");
5469 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5470 #ifndef CONFIG_USER_ONLY
5471 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5472 "tbr");
5473 #endif
5474 for (i = 1; i < 8; i++) {
5475 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5476 offsetof(CPUState, gregs[i]),
5477 gregnames[i]);
5478 }
5479 for (i = 0; i < TARGET_DPREGS; i++) {
5480 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5481 offsetof(CPUState, fpr[i]),
5482 fregnames[i]);
5483 }
5485 /* register helpers */
5487 #define GEN_HELPER 2
5488 #include "helper.h"
5489 }
5490 }
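/* called after a fault in generated code: rebuild pc/npc from the
   gen_opc_* side tables at instruction index pc_pos */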
5492 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5493 {
5494 target_ulong npc;
5495 env->pc = gen_opc_pc[pc_pos];
5496 npc = gen_opc_npc[pc_pos];
5497 if (npc == 1) {
5498 /* npc == DYNAMIC_PC: the translated code already stored npc */
5499 } else if (npc == 2) {
5500 /* npc == JUMP_PC: use 'cond' and the jump targets of the translation */
5501 if (env->cond) {
5502 env->npc = gen_opc_jump_pc[0];
5503 } else {
5504 env->npc = gen_opc_jump_pc[1];
5505 }
5506 } else {
5507 env->npc = npc;
5508 }
5510 /* flush pending conditional evaluations before exposing cpu state */
5511 if (CC_OP != CC_OP_FLAGS) {
5512 helper_compute_psr(env);
5513 }
5514 }